re PR lto/85574 (LTO bootstapped binaries differ)
[official-gcc.git] / gcc / gimple-fold.c
blobd3ef05b52433c5065be815ebced880e2f3129657
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 enum strlen_range_kind {
70 /* Compute the exact constant string length. */
71 SRK_STRLEN,
72 /* Compute the maximum constant string length. */
73 SRK_STRLENMAX,
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
79 SRK_LENRANGE,
80 /* Determine the integer value of the argument (not string length). */
81 SRK_INT_VALUE
/* Forward declaration of the recursive worker behind the public
 get_range_strlen interface.  NOTE(review): the BITMAP argument
 presumably records SSA names already visited (to stop cycles through
 PHIs) and the trailing unsigned looks like a recursion-depth budget --
 confirm against the definition later in this file.  */
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87 /* Return true when DECL can be referenced from current unit.
88 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
89 We can get declarations that are not possible to reference for various
90 reasons:
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
94 when they are keyed to other compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to reference
99 to method that was partitioned elsewhere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
102 declaring the body.
103 3) COMDAT functions referred by external vtables that
104 we devirtualize only during final compilation stage.
105 At this time we already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
109 static bool
110 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
112 varpool_node *vnode;
113 struct cgraph_node *node;
114 symtab_node *snode;
/* Abstract origins (e.g. the DECL a clone was derived from) have no
 body of their own that could be referenced.  */
116 if (DECL_ABSTRACT_P (decl))
117 return false;
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
121 || !VAR_OR_FUNCTION_DECL_P (decl))
122 return true;
124 /* Static objects can be referred only if they are defined and not optimized
125 out yet. */
126 if (!TREE_PUBLIC (decl))
128 if (DECL_EXTERNAL (decl))
129 return false;
130 /* Before we start optimizing unreachable code we can be sure all
131 static objects are defined. */
132 if (symtab->function_flags_ready)
133 return true;
134 snode = symtab_node::get (decl);
135 if (!snode || !snode->definition)
136 return false;
/* An inline clone (inlined_to set) no longer has a standalone body
 that could be referenced.  */
137 node = dyn_cast <cgraph_node *> (snode);
138 return !node || !node->global.inlined_to;
141 /* We will later output the initializer, so we can refer to it.
142 So we are concerned only when DECL comes from initializer of
143 external var or var that has been optimized out. */
144 if (!from_decl
145 || !VAR_P (from_decl)
146 || (!DECL_EXTERNAL (from_decl)
147 && (vnode = varpool_node::get (from_decl)) != NULL
148 && vnode->definition)
149 || (flag_ltrans
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->in_other_partition))
152 return true;
153 /* We are folding reference from external vtable. The vtable may refer
154 to a symbol keyed to other compilation unit. The other compilation
155 unit may be in separate DSO and the symbol may be hidden. */
156 if (DECL_VISIBILITY_SPECIFIED (decl)
157 && DECL_EXTERNAL (decl)
158 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
159 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
160 return false;
161 /* When function is public, we always can introduce new reference.
162 Exception are the COMDAT functions where introducing a direct
163 reference imply need to include function body in the current unit. */
164 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
165 return true;
166 /* We have COMDAT. We are going to check if we still have definition
167 or if the definition is going to be output in other partition.
168 Bypass this when gimplifying; all needed functions will be produced.
170 As observed in PR20991 for already optimized out comdat virtual functions
171 it may be tempting to not necessarily give up because the copy will be
172 output elsewhere when corresponding vtable is output.
173 This is however not possible - ABI specify that COMDATs are output in
174 units where they are used and when the other unit was compiled with LTO
175 it is possible that vtable was kept public while the function itself
176 was privatized. */
177 if (!symtab->function_flags_ready)
178 return true;
180 snode = symtab_node::get (decl);
181 if (!snode
182 || ((!snode->definition || DECL_EXTERNAL (decl))
183 && (!snode->in_other_partition
184 || (!snode->forced_by_abi && !snode->force_output))))
185 return false;
/* Same as above: a function inlined into all its callers cannot be
 referenced anymore.  */
186 node = dyn_cast <cgraph_node *> (snode);
187 return !node || !node->global.inlined_to;
190 /* Create a temporary for TYPE for a statement STMT. If the current function
191 is in SSA form, a SSA name is created. Otherwise a temporary register
192 is made. */
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 if (gimple_in_ssa_p (cfun))
198 return make_ssa_name (type, stmt);
199 else
200 return create_tmp_reg (type);
203 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
204 acceptable form for is_gimple_min_invariant.
205 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
207 tree
208 canonicalize_constructor_val (tree cval, tree from_decl)
210 tree orig_cval = cval;
211 STRIP_NOPS (cval);
/* Rewrite PTR p+ CST into &MEM[ptr, cst], the form recognized as a
 minimal invariant.  */
212 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
213 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
215 tree ptr = TREE_OPERAND (cval, 0);
216 if (is_gimple_min_invariant (ptr))
217 cval = build1_loc (EXPR_LOCATION (cval),
218 ADDR_EXPR, TREE_TYPE (ptr),
219 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
220 ptr,
221 fold_convert (ptr_type_node,
222 TREE_OPERAND (cval, 1))));
224 if (TREE_CODE (cval) == ADDR_EXPR)
226 tree base = NULL_TREE;
/* For compound literals point the reference at the underlying decl.  */
227 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
229 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
230 if (base)
231 TREE_OPERAND (cval, 0) = base;
233 else
234 base = get_base_address (TREE_OPERAND (cval, 0));
235 if (!base)
236 return NULL_TREE;
238 if (VAR_OR_FUNCTION_DECL_P (base)
239 && !can_refer_decl_in_current_unit_p (base, from_decl))
240 return NULL_TREE;
241 if (TREE_TYPE (base) == error_mark_node)
242 return NULL_TREE;
/* We are keeping the address of BASE; it must be marked addressable.  */
243 if (VAR_P (base))
244 TREE_ADDRESSABLE (base) = 1;
245 else if (TREE_CODE (base) == FUNCTION_DECL)
247 /* Make sure we create a cgraph node for functions we'll reference.
248 They can be non-existent if the reference comes from an entry
249 of an external vtable for example. */
250 cgraph_node::get_create (base);
252 /* Fixup types in global initializers. */
253 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
254 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
256 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
257 cval = fold_convert (TREE_TYPE (orig_cval), cval);
258 return cval;
/* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants;
 return a copy with the flag cleared.  */
260 if (TREE_OVERFLOW_P (cval))
261 return drop_tree_overflow (cval);
262 return orig_cval;
265 /* If SYM is a constant variable with known value, return the value.
266 NULL_TREE is returned otherwise. */
268 tree
269 get_symbol_constant_value (tree sym)
271 tree val = ctor_for_folding (sym);
272 if (val != error_mark_node)
274 if (val)
/* Unshare before canonicalizing: canonicalize_constructor_val may
 modify the tree in place and the cached initializer must stay
 intact.  */
276 val = canonicalize_constructor_val (unshare_expr (val), sym);
277 if (val && is_gimple_min_invariant (val))
278 return val;
279 else
280 return NULL_TREE;
282 /* Variables declared 'const' without an initializer
283 have zero as the initializer if they may not be
284 overridden at link or run time. */
285 if (!val
286 && is_gimple_reg_type (TREE_TYPE (sym)))
287 return build_zero_cst (TREE_TYPE (sym));
/* ctor_for_folding returned error_mark_node: the initializer is not
 usable for folding.  */
290 return NULL_TREE;
295 /* Subroutine of fold_stmt. We perform several simplifications of the
296 memory reference tree EXPR and make sure to re-gimplify them properly
297 after propagation of constant addresses. IS_LHS is true if the
298 reference is supposed to be an lvalue. */
300 static tree
301 maybe_fold_reference (tree expr, bool is_lhs)
303 tree result;
/* Fold trivial wrappers around constants first; these are valid even
 for lvalues since no memory access remains.  */
305 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
306 || TREE_CODE (expr) == REALPART_EXPR
307 || TREE_CODE (expr) == IMAGPART_EXPR)
308 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
309 return fold_unary_loc (EXPR_LOCATION (expr),
310 TREE_CODE (expr),
311 TREE_TYPE (expr),
312 TREE_OPERAND (expr, 0));
313 else if (TREE_CODE (expr) == BIT_FIELD_REF
314 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
315 return fold_ternary_loc (EXPR_LOCATION (expr),
316 TREE_CODE (expr),
317 TREE_TYPE (expr),
318 TREE_OPERAND (expr, 0),
319 TREE_OPERAND (expr, 1),
320 TREE_OPERAND (expr, 2));
/* A load from a constant aggregate may be replaced by the constant
 value, but only when EXPR is used as an rvalue.  */
322 if (!is_lhs
323 && (result = fold_const_aggregate_ref (expr))
324 && is_gimple_min_invariant (result))
325 return result;
327 return NULL_TREE;
331 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
332 replacement rhs for the statement or NULL_TREE if no simplification
333 could be made. It is assumed that the operands have been previously
334 folded. */
336 static tree
337 fold_gimple_assign (gimple_stmt_iterator *si)
339 gimple *stmt = gsi_stmt (*si);
340 enum tree_code subcode = gimple_assign_rhs_code (stmt);
341 location_t loc = gimple_location (stmt);
343 tree result = NULL_TREE;
345 switch (get_gimple_rhs_class (subcode))
347 case GIMPLE_SINGLE_RHS:
349 tree rhs = gimple_assign_rhs1 (stmt);
/* Never touch clobbers; they mark the end of a variable's lifetime,
 not a real value.  */
351 if (TREE_CLOBBER_P (rhs))
352 return NULL_TREE;
354 if (REFERENCE_CLASS_P (rhs))
355 return maybe_fold_reference (rhs, false);
357 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
359 tree val = OBJ_TYPE_REF_EXPR (rhs);
360 if (is_gimple_min_invariant (val))
361 return val;
/* Try to statically resolve the virtual call target.  */
362 else if (flag_devirtualize && virtual_method_call_p (rhs))
364 bool final;
365 vec <cgraph_node *>targets
366 = possible_polymorphic_call_targets (rhs, stmt, &final);
367 if (final && targets.length () <= 1 && dbg_cnt (devirt))
369 if (dump_enabled_p ())
371 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
372 "resolving virtual function address "
373 "reference to function %s\n",
374 targets.length () == 1
375 ? targets[0]->name ()
376 : "NULL");
378 if (targets.length () == 1)
380 val = fold_convert (TREE_TYPE (val),
381 build_fold_addr_expr_loc
382 (loc, targets[0]->decl));
383 STRIP_USELESS_TYPE_CONVERSION (val);
385 else
386 /* We cannot use __builtin_unreachable here because it
387 cannot have address taken. */
388 val = build_int_cst (TREE_TYPE (val), 0);
389 return val;
394 else if (TREE_CODE (rhs) == ADDR_EXPR)
396 tree ref = TREE_OPERAND (rhs, 0);
397 tree tem = maybe_fold_reference (ref, true);
/* &MEM[p, 0] simplifies to p (with a possible type conversion).  */
398 if (tem
399 && TREE_CODE (tem) == MEM_REF
400 && integer_zerop (TREE_OPERAND (tem, 1)))
401 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
402 else if (tem)
403 result = fold_convert (TREE_TYPE (rhs),
404 build_fold_addr_expr_loc (loc, tem));
405 else if (TREE_CODE (ref) == MEM_REF
406 && integer_zerop (TREE_OPERAND (ref, 1)))
407 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
409 if (result)
411 /* Strip away useless type conversions. Both the
412 NON_LVALUE_EXPR that may have been added by fold, and
413 "useless" type conversions that might now be apparent
414 due to propagation. */
415 STRIP_USELESS_TYPE_CONVERSION (result);
417 if (result != rhs && valid_gimple_rhs_p (result))
418 return result;
422 else if (TREE_CODE (rhs) == CONSTRUCTOR
423 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
425 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
426 unsigned i;
427 tree val;
429 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
430 if (! CONSTANT_CLASS_P (val))
431 return NULL_TREE;
433 return build_vector_from_ctor (TREE_TYPE (rhs),
434 CONSTRUCTOR_ELTS (rhs));
437 else if (DECL_P (rhs))
438 return get_symbol_constant_value (rhs);
440 break;
/* Nothing to do for unary and binary RHSes here.  NOTE(review):
 presumably these are simplified by the match-and-simplify
 (gimple_simplify) machinery instead -- confirm with fold_stmt.  */
442 case GIMPLE_UNARY_RHS:
443 break;
445 case GIMPLE_BINARY_RHS:
446 break;
448 case GIMPLE_TERNARY_RHS:
449 result = fold_ternary_loc (loc, subcode,
450 TREE_TYPE (gimple_assign_lhs (stmt)),
451 gimple_assign_rhs1 (stmt),
452 gimple_assign_rhs2 (stmt),
453 gimple_assign_rhs3 (stmt));
455 if (result)
457 STRIP_USELESS_TYPE_CONVERSION (result);
458 if (valid_gimple_rhs_p (result))
459 return result;
461 break;
463 case GIMPLE_INVALID_RHS:
464 gcc_unreachable ();
467 return NULL_TREE;
471 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
472 adjusting the replacement stmts location and virtual operands.
473 If the statement has a lhs the last stmt in the sequence is expected
474 to assign to that lhs. */
476 static void
477 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
479 gimple *stmt = gsi_stmt (*si_p);
481 if (gimple_has_location (stmt))
482 annotate_all_with_location (stmts, gimple_location (stmt));
484 /* First iterate over the replacement statements backward, assigning
485 virtual operands to their defining statements. */
486 gimple *laststore = NULL;
487 for (gimple_stmt_iterator i = gsi_last (stmts);
488 !gsi_end_p (i); gsi_prev (&i))
490 gimple *new_stmt = gsi_stmt (i);
/* A statement needs a VDEF if it stores to memory: either a
 non-register assignment or a call that is not known to be free of
 virtual operands.  */
491 if ((gimple_assign_single_p (new_stmt)
492 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
493 || (is_gimple_call (new_stmt)
494 && (gimple_call_flags (new_stmt)
495 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
497 tree vdef;
/* The last store in the sequence inherits the original statement's
 VDEF so the chain downstream stays valid; earlier stores get fresh
 virtual SSA names.  */
498 if (!laststore)
499 vdef = gimple_vdef (stmt);
500 else
501 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
502 gimple_set_vdef (new_stmt, vdef);
503 if (vdef && TREE_CODE (vdef) == SSA_NAME)
504 SSA_NAME_DEF_STMT (vdef) = new_stmt;
505 laststore = new_stmt;
509 /* Second iterate over the statements forward, assigning virtual
510 operands to their uses. */
511 tree reaching_vuse = gimple_vuse (stmt);
512 for (gimple_stmt_iterator i = gsi_start (stmts);
513 !gsi_end_p (i); gsi_next (&i))
515 gimple *new_stmt = gsi_stmt (i);
516 /* If the new statement possibly has a VUSE, update it with exact SSA
517 name we know will reach this one. */
518 if (gimple_has_mem_ops (new_stmt))
519 gimple_set_vuse (new_stmt, reaching_vuse);
520 gimple_set_modified (new_stmt, true);
521 if (gimple_vdef (new_stmt))
522 reaching_vuse = gimple_vdef (new_stmt);
525 /* If the new sequence does not do a store release the virtual
526 definition of the original statement. */
527 if (reaching_vuse
528 && reaching_vuse == gimple_vuse (stmt))
530 tree vdef = gimple_vdef (stmt);
531 if (vdef
532 && TREE_CODE (vdef) == SSA_NAME)
534 unlink_stmt_vdef (stmt);
535 release_ssa_name (vdef);
539 /* Finally replace the original statement with the sequence. */
540 gsi_replace (si_p, stmts, false);
543 /* Convert EXPR into a GIMPLE value suitable for substitution on the
544 RHS of an assignment. Insert the necessary statements before
545 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
546 is replaced. If the call is expected to produce a result, then it
547 is replaced by an assignment of the new RHS to the result variable.
548 If the result is to be ignored, then the call is replaced by a
549 GIMPLE_NOP. A proper VDEF chain is retained by making the first
550 VUSE and the last VDEF of the whole sequence be the same as the replaced
551 statement and using new SSA names for stores in between. */
553 void
554 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
556 tree lhs;
557 gimple *stmt, *new_stmt;
558 gimple_stmt_iterator i;
559 gimple_seq stmts = NULL;
561 stmt = gsi_stmt (*si_p);
563 gcc_assert (is_gimple_call (stmt));
565 push_gimplify_context (gimple_in_ssa_p (cfun));
567 lhs = gimple_call_lhs (stmt);
568 if (lhs == NULL_TREE)
570 gimplify_and_add (expr, &stmts);
571 /* We can end up with folding a memcpy of an empty class assignment
572 which gets optimized away by C++ gimplification. */
573 if (gimple_seq_empty_p (stmts))
/* EXPR gimplified to nothing: drop the call entirely, releasing its
 SSA defs, and leave a no-op in its place.  */
575 pop_gimplify_context (NULL);
576 if (gimple_in_ssa_p (cfun))
578 unlink_stmt_vdef (stmt);
579 release_defs (stmt);
581 gsi_replace (si_p, gimple_build_nop (), false);
582 return;
585 else
/* The call has a LHS: gimplify EXPR to a value and append the final
 assignment LHS = value at the end of the sequence.  */
587 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
588 new_stmt = gimple_build_assign (lhs, tmp);
589 i = gsi_last (stmts);
590 gsi_insert_after_without_update (&i, new_stmt,
591 GSI_CONTINUE_LINKING);
594 pop_gimplify_context (NULL);
/* Splice the sequence in, wiring up virtual operands as documented
 above.  */
596 gsi_replace_with_seq_vops (si_p, stmts);
600 /* Replace the call at *GSI with the gimple value VAL. */
602 void
603 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
605 gimple *stmt = gsi_stmt (*gsi);
606 tree lhs = gimple_call_lhs (stmt);
607 gimple *repl;
608 if (lhs)
610 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
611 val = fold_convert (TREE_TYPE (lhs), val);
612 repl = gimple_build_assign (lhs, val);
614 else
615 repl = gimple_build_nop ();
616 tree vdef = gimple_vdef (stmt);
617 if (vdef && TREE_CODE (vdef) == SSA_NAME)
619 unlink_stmt_vdef (stmt);
620 release_ssa_name (vdef);
622 gsi_replace (gsi, repl, false);
625 /* Replace the call at *GSI with the new call REPL and fold that
626 again. */
628 static void
629 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
631 gimple *stmt = gsi_stmt (*gsi);
632 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
633 gimple_set_location (repl, gimple_location (stmt));
634 if (gimple_vdef (stmt)
635 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
637 gimple_set_vdef (repl, gimple_vdef (stmt));
638 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
640 if (gimple_vuse (stmt))
641 gimple_set_vuse (repl, gimple_vuse (stmt));
642 gsi_replace (gsi, repl, false);
643 fold_stmt (gsi);
646 /* Return true if VAR is a VAR_DECL or a component thereof. */
648 static bool
649 var_decl_component_p (tree var)
651 tree inner = var;
652 while (handled_component_p (inner))
653 inner = TREE_OPERAND (inner, 0);
654 return (DECL_P (inner)
655 || (TREE_CODE (inner) == MEM_REF
656 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
659 /* Return TRUE if the SIZE argument, representing the size of an
660 object, is in a range of values of which exactly zero is valid. */
662 static bool
663 size_must_be_zero_p (tree size)
665 if (integer_zerop (size))
666 return true;
/* Only SSA names carry range information we can query.  */
668 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
669 return false;
671 tree type = TREE_TYPE (size);
672 int prec = TYPE_PRECISION (type);
674 /* Compute the value of SSIZE_MAX, the largest positive value that
675 can be stored in ssize_t, the signed counterpart of size_t. */
676 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
677 value_range valid_range (VR_RANGE,
678 build_int_cst (type, 0),
679 wide_int_to_tree (type, ssize_max));
/* Intersect the recorded range of SIZE with the valid object-size
 range [0, SSIZE_MAX]; if the only value left is zero, callers may
 treat the operation as a no-op.  */
680 value_range vr;
681 get_range_info (size, vr);
682 vr.intersect (&valid_range);
683 return vr.zero_p ();
686 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
687 diagnose (otherwise undefined) overlapping copies without preventing
688 folding. When folded, GCC guarantees that overlapping memcpy has
689 the same semantics as memmove. Call to the library memcpy need not
690 provide the same guarantee. Return false if no simplification can
691 be made. */
693 static bool
694 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
695 tree dest, tree src, enum built_in_function code)
697 gimple *stmt = gsi_stmt (*gsi);
698 tree lhs = gimple_call_lhs (stmt);
699 tree len = gimple_call_arg (stmt, 2);
700 tree destvar, srcvar;
701 location_t loc = gimple_location (stmt);
703 /* If the LEN parameter is a constant zero or in range where
704 the only valid value is zero, return DEST. */
705 if (size_must_be_zero_p (len))
707 gimple *repl;
708 if (gimple_call_lhs (stmt))
709 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
710 else
711 repl = gimple_build_nop ();
712 tree vdef = gimple_vdef (stmt);
713 if (vdef && TREE_CODE (vdef) == SSA_NAME)
715 unlink_stmt_vdef (stmt);
716 release_ssa_name (vdef);
718 gsi_replace (gsi, repl, false);
719 return true;
722 /* If SRC and DEST are the same (and not volatile), return
723 DEST{,+LEN,+LEN-1}. */
724 if (operand_equal_p (src, dest, 0))
726 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
727 It's safe and may even be emitted by GCC itself (see bug
728 32667). */
729 unlink_stmt_vdef (stmt);
730 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
731 release_ssa_name (gimple_vdef (stmt));
732 if (!lhs)
734 gsi_replace (gsi, gimple_build_nop (), false);
735 return true;
737 goto done;
739 else
741 tree srctype, desttype;
742 unsigned int src_align, dest_align;
743 tree off0;
744 const char *tmp_str;
745 unsigned HOST_WIDE_INT tmp_len;
747 /* Build accesses at offset zero with a ref-all character type. */
748 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
749 ptr_mode, true), 0);
751 /* If we can perform the copy efficiently with first doing all loads
752 and then all stores inline it that way. Currently efficiently
753 means that we can load all the memory into a single integer
754 register which is what MOVE_MAX gives us. */
755 src_align = get_pointer_alignment (src);
756 dest_align = get_pointer_alignment (dest);
757 if (tree_fits_uhwi_p (len)
758 && compare_tree_int (len, MOVE_MAX) <= 0
759 /* ??? Don't transform copies from strings with known length this
760 confuses the tree-ssa-strlen.c. This doesn't handle
761 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
762 reason. */
763 && !c_strlen (src, 2)
764 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
765 && memchr (tmp_str, 0, tmp_len) == NULL))
767 unsigned ilen = tree_to_uhwi (len);
768 if (pow2p_hwi (ilen))
770 /* Detect out-of-bounds accesses without issuing warnings.
771 Avoid folding out-of-bounds copies but to avoid false
772 positives for unreachable code defer warning until after
773 DCE has worked its magic.
774 -Wrestrict is still diagnosed. */
775 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
776 dest, src, len, len,
777 false, false))
778 if (warning != OPT_Wrestrict)
779 return false;
781 scalar_int_mode mode;
782 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
783 if (type
784 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
785 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
786 /* If the destination pointer is not aligned we must be able
787 to emit an unaligned store. */
788 && (dest_align >= GET_MODE_ALIGNMENT (mode)
789 || !targetm.slow_unaligned_access (mode, dest_align)
790 || (optab_handler (movmisalign_optab, mode)
791 != CODE_FOR_nothing)))
793 tree srctype = type;
794 tree desttype = type;
795 if (src_align < GET_MODE_ALIGNMENT (mode))
796 srctype = build_aligned_type (type, src_align);
797 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
798 tree tem = fold_const_aggregate_ref (srcmem);
799 if (tem)
800 srcmem = tem;
801 else if (src_align < GET_MODE_ALIGNMENT (mode)
802 && targetm.slow_unaligned_access (mode, src_align)
803 && (optab_handler (movmisalign_optab, mode)
804 == CODE_FOR_nothing))
805 srcmem = NULL_TREE;
806 if (srcmem)
808 gimple *new_stmt;
809 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
811 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
812 srcmem
813 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
814 new_stmt);
815 gimple_assign_set_lhs (new_stmt, srcmem);
816 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
817 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
819 if (dest_align < GET_MODE_ALIGNMENT (mode))
820 desttype = build_aligned_type (type, dest_align);
821 new_stmt
822 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
823 dest, off0),
824 srcmem);
825 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
826 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
827 if (gimple_vdef (new_stmt)
828 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
829 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
830 if (!lhs)
832 gsi_replace (gsi, new_stmt, false);
833 return true;
835 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
836 goto done;
842 if (code == BUILT_IN_MEMMOVE)
844 /* Both DEST and SRC must be pointer types.
845 ??? This is what old code did. Is the testing for pointer types
846 really mandatory?
848 If either SRC is readonly or length is 1, we can use memcpy. */
849 if (!dest_align || !src_align)
850 return false;
851 if (readonly_data_expr (src)
852 || (tree_fits_uhwi_p (len)
853 && (MIN (src_align, dest_align) / BITS_PER_UNIT
854 >= tree_to_uhwi (len))))
856 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
857 if (!fn)
858 return false;
859 gimple_call_set_fndecl (stmt, fn);
860 gimple_call_set_arg (stmt, 0, dest);
861 gimple_call_set_arg (stmt, 1, src);
862 fold_stmt (gsi);
863 return true;
866 /* If *src and *dest can't overlap, optimize into memcpy as well. */
867 if (TREE_CODE (src) == ADDR_EXPR
868 && TREE_CODE (dest) == ADDR_EXPR)
870 tree src_base, dest_base, fn;
871 poly_int64 src_offset = 0, dest_offset = 0;
872 poly_uint64 maxsize;
874 srcvar = TREE_OPERAND (src, 0);
875 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
876 if (src_base == NULL)
877 src_base = srcvar;
878 destvar = TREE_OPERAND (dest, 0);
879 dest_base = get_addr_base_and_unit_offset (destvar,
880 &dest_offset);
881 if (dest_base == NULL)
882 dest_base = destvar;
883 if (!poly_int_tree_p (len, &maxsize))
884 maxsize = -1;
885 if (SSA_VAR_P (src_base)
886 && SSA_VAR_P (dest_base))
888 if (operand_equal_p (src_base, dest_base, 0)
889 && ranges_maybe_overlap_p (src_offset, maxsize,
890 dest_offset, maxsize))
891 return false;
893 else if (TREE_CODE (src_base) == MEM_REF
894 && TREE_CODE (dest_base) == MEM_REF)
896 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
897 TREE_OPERAND (dest_base, 0), 0))
898 return false;
899 poly_offset_int full_src_offset
900 = mem_ref_offset (src_base) + src_offset;
901 poly_offset_int full_dest_offset
902 = mem_ref_offset (dest_base) + dest_offset;
903 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
904 full_dest_offset, maxsize))
905 return false;
907 else
908 return false;
910 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
911 if (!fn)
912 return false;
913 gimple_call_set_fndecl (stmt, fn);
914 gimple_call_set_arg (stmt, 0, dest);
915 gimple_call_set_arg (stmt, 1, src);
916 fold_stmt (gsi);
917 return true;
920 /* If the destination and source do not alias optimize into
921 memcpy as well. */
922 if ((is_gimple_min_invariant (dest)
923 || TREE_CODE (dest) == SSA_NAME)
924 && (is_gimple_min_invariant (src)
925 || TREE_CODE (src) == SSA_NAME))
927 ao_ref destr, srcr;
928 ao_ref_init_from_ptr_and_size (&destr, dest, len);
929 ao_ref_init_from_ptr_and_size (&srcr, src, len);
930 if (!refs_may_alias_p_1 (&destr, &srcr, false))
932 tree fn;
933 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
934 if (!fn)
935 return false;
936 gimple_call_set_fndecl (stmt, fn);
937 gimple_call_set_arg (stmt, 0, dest);
938 gimple_call_set_arg (stmt, 1, src);
939 fold_stmt (gsi);
940 return true;
944 return false;
947 if (!tree_fits_shwi_p (len))
948 return false;
949 if (!POINTER_TYPE_P (TREE_TYPE (src))
950 || !POINTER_TYPE_P (TREE_TYPE (dest)))
951 return false;
952 /* In the following try to find a type that is most natural to be
953 used for the memcpy source and destination and that allows
954 the most optimization when memcpy is turned into a plain assignment
955 using that type. In theory we could always use a char[len] type
956 but that only gains us that the destination and source possibly
957 no longer will have their address taken. */
958 srctype = TREE_TYPE (TREE_TYPE (src));
959 if (TREE_CODE (srctype) == ARRAY_TYPE
960 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
961 srctype = TREE_TYPE (srctype);
962 desttype = TREE_TYPE (TREE_TYPE (dest));
963 if (TREE_CODE (desttype) == ARRAY_TYPE
964 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
965 desttype = TREE_TYPE (desttype);
966 if (TREE_ADDRESSABLE (srctype)
967 || TREE_ADDRESSABLE (desttype))
968 return false;
970 /* Make sure we are not copying using a floating-point mode or
971 a type whose size possibly does not match its precision. */
972 if (FLOAT_MODE_P (TYPE_MODE (desttype))
973 || TREE_CODE (desttype) == BOOLEAN_TYPE
974 || TREE_CODE (desttype) == ENUMERAL_TYPE)
975 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
976 if (FLOAT_MODE_P (TYPE_MODE (srctype))
977 || TREE_CODE (srctype) == BOOLEAN_TYPE
978 || TREE_CODE (srctype) == ENUMERAL_TYPE)
979 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
980 if (!srctype)
981 srctype = desttype;
982 if (!desttype)
983 desttype = srctype;
984 if (!srctype)
985 return false;
987 src_align = get_pointer_alignment (src);
988 dest_align = get_pointer_alignment (dest);
989 if (dest_align < TYPE_ALIGN (desttype)
990 || src_align < TYPE_ALIGN (srctype))
991 return false;
993 destvar = NULL_TREE;
994 if (TREE_CODE (dest) == ADDR_EXPR
995 && var_decl_component_p (TREE_OPERAND (dest, 0))
996 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
997 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
999 srcvar = NULL_TREE;
1000 if (TREE_CODE (src) == ADDR_EXPR
1001 && var_decl_component_p (TREE_OPERAND (src, 0))
1002 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1004 if (!destvar
1005 || src_align >= TYPE_ALIGN (desttype))
1006 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1007 src, off0);
1008 else if (!STRICT_ALIGNMENT)
1010 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1011 src_align);
1012 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1016 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1017 return false;
1019 if (srcvar == NULL_TREE)
1021 if (src_align >= TYPE_ALIGN (desttype))
1022 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1023 else
1025 if (STRICT_ALIGNMENT)
1026 return false;
1027 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1028 src_align);
1029 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1032 else if (destvar == NULL_TREE)
1034 if (dest_align >= TYPE_ALIGN (srctype))
1035 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1036 else
1038 if (STRICT_ALIGNMENT)
1039 return false;
1040 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1041 dest_align);
1042 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1046 /* Same as above, detect out-of-bounds accesses without issuing
1047 warnings. Avoid folding out-of-bounds copies but to avoid
1048 false positives for unreachable code defer warning until
1049 after DCE has worked its magic.
1050 -Wrestrict is still diagnosed. */
1051 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1052 dest, src, len, len,
1053 false, false))
1054 if (warning != OPT_Wrestrict)
1055 return false;
1057 gimple *new_stmt;
1058 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1060 tree tem = fold_const_aggregate_ref (srcvar);
1061 if (tem)
1062 srcvar = tem;
1063 if (! is_gimple_min_invariant (srcvar))
1065 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1066 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1067 new_stmt);
1068 gimple_assign_set_lhs (new_stmt, srcvar);
1069 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1072 new_stmt = gimple_build_assign (destvar, srcvar);
1073 goto set_vop_and_replace;
1076 /* We get an aggregate copy. Use an unsigned char[] type to
1077 perform the copying to preserve padding and to avoid any issues
1078 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1079 desttype = build_array_type_nelts (unsigned_char_type_node,
1080 tree_to_uhwi (len));
1081 srctype = desttype;
1082 if (src_align > TYPE_ALIGN (srctype))
1083 srctype = build_aligned_type (srctype, src_align);
1084 if (dest_align > TYPE_ALIGN (desttype))
1085 desttype = build_aligned_type (desttype, dest_align);
1086 new_stmt
1087 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1088 fold_build2 (MEM_REF, srctype, src, off0));
1089 set_vop_and_replace:
1090 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1091 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1092 if (gimple_vdef (new_stmt)
1093 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1094 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1095 if (!lhs)
1097 gsi_replace (gsi, new_stmt, false);
1098 return true;
1100 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1103 done:
1104 gimple_seq stmts = NULL;
1105 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1106 len = NULL_TREE;
1107 else if (code == BUILT_IN_MEMPCPY)
1109 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1110 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1111 TREE_TYPE (dest), dest, len);
1113 else
1114 gcc_unreachable ();
1116 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1117 gimple *repl = gimple_build_assign (lhs, dest);
1118 gsi_replace (gsi, repl, false);
1119 return true;
1122 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1123 to built-in memcmp (a, b, len). */
1125 static bool
1126 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1128 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1130 if (!fn)
1131 return false;
1133 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1135 gimple *stmt = gsi_stmt (*gsi);
1136 tree a = gimple_call_arg (stmt, 0);
1137 tree b = gimple_call_arg (stmt, 1);
1138 tree len = gimple_call_arg (stmt, 2);
1140 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1141 replace_call_with_call_and_fold (gsi, repl);
1143 return true;
1146 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1147 to built-in memmove (dest, src, len). */
1149 static bool
1150 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1152 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1154 if (!fn)
1155 return false;
1157 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1158 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1159 len) into memmove (dest, src, len). */
1161 gimple *stmt = gsi_stmt (*gsi);
1162 tree src = gimple_call_arg (stmt, 0);
1163 tree dest = gimple_call_arg (stmt, 1);
1164 tree len = gimple_call_arg (stmt, 2);
1166 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1167 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1168 replace_call_with_call_and_fold (gsi, repl);
1170 return true;
1173 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1174 to built-in memset (dest, 0, len). */
1176 static bool
1177 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1179 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1181 if (!fn)
1182 return false;
1184 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1186 gimple *stmt = gsi_stmt (*gsi);
1187 tree dest = gimple_call_arg (stmt, 0);
1188 tree len = gimple_call_arg (stmt, 1);
1190 gimple_seq seq = NULL;
1191 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1192 gimple_seq_add_stmt_without_update (&seq, repl);
1193 gsi_replace_with_seq_vops (gsi, seq);
1194 fold_stmt (gsi);
1196 return true;
1199 /* Fold function call to builtin memset or bzero at *GSI setting the
1200 memory of size LEN to VAL. Return whether a simplification was made. */
1202 static bool
1203 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1205 gimple *stmt = gsi_stmt (*gsi);
1206 tree etype;
1207 unsigned HOST_WIDE_INT length, cval;
1209 /* If the LEN parameter is zero, return DEST. */
1210 if (integer_zerop (len))
1212 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1213 return true;
1216 if (! tree_fits_uhwi_p (len))
1217 return false;
1219 if (TREE_CODE (c) != INTEGER_CST)
1220 return false;
1222 tree dest = gimple_call_arg (stmt, 0);
1223 tree var = dest;
1224 if (TREE_CODE (var) != ADDR_EXPR)
1225 return false;
1227 var = TREE_OPERAND (var, 0);
1228 if (TREE_THIS_VOLATILE (var))
1229 return false;
1231 etype = TREE_TYPE (var);
1232 if (TREE_CODE (etype) == ARRAY_TYPE)
1233 etype = TREE_TYPE (etype);
1235 if (!INTEGRAL_TYPE_P (etype)
1236 && !POINTER_TYPE_P (etype))
1237 return NULL_TREE;
1239 if (! var_decl_component_p (var))
1240 return NULL_TREE;
1242 length = tree_to_uhwi (len);
1243 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1244 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1245 return NULL_TREE;
1247 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1248 return NULL_TREE;
1250 if (integer_zerop (c))
1251 cval = 0;
1252 else
1254 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1255 return NULL_TREE;
1257 cval = TREE_INT_CST_LOW (c);
1258 cval &= 0xff;
1259 cval |= cval << 8;
1260 cval |= cval << 16;
1261 cval |= (cval << 31) << 1;
1264 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1265 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1266 gimple_set_vuse (store, gimple_vuse (stmt));
1267 tree vdef = gimple_vdef (stmt);
1268 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1270 gimple_set_vdef (store, gimple_vdef (stmt));
1271 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1273 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1274 if (gimple_call_lhs (stmt))
1276 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1277 gsi_replace (gsi, asgn, false);
1279 else
1281 gimple_stmt_iterator gsi2 = *gsi;
1282 gsi_prev (gsi);
1283 gsi_remove (&gsi2, true);
1286 return true;
1289 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1291 static bool
1292 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1293 c_strlen_data *pdata, unsigned eltsize)
1295 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1297 /* The length computed by this invocation of the function. */
1298 tree val = NULL_TREE;
1300 /* True if VAL is an optimistic (tight) bound determined from
1301 the size of the character array in which the string may be
1302 stored. In that case, the computed VAL is used to set
1303 PDATA->MAXBOUND. */
1304 bool tight_bound = false;
1306 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1307 if (TREE_CODE (arg) == ADDR_EXPR
1308 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1310 tree op = TREE_OPERAND (arg, 0);
1311 if (integer_zerop (TREE_OPERAND (op, 1)))
1313 tree aop0 = TREE_OPERAND (op, 0);
1314 if (TREE_CODE (aop0) == INDIRECT_REF
1315 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1316 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1317 pdata, eltsize);
1319 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1320 && rkind == SRK_LENRANGE)
1322 /* Fail if an array is the last member of a struct object
1323 since it could be treated as a (fake) flexible array
1324 member. */
1325 tree idx = TREE_OPERAND (op, 1);
1327 arg = TREE_OPERAND (op, 0);
1328 tree optype = TREE_TYPE (arg);
1329 if (tree dom = TYPE_DOMAIN (optype))
1330 if (tree bound = TYPE_MAX_VALUE (dom))
1331 if (TREE_CODE (bound) == INTEGER_CST
1332 && TREE_CODE (idx) == INTEGER_CST
1333 && tree_int_cst_lt (bound, idx))
1334 return false;
1338 if (rkind == SRK_INT_VALUE)
1340 /* We are computing the maximum value (not string length). */
1341 val = arg;
1342 if (TREE_CODE (val) != INTEGER_CST
1343 || tree_int_cst_sgn (val) < 0)
1344 return false;
1346 else
1348 c_strlen_data lendata = { };
1349 val = c_strlen (arg, 1, &lendata, eltsize);
1351 if (!val && lendata.decl)
1353 /* ARG refers to an unterminated const character array.
1354 DATA.DECL with size DATA.LEN. */
1355 val = lendata.minlen;
1356 pdata->decl = lendata.decl;
1360 if (!val && rkind == SRK_LENRANGE)
1362 if (TREE_CODE (arg) == ADDR_EXPR)
1363 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1364 pdata, eltsize);
1366 if (TREE_CODE (arg) == ARRAY_REF)
1368 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1370 /* Determine the "innermost" array type. */
1371 while (TREE_CODE (optype) == ARRAY_TYPE
1372 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1373 optype = TREE_TYPE (optype);
1375 /* Avoid arrays of pointers. */
1376 tree eltype = TREE_TYPE (optype);
1377 if (TREE_CODE (optype) != ARRAY_TYPE
1378 || !INTEGRAL_TYPE_P (eltype))
1379 return false;
1381 /* Fail when the array bound is unknown or zero. */
1382 val = TYPE_SIZE_UNIT (optype);
1383 if (!val || integer_zerop (val))
1384 return false;
1386 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1387 integer_one_node);
1389 /* Set the minimum size to zero since the string in
1390 the array could have zero length. */
1391 pdata->minlen = ssize_int (0);
1393 tight_bound = true;
1395 else if (TREE_CODE (arg) == COMPONENT_REF
1396 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1397 == ARRAY_TYPE))
1399 /* Use the type of the member array to determine the upper
1400 bound on the length of the array. This may be overly
1401 optimistic if the array itself isn't NUL-terminated and
1402 the caller relies on the subsequent member to contain
1403 the NUL but that would only be considered valid if
1404 the array were the last member of a struct. */
1406 tree fld = TREE_OPERAND (arg, 1);
1408 tree optype = TREE_TYPE (fld);
1410 /* Determine the "innermost" array type. */
1411 while (TREE_CODE (optype) == ARRAY_TYPE
1412 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1413 optype = TREE_TYPE (optype);
1415 /* Fail when the array bound is unknown or zero. */
1416 val = TYPE_SIZE_UNIT (optype);
1417 if (!val || integer_zerop (val))
1418 return false;
1419 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1420 integer_one_node);
1422 /* Set the minimum size to zero since the string in
1423 the array could have zero length. */
1424 pdata->minlen = ssize_int (0);
1426 /* The array size determined above is an optimistic bound
1427 on the length. If the array isn't nul-terminated the
1428 length computed by the library function would be greater.
1429 Even though using strlen to cross the subobject boundary
1430 is undefined, avoid drawing conclusions from the member
1431 type about the length here. */
1432 tight_bound = true;
1434 else if (VAR_P (arg))
1436 /* Avoid handling pointers to arrays. GCC might misuse
1437 a pointer to an array of one bound to point to an array
1438 object of a greater bound. */
1439 tree argtype = TREE_TYPE (arg);
1440 if (TREE_CODE (argtype) == ARRAY_TYPE)
1442 val = TYPE_SIZE_UNIT (argtype);
1443 if (!val
1444 || TREE_CODE (val) != INTEGER_CST
1445 || integer_zerop (val))
1446 return false;
1447 val = wide_int_to_tree (TREE_TYPE (val),
1448 wi::sub (wi::to_wide (val), 1));
1450 /* Set the minimum size to zero since the string in
1451 the array could have zero length. */
1452 pdata->minlen = ssize_int (0);
1457 if (!val)
1458 return false;
1460 /* Adjust the lower bound on the string length as necessary. */
1461 if (!pdata->minlen
1462 || (rkind != SRK_STRLEN
1463 && TREE_CODE (pdata->minlen) == INTEGER_CST
1464 && TREE_CODE (val) == INTEGER_CST
1465 && tree_int_cst_lt (val, pdata->minlen)))
1466 pdata->minlen = val;
1468 if (pdata->maxbound)
1470 /* Adjust the tighter (more optimistic) string length bound
1471 if necessary and proceed to adjust the more conservative
1472 bound. */
1473 if (TREE_CODE (val) == INTEGER_CST)
1475 if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
1477 if (tree_int_cst_lt (pdata->maxbound, val))
1478 pdata->maxbound = val;
1480 else
1481 pdata->maxbound = build_all_ones_cst (size_type_node);
1483 else
1484 pdata->maxbound = val;
1486 else
1487 pdata->maxbound = val;
1489 if (tight_bound)
1491 /* VAL computed above represents an optimistically tight bound
1492 on the length of the string based on the referenced object's
1493 or subobject's type. Determine the conservative upper bound
1494 based on the enclosing object's size if possible. */
1495 if (rkind == SRK_LENRANGE)
1497 poly_int64 offset;
1498 tree base = get_addr_base_and_unit_offset (arg, &offset);
1499 if (!base)
1501 /* When the call above fails due to a non-constant offset
1502 assume the offset is zero and use the size of the whole
1503 enclosing object instead. */
1504 base = get_base_address (arg);
1505 offset = 0;
1507 /* If the base object is a pointer no upper bound on the length
1508 can be determined. Otherwise the maximum length is equal to
1509 the size of the enclosing object minus the offset of
1510 the referenced subobject minus 1 (for the terminating nul). */
1511 tree type = TREE_TYPE (base);
1512 if (TREE_CODE (type) == POINTER_TYPE
1513 || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
1514 val = build_all_ones_cst (size_type_node);
1515 else
1517 val = DECL_SIZE_UNIT (base);
1518 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1519 size_int (offset + 1));
1522 else
1523 return false;
1526 if (pdata->maxlen)
1528 /* Adjust the more conservative bound if possible/necessary
1529 and fail otherwise. */
1530 if (rkind != SRK_STRLEN)
1532 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1533 || TREE_CODE (val) != INTEGER_CST)
1534 return false;
1536 if (tree_int_cst_lt (pdata->maxlen, val))
1537 pdata->maxlen = val;
1538 return true;
1540 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1542 /* Fail if the length of this ARG is different from that
1543 previously determined from another ARG. */
1544 return false;
1548 pdata->maxlen = val;
1549 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1552 /* For an ARG referencing one or more strings, try to obtain the range
1553 of their lengths, or the size of the largest array ARG referes to if
1554 the range of lengths cannot be determined, and store all in *PDATA.
1555 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1556 the maximum constant value.
1557 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1558 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1559 length or if we are unable to determine the length, return false.
1560 VISITED is a bitmap of visited variables.
1561 RKIND determines the kind of value or range to obtain (see
1562 strlen_range_kind).
1563 Set PDATA->DECL if ARG refers to an unterminated constant array.
1564 On input, set ELTSIZE to 1 for normal single byte character strings,
1565 and either 2 or 4 for wide characer strings (the size of wchar_t).
1566 Return true if *PDATA was successfully populated and false otherwise. */
1568 static bool
1569 get_range_strlen (tree arg, bitmap *visited,
1570 strlen_range_kind rkind,
1571 c_strlen_data *pdata, unsigned eltsize)
1574 if (TREE_CODE (arg) != SSA_NAME)
1575 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1577 /* If ARG is registered for SSA update we cannot look at its defining
1578 statement. */
1579 if (name_registered_for_update_p (arg))
1580 return false;
1582 /* If we were already here, break the infinite cycle. */
1583 if (!*visited)
1584 *visited = BITMAP_ALLOC (NULL);
1585 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1586 return true;
1588 tree var = arg;
1589 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1591 switch (gimple_code (def_stmt))
1593 case GIMPLE_ASSIGN:
1594 /* The RHS of the statement defining VAR must either have a
1595 constant length or come from another SSA_NAME with a constant
1596 length. */
1597 if (gimple_assign_single_p (def_stmt)
1598 || gimple_assign_unary_nop_p (def_stmt))
1600 tree rhs = gimple_assign_rhs1 (def_stmt);
1601 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1603 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1605 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1606 gimple_assign_rhs3 (def_stmt) };
1608 for (unsigned int i = 0; i < 2; i++)
1609 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1611 if (rkind != SRK_LENRANGE)
1612 return false;
1613 /* Set the upper bound to the maximum to prevent
1614 it from being adjusted in the next iteration but
1615 leave MINLEN and the more conservative MAXBOUND
1616 determined so far alone (or leave them null if
1617 they haven't been set yet). That the MINLEN is
1618 in fact zero can be determined from MAXLEN being
1619 unbounded but the discovered minimum is used for
1620 diagnostics. */
1621 pdata->maxlen = build_all_ones_cst (size_type_node);
1623 return true;
1625 return false;
1627 case GIMPLE_PHI:
1628 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1629 must have a constant length. */
1630 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1632 tree arg = gimple_phi_arg (def_stmt, i)->def;
1634 /* If this PHI has itself as an argument, we cannot
1635 determine the string length of this argument. However,
1636 if we can find a constant string length for the other
1637 PHI args then we can still be sure that this is a
1638 constant string length. So be optimistic and just
1639 continue with the next argument. */
1640 if (arg == gimple_phi_result (def_stmt))
1641 continue;
1643 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1645 if (rkind != SRK_LENRANGE)
1646 return false;
1647 /* Set the upper bound to the maximum to prevent
1648 it from being adjusted in the next iteration but
1649 leave MINLEN and the more conservative MAXBOUND
1650 determined so far alone (or leave them null if
1651 they haven't been set yet). That the MINLEN is
1652 in fact zero can be determined from MAXLEN being
1653 unbounded but the discovered minimum is used for
1654 diagnostics. */
1655 pdata->maxlen = build_all_ones_cst (size_type_node);
1658 return true;
1660 default:
1661 return false;
1665 /* Determine the minimum and maximum value or string length that ARG
1666 refers to and store each in the first two elements of MINMAXLEN.
1667 For expressions that point to strings of unknown lengths that are
1668 character arrays, use the upper bound of the array as the maximum
1669 length. For example, given an expression like 'x ? array : "xyz"'
1670 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1671 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1672 stored in array.
1673 Return true if the range of the string lengths has been obtained
1674 from the upper bound of an array at the end of a struct. Such
1675 an array may hold a string that's longer than its upper bound
1676 due to it being used as a poor-man's flexible array member.
1678 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1679 and false if PHIs and COND_EXPRs are to be handled optimistically,
1680 if we can determine string length minimum and maximum; it will use
1681 the minimum from the ones where it can be determined.
1682 STRICT false should be only used for warning code.
1683 When non-null, clear *NONSTR if ARG refers to a constant array
1684 that is known not be nul-terminated. Otherwise set it to
1685 the declaration of the constant non-terminated array.
1687 ELTSIZE is 1 for normal single byte character strings, and 2 or
1688 4 for wide characer strings. ELTSIZE is by default 1. */
1690 bool
1691 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1693 bitmap visited = NULL;
1695 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1697 /* On failure extend the length range to an impossible maximum
1698 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1699 members can stay unchanged regardless. */
1700 pdata->minlen = ssize_int (0);
1701 pdata->maxlen = build_all_ones_cst (size_type_node);
1703 else if (!pdata->minlen)
1704 pdata->minlen = ssize_int (0);
1706 /* Unless its null, leave the more conservative MAXBOUND unchanged. */
1707 if (!pdata->maxbound)
1708 pdata->maxbound = pdata->maxlen;
1710 if (visited)
1711 BITMAP_FREE (visited);
1713 return !integer_all_onesp (pdata->maxlen);
1716 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1717 For ARG of pointer types, NONSTR indicates if the caller is prepared
1718 to handle unterminated strings. For integer ARG and when RKIND ==
1719 SRK_INT_VALUE, NONSTR must be null.
1721 If an unterminated array is discovered and our caller handles
1722 unterminated arrays, then bubble up the offending DECL and
1723 return the maximum size. Otherwise return NULL. */
1725 static tree
1726 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1728 /* A non-null NONSTR is meaningless when determining the maximum
1729 value of an integer ARG. */
1730 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1731 /* ARG must have an integral type when RKIND says so. */
1732 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1734 bitmap visited = NULL;
1736 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1737 is unbounded. */
1738 c_strlen_data lendata = { };
1739 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1740 lendata.maxlen = NULL_TREE;
1741 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1742 lendata.maxlen = NULL_TREE;
1744 if (visited)
1745 BITMAP_FREE (visited);
1747 if (nonstr)
1749 /* For callers prepared to handle unterminated arrays set
1750 *NONSTR to point to the declaration of the array and return
1751 the maximum length/size. */
1752 *nonstr = lendata.decl;
1753 return lendata.maxlen;
1756 /* Fail if the constant array isn't nul-terminated. */
1757 return lendata.decl ? NULL_TREE : lendata.maxlen;
1761 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1762 If LEN is not NULL, it represents the length of the string to be
1763 copied. Return NULL_TREE if no simplification can be made. */
1765 static bool
1766 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1767 tree dest, tree src)
1769 gimple *stmt = gsi_stmt (*gsi);
1770 location_t loc = gimple_location (stmt);
1771 tree fn;
1773 /* If SRC and DEST are the same (and not volatile), return DEST. */
1774 if (operand_equal_p (src, dest, 0))
1776 /* Issue -Wrestrict unless the pointers are null (those do
1777 not point to objects and so do not indicate an overlap;
1778 such calls could be the result of sanitization and jump
1779 threading). */
1780 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1782 tree func = gimple_call_fndecl (stmt);
1784 warning_at (loc, OPT_Wrestrict,
1785 "%qD source argument is the same as destination",
1786 func);
1789 replace_call_with_value (gsi, dest);
1790 return true;
1793 if (optimize_function_for_size_p (cfun))
1794 return false;
1796 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1797 if (!fn)
1798 return false;
1800 /* Set to non-null if ARG refers to an unterminated array. */
1801 tree nonstr = NULL;
1802 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1804 if (nonstr)
1806 /* Avoid folding calls with unterminated arrays. */
1807 if (!gimple_no_warning_p (stmt))
1808 warn_string_no_nul (loc, "strcpy", src, nonstr);
1809 gimple_set_no_warning (stmt, true);
1810 return false;
1813 if (!len)
1814 return false;
1816 len = fold_convert_loc (loc, size_type_node, len);
1817 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1818 len = force_gimple_operand_gsi (gsi, len, true,
1819 NULL_TREE, true, GSI_SAME_STMT);
1820 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1821 replace_call_with_call_and_fold (gsi, repl);
1822 return true;
1825 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1826 If SLEN is not NULL, it represents the length of the source string.
1827 Return NULL_TREE if no simplification can be made. */
1829 static bool
1830 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1831 tree dest, tree src, tree len)
1833 gimple *stmt = gsi_stmt (*gsi);
1834 location_t loc = gimple_location (stmt);
1835 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1837 /* If the LEN parameter is zero, return DEST. */
1838 if (integer_zerop (len))
1840 /* Avoid warning if the destination refers to a an array/pointer
1841 decorate with attribute nonstring. */
1842 if (!nonstring)
1844 tree fndecl = gimple_call_fndecl (stmt);
1846 /* Warn about the lack of nul termination: the result is not
1847 a (nul-terminated) string. */
1848 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1849 if (slen && !integer_zerop (slen))
1850 warning_at (loc, OPT_Wstringop_truncation,
1851 "%G%qD destination unchanged after copying no bytes "
1852 "from a string of length %E",
1853 stmt, fndecl, slen);
1854 else
1855 warning_at (loc, OPT_Wstringop_truncation,
1856 "%G%qD destination unchanged after copying no bytes",
1857 stmt, fndecl);
1860 replace_call_with_value (gsi, dest);
1861 return true;
1864 /* We can't compare slen with len as constants below if len is not a
1865 constant. */
1866 if (TREE_CODE (len) != INTEGER_CST)
1867 return false;
1869 /* Now, we must be passed a constant src ptr parameter. */
1870 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1871 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1872 return false;
1874 /* The size of the source string including the terminating nul. */
1875 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1877 /* We do not support simplification of this case, though we do
1878 support it when expanding trees into RTL. */
1879 /* FIXME: generate a call to __builtin_memset. */
1880 if (tree_int_cst_lt (ssize, len))
1881 return false;
1883 /* Diagnose truncation that leaves the copy unterminated. */
1884 maybe_diag_stxncpy_trunc (*gsi, src, len);
1886 /* OK transform into builtin memcpy. */
1887 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1888 if (!fn)
1889 return false;
1891 len = fold_convert_loc (loc, size_type_node, len);
1892 len = force_gimple_operand_gsi (gsi, len, true,
1893 NULL_TREE, true, GSI_SAME_STMT);
1894 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1895 replace_call_with_call_and_fold (gsi, repl);
1897 return true;
1900 /* Fold function call to builtin strchr or strrchr.
1901 If both arguments are constant, evaluate and fold the result,
1902 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1903 In general strlen is significantly faster than strchr
1904 due to being a simpler operation. */
1905 static bool
1906 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1908 gimple *stmt = gsi_stmt (*gsi);
1909 tree str = gimple_call_arg (stmt, 0);
1910 tree c = gimple_call_arg (stmt, 1);
1911 location_t loc = gimple_location (stmt);
1912 const char *p;
1913 char ch;
1915 if (!gimple_call_lhs (stmt))
1916 return false;
1918 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1920 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1922 if (p1 == NULL)
1924 replace_call_with_value (gsi, integer_zero_node);
1925 return true;
1928 tree len = build_int_cst (size_type_node, p1 - p);
1929 gimple_seq stmts = NULL;
1930 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1931 POINTER_PLUS_EXPR, str, len);
1932 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1933 gsi_replace_with_seq_vops (gsi, stmts);
1934 return true;
1937 if (!integer_zerop (c))
1938 return false;
1940 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1941 if (is_strrchr && optimize_function_for_size_p (cfun))
1943 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1945 if (strchr_fn)
1947 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1948 replace_call_with_call_and_fold (gsi, repl);
1949 return true;
1952 return false;
1955 tree len;
1956 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1958 if (!strlen_fn)
1959 return false;
1961 /* Create newstr = strlen (str). */
1962 gimple_seq stmts = NULL;
1963 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1964 gimple_set_location (new_stmt, loc);
1965 len = create_tmp_reg_or_ssa_name (size_type_node);
1966 gimple_call_set_lhs (new_stmt, len);
1967 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1969 /* Create (str p+ strlen (str)). */
1970 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1971 POINTER_PLUS_EXPR, str, len);
1972 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1973 gsi_replace_with_seq_vops (gsi, stmts);
1974 /* gsi now points at the assignment to the lhs, get a
1975 stmt iterator to the strlen.
1976 ??? We can't use gsi_for_stmt as that doesn't work when the
1977 CFG isn't built yet. */
1978 gimple_stmt_iterator gsi2 = *gsi;
1979 gsi_prev (&gsi2);
1980 fold_stmt (&gsi2);
1981 return true;
1984 /* Fold function call to builtin strstr.
1985 If both arguments are constant, evaluate and fold the result,
1986 additionally fold strstr (x, "") into x and strstr (x, "c")
1987 into strchr (x, 'c'). */
1988 static bool
1989 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1991 gimple *stmt = gsi_stmt (*gsi);
1992 tree haystack = gimple_call_arg (stmt, 0);
1993 tree needle = gimple_call_arg (stmt, 1);
1994 const char *p, *q;
1996 if (!gimple_call_lhs (stmt))
1997 return false;
1999 q = c_getstr (needle);
2000 if (q == NULL)
2001 return false;
2003 if ((p = c_getstr (haystack)))
2005 const char *r = strstr (p, q);
2007 if (r == NULL)
2009 replace_call_with_value (gsi, integer_zero_node);
2010 return true;
2013 tree len = build_int_cst (size_type_node, r - p);
2014 gimple_seq stmts = NULL;
2015 gimple *new_stmt
2016 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2017 haystack, len);
2018 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2019 gsi_replace_with_seq_vops (gsi, stmts);
2020 return true;
2023 /* For strstr (x, "") return x. */
2024 if (q[0] == '\0')
2026 replace_call_with_value (gsi, haystack);
2027 return true;
2030 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2031 if (q[1] == '\0')
2033 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2034 if (strchr_fn)
2036 tree c = build_int_cst (integer_type_node, q[0]);
2037 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2038 replace_call_with_call_and_fold (gsi, repl);
2039 return true;
2043 return false;
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise replace the
   statement at GSI with the simplified form and return true.  When the
   length of SRC is known, strcat (dst, src) is split into
   strlen (dst) followed by memcpy (dst + strlen (dst), src, len + 1),
   which exposes the copy to further store-by-pieces optimization.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion trades size for speed; keep the plain
     call when optimizing this block for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the original LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  Returns true if the call was folded.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* Only drop the object-size check when SIZE is the "unknown size"
     sentinel (all ones); otherwise the check must be kept.  */
  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  Replaces the call with a
   value or a strcat call when possible, diagnosing suspicious bounds
   along the way.  Returns true if a replacement was made.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Beyond this point both the bound and the source string must be
     known constants.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Returns true if the call was folded into a value,
   __strcat_chk, or plain strncat.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all ones is the "unknown object size" sentinel; anything
     else means the object-size check is meaningful.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  Returns the SSA name (or temporary) holding the
   loaded character, of type const unsigned char.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  /* Load through a const unsigned char * so the access is valid for
     any character type and yields an unsigned value, as the str*cmp
     semantics require.  */
  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   FCODE is derived from the called declaration.  Returns true if the
   call was replaced with a constant, a character load/subtraction, or
   a cheaper library call.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* -1 means the bound (for the strn* variants) is unknown.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* strncmp returning 0 implies the case-insensitive compare
	       is 0 too; a nonzero result tells us nothing.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  Returns true if
   the call was replaced with a null pointer or a pointer into the
   constant string argument.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Need a constant search character and a constant bound.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only the first MIN (length, string_length) bytes are known;
	 search just those.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* A negative result is only conclusive when the whole
	     searched range was known.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No LHS: the call has no effect; replace with a nop so
	       the vops are still transferred correctly.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call (the string and the stream).  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the known
   length of ARG0, the call is deleted, turned into fputc, or turned
   into fwrite.  Returns true if a transformation was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  When the object-size check can
   be proven to pass (or SIZE is unknown), the call is folded into the
   unchecked mem* function.  Returns true on success.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all ones means "object size unknown"; otherwise prove
     LEN (or its maximum) fits before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  When the length of SRC can be
   bounded by SIZE the call is folded into st[rp]cpy (or into
   __memcpy_chk when only a non-constant length is known).  Returns
   true on success.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* SIZE of all ones means the object size is unknown; otherwise the
     source length must provably fit before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      /* Copy the NUL terminator too.  */
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  When the bound provably fits the destination (or SIZE is
   unknown) the call is folded into st{r,p}ncpy.  Returns true on
   success.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all ones is the "unknown object size" sentinel.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused the call becomes strcpy; if the source
   length is a known constant it becomes memcpy plus a pointer
   adjustment.  Returns true if a transformation was made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands by hand since the memcpy call is
     inserted rather than replacing the original statement.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk at GSI into plain {,v}snprintf
   when the object-size check can be dropped.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if
   the call was transformed in place.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all ones is the "unknown object size" sentinel; otherwise
     the bound LEN (or its maximum) must provably fit in SIZE.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at GSI into plain {,v}sprintf when
   the output length can be bounded by the object size.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true
   if the call was transformed in place.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For vsprintf_chk with extra args the format alone doesn't
	     determine the length, hence the nargs check.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all ones is the "unknown object size" sentinel; otherwise
     the known output length must be strictly smaller than SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3178 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3179 ORIG may be null if this is a 2-argument call. We don't attempt to
3180 simplify calls with more than 3 arguments.
3182 Return true if simplification was possible, otherwise false. */
3184 bool
3185 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3187 gimple *stmt = gsi_stmt (*gsi);
3188 tree dest = gimple_call_arg (stmt, 0);
3189 tree fmt = gimple_call_arg (stmt, 1);
3190 tree orig = NULL_TREE;
3191 const char *fmt_str = NULL;
3193 /* Verify the required arguments in the original call. We deal with two
3194 types of sprintf() calls: 'sprintf (str, fmt)' and
3195 'sprintf (dest, "%s", orig)'. */
3196 if (gimple_call_num_args (stmt) > 3)
3197 return false;
3199 if (gimple_call_num_args (stmt) == 3)
3200 orig = gimple_call_arg (stmt, 2);
3202 /* Check whether the format is a literal string constant. */
3203 fmt_str = c_getstr (fmt);
3204 if (fmt_str == NULL)
3205 return false;
3207 if (!init_target_chars ())
3208 return false;
3210 /* If the format doesn't contain % args or %%, use strcpy. */
3211 if (strchr (fmt_str, target_percent) == NULL)
3213 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3215 if (!fn)
3216 return false;
3218 /* Don't optimize sprintf (buf, "abc", ptr++). */
3219 if (orig)
3220 return false;
3222 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3223 'format' is known to contain no % formats. */
3224 gimple_seq stmts = NULL;
3225 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3227 /* Propagate the NO_WARNING bit to avoid issuing the same
3228 warning more than once. */
3229 if (gimple_no_warning_p (stmt))
3230 gimple_set_no_warning (repl, true);
3232 gimple_seq_add_stmt_without_update (&stmts, repl);
3233 if (tree lhs = gimple_call_lhs (stmt))
 /* sprintf returns the number of characters written; for a %-free
 format that is simply strlen (fmt_str). */
3235 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3236 strlen (fmt_str)));
3237 gimple_seq_add_stmt_without_update (&stmts, repl);
3238 gsi_replace_with_seq_vops (gsi, stmts);
3239 /* gsi now points at the assignment to the lhs, get a
3240 stmt iterator to the strcpy call.
3241 ??? We can't use gsi_for_stmt as that doesn't work when the
3242 CFG isn't built yet. */
3243 gimple_stmt_iterator gsi2 = *gsi;
3244 gsi_prev (&gsi2);
3245 fold_stmt (&gsi2);
3247 else
3249 gsi_replace_with_seq_vops (gsi, stmts);
3250 fold_stmt (gsi);
3252 return true;
3255 /* If the format is "%s", use strcpy, substituting the known length
 of ORIG for the result when it is used. */
3256 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3258 tree fn;
3259 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3261 if (!fn)
3262 return false;
3264 /* Don't crash on sprintf (str1, "%s"). */
3265 if (!orig)
3266 return false;
3268 tree orig_len = NULL_TREE;
3269 if (gimple_call_lhs (stmt))
3271 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3272 if (!orig_len)
3273 return false;
3276 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3277 gimple_seq stmts = NULL;
3278 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3280 /* Propagate the NO_WARNING bit to avoid issuing the same
3281 warning more than once. */
3282 if (gimple_no_warning_p (stmt))
3283 gimple_set_no_warning (repl, true);
3285 gimple_seq_add_stmt_without_update (&stmts, repl);
3286 if (tree lhs = gimple_call_lhs (stmt))
3288 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3289 TREE_TYPE (orig_len)))
3290 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3291 repl = gimple_build_assign (lhs, orig_len);
3292 gimple_seq_add_stmt_without_update (&stmts, repl);
3293 gsi_replace_with_seq_vops (gsi, stmts);
3294 /* gsi now points at the assignment to the lhs, get a
3295 stmt iterator to the strcpy call.
3296 ??? We can't use gsi_for_stmt as that doesn't work when the
3297 CFG isn't built yet. */
3298 gimple_stmt_iterator gsi2 = *gsi;
3299 gsi_prev (&gsi2);
3300 fold_stmt (&gsi2);
3302 else
3304 gsi_replace_with_seq_vops (gsi, stmts);
3305 fold_stmt (gsi);
3307 return true;
3309 return false;
3312 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3313 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3314 attempt to simplify calls with more than 4 arguments.
3316 Return true if simplification was possible, otherwise false. */
3318 bool
3319 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3321 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3322 tree dest = gimple_call_arg (stmt, 0);
3323 tree destsize = gimple_call_arg (stmt, 1);
3324 tree fmt = gimple_call_arg (stmt, 2);
3325 tree orig = NULL_TREE;
3326 const char *fmt_str = NULL;
3328 if (gimple_call_num_args (stmt) > 4)
3329 return false;
3331 if (gimple_call_num_args (stmt) == 4)
3332 orig = gimple_call_arg (stmt, 3);
 /* The destination size must be a compile-time constant so the
 truncation behavior of snprintf can be ruled out below. */
3334 if (!tree_fits_uhwi_p (destsize))
3335 return false;
3336 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3338 /* Check whether the format is a literal string constant. */
3339 fmt_str = c_getstr (fmt);
3340 if (fmt_str == NULL)
3341 return false;
3343 if (!init_target_chars ())
3344 return false;
3346 /* If the format doesn't contain % args or %%, use strcpy. */
3347 if (strchr (fmt_str, target_percent) == NULL)
3349 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3350 if (!fn)
3351 return false;
3353 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3354 if (orig)
3355 return false;
3357 /* We could expand this as
3358 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3359 or to
3360 memcpy (str, fmt_with_nul_at_cstm1, cst);
3361 but in the former case that might increase code size
3362 and in the latter case grow .rodata section too much.
3363 So punt for now. */
3364 size_t len = strlen (fmt_str);
3365 if (len >= destlen)
3366 return false;
3368 gimple_seq stmts = NULL;
3369 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3370 gimple_seq_add_stmt_without_update (&stmts, repl);
3371 if (tree lhs = gimple_call_lhs (stmt))
 /* snprintf returns the length it would have written, here LEN. */
3373 repl = gimple_build_assign (lhs,
3374 build_int_cst (TREE_TYPE (lhs), len));
3375 gimple_seq_add_stmt_without_update (&stmts, repl);
3376 gsi_replace_with_seq_vops (gsi, stmts);
3377 /* gsi now points at the assignment to the lhs, get a
3378 stmt iterator to the strcpy call.
3379 ??? We can't use gsi_for_stmt as that doesn't work when the
3380 CFG isn't built yet. */
3381 gimple_stmt_iterator gsi2 = *gsi;
3382 gsi_prev (&gsi2);
3383 fold_stmt (&gsi2);
3385 else
3387 gsi_replace_with_seq_vops (gsi, stmts);
3388 fold_stmt (gsi);
3390 return true;
3393 /* If the format is "%s", use strcpy if the result isn't used. */
3394 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3396 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3397 if (!fn)
3398 return false;
3400 /* Don't crash on snprintf (str1, cst, "%s"). */
3401 if (!orig)
3402 return false;
3404 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3405 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3406 return false;
3408 /* We could expand this as
3409 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3410 or to
3411 memcpy (str1, str2_with_nul_at_cstm1, cst);
3412 but in the former case that might increase code size
3413 and in the latter case grow .rodata section too much.
3414 So punt for now. */
3415 if (compare_tree_int (orig_len, destlen) >= 0)
3416 return false;
3418 /* Convert snprintf (str1, cst, "%s", str2) into
3419 strcpy (str1, str2) if strlen (str2) < cst. */
3420 gimple_seq stmts = NULL;
3421 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3422 gimple_seq_add_stmt_without_update (&stmts, repl);
3423 if (tree lhs = gimple_call_lhs (stmt))
3425 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3426 TREE_TYPE (orig_len)))
3427 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3428 repl = gimple_build_assign (lhs, orig_len);
3429 gimple_seq_add_stmt_without_update (&stmts, repl);
3430 gsi_replace_with_seq_vops (gsi, stmts);
3431 /* gsi now points at the assignment to the lhs, get a
3432 stmt iterator to the strcpy call.
3433 ??? We can't use gsi_for_stmt as that doesn't work when the
3434 CFG isn't built yet. */
3435 gimple_stmt_iterator gsi2 = *gsi;
3436 gsi_prev (&gsi2);
3437 fold_stmt (&gsi2);
3439 else
3441 gsi_replace_with_seq_vops (gsi, stmts);
3442 fold_stmt (gsi);
3444 return true;
3446 return false;
3449 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3450 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3451 more than 3 arguments, and ARG may be null in the 2-argument case.
3453 Return false if no simplification was possible, otherwise replace the
3454 call at *GSI and return true. FCODE is the BUILT_IN_*
3455 code of the function to be simplified. */
3457 static bool
3458 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3459 tree fp, tree fmt, tree arg,
3460 enum built_in_function fcode)
3462 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3463 tree fn_fputc, fn_fputs;
3464 const char *fmt_str = NULL;
3466 /* If the return value is used, don't do the transformation. */
3467 if (gimple_call_lhs (stmt) != NULL_TREE)
3468 return false;
3470 /* Check whether the format is a literal string constant. */
3471 fmt_str = c_getstr (fmt);
3472 if (fmt_str == NULL)
3473 return false;
3475 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3477 /* If we're using an unlocked function, assume the other
3478 unlocked functions exist explicitly. */
3479 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3480 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3482 else
3484 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3485 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3488 if (!init_target_chars ())
3489 return false;
3491 /* If the format doesn't contain % args or %%, use fputs. */
3492 if (strchr (fmt_str, target_percent) == NULL)
3494 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3495 && arg)
3496 return false;
3498 /* If the format specifier was "", fprintf does nothing. */
3499 if (fmt_str[0] == '\0')
3501 replace_call_with_value (gsi, NULL_TREE);
3502 return true;
3505 /* When "string" doesn't contain %, replace all cases of
3506 fprintf (fp, string) with fputs (string, fp). The fputs
3507 builtin will take care of special cases like length == 1. */
3508 if (fn_fputs)
3510 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3511 replace_call_with_call_and_fold (gsi, repl);
3512 return true;
3516 /* The other optimizations can be done only on the non-va_list variants. */
3517 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3518 return false;
3520 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3521 else if (strcmp (fmt_str, target_percent_s) == 0)
3523 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3524 return false;
3525 if (fn_fputs)
3527 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3528 replace_call_with_call_and_fold (gsi, repl);
3529 return true;
3533 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3534 else if (strcmp (fmt_str, target_percent_c) == 0)
3536 if (!arg
3537 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3538 return false;
3539 if (fn_fputc)
3541 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3542 replace_call_with_call_and_fold (gsi, repl);
3543 return true;
3547 return false;
3550 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3551 FMT and ARG are the arguments to the call; we don't fold cases with
3552 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3554 Return false if no simplification was possible, otherwise replace the
3555 call at *GSI and return true. FCODE is the BUILT_IN_*
3556 code of the function to be simplified. */
3558 static bool
3559 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3560 tree arg, enum built_in_function fcode)
3562 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3563 tree fn_putchar, fn_puts, newarg;
3564 const char *fmt_str = NULL;
3566 /* If the return value is used, don't do the transformation. */
3567 if (gimple_call_lhs (stmt) != NULL_TREE)
3568 return false;
3570 /* Check whether the format is a literal string constant. */
3571 fmt_str = c_getstr (fmt);
3572 if (fmt_str == NULL)
3573 return false;
3575 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3577 /* If we're using an unlocked function, assume the other
3578 unlocked functions exist explicitly. */
3579 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3580 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3582 else
3584 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3585 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3588 if (!init_target_chars ())
3589 return false;
 /* Handle both printf ("%s", str-constant) and a %-free format; in
 either case STR below is the literal string that would be printed. */
3591 if (strcmp (fmt_str, target_percent_s) == 0
3592 || strchr (fmt_str, target_percent) == NULL)
3594 const char *str;
3596 if (strcmp (fmt_str, target_percent_s) == 0)
3598 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3599 return false;
3601 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3602 return false;
3604 str = c_getstr (arg);
3605 if (str == NULL)
3606 return false;
3608 else
3610 /* The format specifier doesn't contain any '%' characters. */
3611 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3612 && arg)
3613 return false;
3614 str = fmt_str;
3617 /* If the string was "", printf does nothing. */
3618 if (str[0] == '\0')
3620 replace_call_with_value (gsi, NULL_TREE);
3621 return true;
3624 /* If the string has length of 1, call putchar. */
3625 if (str[1] == '\0')
3627 /* Given printf("c"), (where c is any one character,)
3628 convert "c"[0] to an int and pass that to the replacement
3629 function. */
3630 newarg = build_int_cst (integer_type_node, str[0]);
3631 if (fn_putchar)
3633 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3634 replace_call_with_call_and_fold (gsi, repl);
3635 return true;
3638 else
3640 /* If the string was "string\n", call puts("string"). */
3641 size_t len = strlen (str);
3642 if ((unsigned char)str[len - 1] == target_newline
3643 && (size_t) (int) len == len
3644 && (int) len > 0)
3646 char *newstr;
3648 /* Create a NUL-terminated string that's one char shorter
3649 than the original, stripping off the trailing '\n'. */
3650 newstr = xstrdup (str);
3651 newstr[len - 1] = '\0';
3652 newarg = build_string_literal (len, newstr);
3653 free (newstr);
3654 if (fn_puts)
3656 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3657 replace_call_with_call_and_fold (gsi, repl);
3658 return true;
3661 else
3662 /* We'd like to arrange to call fputs(string,stdout) here,
3663 but we need stdout and don't have a way to get it yet. */
3664 return false;
3668 /* The other optimizations can be done only on the non-va_list variants. */
3669 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3670 return false;
3672 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3673 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3675 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3676 return false;
3677 if (fn_puts)
3679 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3680 replace_call_with_call_and_fold (gsi, repl);
3681 return true;
3685 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3686 else if (strcmp (fmt_str, target_percent_c) == 0)
3688 if (!arg || ! useless_type_conversion_p (integer_type_node,
3689 TREE_TYPE (arg)))
3690 return false;
3691 if (fn_putchar)
3693 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3694 replace_call_with_call_and_fold (gsi, repl);
3695 return true;
3699 return false;
3704 /* Fold a call to __builtin_strlen. If the length is a compile-time
 constant, replace the call with that constant and return true;
 otherwise record the computed [0, MAXLEN] range on the call's lhs
 and return false. */
3706 static bool
3707 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3709 gimple *stmt = gsi_stmt (*gsi);
3710 tree arg = gimple_call_arg (stmt, 0);
3712 wide_int minlen;
3713 wide_int maxlen;
3715 c_strlen_data lendata = { };
3716 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3717 && !lendata.decl
3718 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3719 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3721 /* The range of lengths refers to either a single constant
3722 string or to the longest and shortest constant string
3723 referenced by the argument of the strlen() call, or to
3724 the strings that can possibly be stored in the arrays
3725 the argument refers to. */
3726 minlen = wi::to_wide (lendata.minlen);
3727 maxlen = wi::to_wide (lendata.maxlen);
3729 else
 /* Nothing is known; fall back to the widest possible range:
 [0, max_object_size () - 2]. */
3731 unsigned prec = TYPE_PRECISION (sizetype);
3733 minlen = wi::shwi (0, prec);
3734 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3737 if (minlen == maxlen)
3739 /* Fold the strlen call to a constant. */
3740 tree type = TREE_TYPE (lendata.minlen);
3741 tree len = force_gimple_operand_gsi (gsi,
3742 wide_int_to_tree (type, minlen),
3743 true, NULL, true, GSI_SAME_STMT);
3744 replace_call_with_value (gsi, len);
3745 return true;
3748 /* Set the strlen() range to [0, MAXLEN]. */
3749 if (tree lhs = gimple_call_lhs (stmt))
3750 set_strlen_range (lhs, maxlen);
3752 return false;
3755 /* Fold a call to __builtin_acc_on_device (ARG0) into the boolean
 expression ARG0 == VAL_HOST || ARG0 == VAL_DEV, where the two
 constants depend on whether this is the host or the accelerator
 (ACCEL_COMPILER) compiler. Return true if folded. */
3757 static bool
3758 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3760 /* Defer folding until we know which compiler we're in. */
3761 if (symtab->state != EXPANSION)
3762 return false;
3764 unsigned val_host = GOMP_DEVICE_HOST;
3765 unsigned val_dev = GOMP_DEVICE_NONE;
3767 #ifdef ACCEL_COMPILER
3768 val_host = GOMP_DEVICE_NOT_HOST;
3769 val_dev = ACCEL_COMPILER_acc_device;
3770 #endif
3772 location_t loc = gimple_location (gsi_stmt (*gsi));
 /* host_eq = (arg0 == val_host) */
3774 tree host_eq = make_ssa_name (boolean_type_node);
3775 gimple *host_ass = gimple_build_assign
3776 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3777 gimple_set_location (host_ass, loc);
3778 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
 /* dev_eq = (arg0 == val_dev) */
3780 tree dev_eq = make_ssa_name (boolean_type_node);
3781 gimple *dev_ass = gimple_build_assign
3782 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3783 gimple_set_location (dev_ass, loc);
3784 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
 /* result = host_eq | dev_eq */
3786 tree result = make_ssa_name (boolean_type_node);
3787 gimple *result_ass = gimple_build_assign
3788 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3789 gimple_set_location (result_ass, loc);
3790 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3792 replace_call_with_value (gsi, result);
3794 return true;
3797 /* Fold realloc (0, n) -> malloc (n). Return true if the call at *GSI
 was replaced, false otherwise. */
3799 static bool
3800 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3802 gimple *stmt = gsi_stmt (*gsi);
3803 tree arg = gimple_call_arg (stmt, 0);
3804 tree size = gimple_call_arg (stmt, 1);
 /* realloc with a null pointer behaves exactly like malloc. */
3806 if (operand_equal_p (arg, null_pointer_node, 0))
3808 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3809 if (fn_malloc)
3811 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3812 replace_call_with_call_and_fold (gsi, repl);
3813 return true;
3816 return false;
3819 /* Fold the non-target builtin at *GSI and return whether any simplification
3820 was made. Dispatches on the builtin's function code to the dedicated
 folders above, then falls back to the generic tree-level folder. */
3822 static bool
3823 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3825 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3826 tree callee = gimple_call_fndecl (stmt);
3828 /* Give up for always_inline inline builtins until they are
3829 inlined. */
3830 if (avoid_folding_inline_builtin (callee))
3831 return false;
 /* N is the call's argument count; the printf/fprintf cases below
 only handle the small fixed arities. */
3833 unsigned n = gimple_call_num_args (stmt);
3834 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3835 switch (fcode)
3837 case BUILT_IN_BCMP:
3838 return gimple_fold_builtin_bcmp (gsi);
3839 case BUILT_IN_BCOPY:
3840 return gimple_fold_builtin_bcopy (gsi);
3841 case BUILT_IN_BZERO:
3842 return gimple_fold_builtin_bzero (gsi);
3844 case BUILT_IN_MEMSET:
3845 return gimple_fold_builtin_memset (gsi,
3846 gimple_call_arg (stmt, 1),
3847 gimple_call_arg (stmt, 2));
3848 case BUILT_IN_MEMCPY:
3849 case BUILT_IN_MEMPCPY:
3850 case BUILT_IN_MEMMOVE:
3851 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3852 gimple_call_arg (stmt, 1), fcode);
3853 case BUILT_IN_SPRINTF_CHK:
3854 case BUILT_IN_VSPRINTF_CHK:
3855 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3856 case BUILT_IN_STRCAT_CHK:
3857 return gimple_fold_builtin_strcat_chk (gsi);
3858 case BUILT_IN_STRNCAT_CHK:
3859 return gimple_fold_builtin_strncat_chk (gsi);
3860 case BUILT_IN_STRLEN:
3861 return gimple_fold_builtin_strlen (gsi);
3862 case BUILT_IN_STRCPY:
3863 return gimple_fold_builtin_strcpy (gsi,
3864 gimple_call_arg (stmt, 0),
3865 gimple_call_arg (stmt, 1));
3866 case BUILT_IN_STRNCPY:
3867 return gimple_fold_builtin_strncpy (gsi,
3868 gimple_call_arg (stmt, 0),
3869 gimple_call_arg (stmt, 1),
3870 gimple_call_arg (stmt, 2));
3871 case BUILT_IN_STRCAT:
3872 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3873 gimple_call_arg (stmt, 1));
3874 case BUILT_IN_STRNCAT:
3875 return gimple_fold_builtin_strncat (gsi);
3876 case BUILT_IN_INDEX:
3877 case BUILT_IN_STRCHR:
3878 return gimple_fold_builtin_strchr (gsi, false);
3879 case BUILT_IN_RINDEX:
3880 case BUILT_IN_STRRCHR:
3881 return gimple_fold_builtin_strchr (gsi, true);
3882 case BUILT_IN_STRSTR:
3883 return gimple_fold_builtin_strstr (gsi);
3884 case BUILT_IN_STRCMP:
3885 case BUILT_IN_STRCMP_EQ:
3886 case BUILT_IN_STRCASECMP:
3887 case BUILT_IN_STRNCMP:
3888 case BUILT_IN_STRNCMP_EQ:
3889 case BUILT_IN_STRNCASECMP:
3890 return gimple_fold_builtin_string_compare (gsi);
3891 case BUILT_IN_MEMCHR:
3892 return gimple_fold_builtin_memchr (gsi);
3893 case BUILT_IN_FPUTS:
3894 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3895 gimple_call_arg (stmt, 1), false);
3896 case BUILT_IN_FPUTS_UNLOCKED:
3897 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3898 gimple_call_arg (stmt, 1), true);
3899 case BUILT_IN_MEMCPY_CHK:
3900 case BUILT_IN_MEMPCPY_CHK:
3901 case BUILT_IN_MEMMOVE_CHK:
3902 case BUILT_IN_MEMSET_CHK:
3903 return gimple_fold_builtin_memory_chk (gsi,
3904 gimple_call_arg (stmt, 0),
3905 gimple_call_arg (stmt, 1),
3906 gimple_call_arg (stmt, 2),
3907 gimple_call_arg (stmt, 3),
3908 fcode);
3909 case BUILT_IN_STPCPY:
3910 return gimple_fold_builtin_stpcpy (gsi);
3911 case BUILT_IN_STRCPY_CHK:
3912 case BUILT_IN_STPCPY_CHK:
3913 return gimple_fold_builtin_stxcpy_chk (gsi,
3914 gimple_call_arg (stmt, 0),
3915 gimple_call_arg (stmt, 1),
3916 gimple_call_arg (stmt, 2),
3917 fcode);
3918 case BUILT_IN_STRNCPY_CHK:
3919 case BUILT_IN_STPNCPY_CHK:
3920 return gimple_fold_builtin_stxncpy_chk (gsi,
3921 gimple_call_arg (stmt, 0),
3922 gimple_call_arg (stmt, 1),
3923 gimple_call_arg (stmt, 2),
3924 gimple_call_arg (stmt, 3),
3925 fcode);
3926 case BUILT_IN_SNPRINTF_CHK:
3927 case BUILT_IN_VSNPRINTF_CHK:
3928 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3930 case BUILT_IN_FPRINTF:
3931 case BUILT_IN_FPRINTF_UNLOCKED:
3932 case BUILT_IN_VFPRINTF:
3933 if (n == 2 || n == 3)
3934 return gimple_fold_builtin_fprintf (gsi,
3935 gimple_call_arg (stmt, 0),
3936 gimple_call_arg (stmt, 1),
3937 n == 3
3938 ? gimple_call_arg (stmt, 2)
3939 : NULL_TREE,
3940 fcode);
3941 break;
3942 case BUILT_IN_FPRINTF_CHK:
3943 case BUILT_IN_VFPRINTF_CHK:
3944 if (n == 3 || n == 4)
3945 return gimple_fold_builtin_fprintf (gsi,
3946 gimple_call_arg (stmt, 0),
3947 gimple_call_arg (stmt, 2),
3948 n == 4
3949 ? gimple_call_arg (stmt, 3)
3950 : NULL_TREE,
3951 fcode);
3952 break;
3953 case BUILT_IN_PRINTF:
3954 case BUILT_IN_PRINTF_UNLOCKED:
3955 case BUILT_IN_VPRINTF:
3956 if (n == 1 || n == 2)
3957 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3958 n == 2
3959 ? gimple_call_arg (stmt, 1)
3960 : NULL_TREE, fcode);
3961 break;
3962 case BUILT_IN_PRINTF_CHK:
3963 case BUILT_IN_VPRINTF_CHK:
3964 if (n == 2 || n == 3)
3965 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3966 n == 3
3967 ? gimple_call_arg (stmt, 2)
3968 : NULL_TREE, fcode);
3969 break;
3970 case BUILT_IN_ACC_ON_DEVICE:
3971 return gimple_fold_builtin_acc_on_device (gsi,
3972 gimple_call_arg (stmt, 0));
3973 case BUILT_IN_REALLOC:
3974 return gimple_fold_builtin_realloc (gsi);
3976 default:;
3979 /* Try the generic builtin folder. */
3980 bool ignore = (gimple_call_lhs (stmt) == NULL);
3981 tree result = fold_call_stmt (stmt, ignore);
3982 if (result)
3984 if (ignore)
3985 STRIP_NOPS (result);
3986 else
3987 result = fold_convert (gimple_call_return_type (stmt), result);
3988 if (!update_call_from_tree (gsi, result))
3989 gimplify_and_update_call_from_tree (gsi, result);
3990 return true;
3993 return false;
3996 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3997 function calls to constants, where possible. Return the folded
 constant, or NULL_TREE if the dimension is dynamic. */
3999 static tree
4000 fold_internal_goacc_dim (const gimple *call)
4002 int axis = oacc_get_ifn_dim_arg (call);
4003 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4004 tree result = NULL_TREE;
4005 tree type = TREE_TYPE (gimple_call_lhs (call));
4007 switch (gimple_call_internal_fn (call))
4009 case IFN_GOACC_DIM_POS:
4010 /* If the size is 1, we know the answer. */
4011 if (size == 1)
4012 result = build_int_cst (type, 0);
4013 break;
4014 case IFN_GOACC_DIM_SIZE:
4015 /* If the size is not dynamic, we know the answer. */
4016 if (size)
4017 result = build_int_cst (type, size);
4018 break;
4019 default:
4020 break;
4023 return result;
4026 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4027 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4028 &var where var is only addressable because of such calls. */
4030 bool
4031 optimize_atomic_compare_exchange_p (gimple *stmt)
4033 if (gimple_call_num_args (stmt) != 6
4034 || !flag_inline_atomics
4035 || !optimize
4036 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4037 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4038 || !gimple_vdef (stmt)
4039 || !gimple_vuse (stmt))
4040 return false;
4042 tree fndecl = gimple_call_fndecl (stmt);
4043 switch (DECL_FUNCTION_CODE (fndecl))
4045 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4046 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4047 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4048 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4049 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4050 break;
4051 default:
4052 return false;
 /* The "expected" argument must be the address of a local scalar
 variable so it can be rewritten into an SSA register. */
4055 tree expected = gimple_call_arg (stmt, 1);
4056 if (TREE_CODE (expected) != ADDR_EXPR
4057 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4058 return false;
4060 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4061 if (!is_gimple_reg_type (etype)
4062 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4063 || TREE_THIS_VOLATILE (etype)
4064 || VECTOR_TYPE_P (etype)
4065 || TREE_CODE (etype) == COMPLEX_TYPE
4066 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4067 might not preserve all the bits. See PR71716. */
4068 || SCALAR_FLOAT_TYPE_P (etype)
4069 || maybe_ne (TYPE_PRECISION (etype),
4070 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4071 return false;
 /* The weak argument must be a literal 0 or 1 so it can be encoded
 into the internal-fn flag operand. */
4073 tree weak = gimple_call_arg (stmt, 3);
4074 if (!integer_zerop (weak) && !integer_onep (weak))
4075 return false;
4077 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4078 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4079 machine_mode mode = TYPE_MODE (itype);
 /* The target must provide a compare-and-swap pattern for this mode. */
4081 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4082 == CODE_FOR_nothing
4083 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4084 return false;
4086 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4087 return false;
4089 return true;
4092 /* Fold
4093 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4094 into
4095 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4096 i = IMAGPART_EXPR <t>;
4097 r = (_Bool) i;
4098 e = REALPART_EXPR <t>;
 so that E no longer needs to be addressable. */
4100 void
4101 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4103 gimple *stmt = gsi_stmt (*gsi);
4104 tree fndecl = gimple_call_fndecl (stmt);
4105 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4106 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4107 tree ctype = build_complex_type (itype);
4108 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4109 bool throws = false;
4110 edge e = NULL;
4111 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4112 expected);
4113 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4114 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4115 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
 /* Reinterpret the expected value's bits in the integer type the
 internal function operates on. */
4117 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4118 build1 (VIEW_CONVERT_EXPR, itype,
4119 gimple_assign_lhs (g)));
4120 gsi_insert_before (gsi, g, GSI_SAME_STMT);
 /* Encode weak-ness and the access size into a single flag operand,
 as documented above: w * 256 + N. */
4122 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4123 + int_size_in_bytes (itype);
4124 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4125 gimple_call_arg (stmt, 0),
4126 gimple_assign_lhs (g),
4127 gimple_call_arg (stmt, 2),
4128 build_int_cst (integer_type_node, flag),
4129 gimple_call_arg (stmt, 4),
4130 gimple_call_arg (stmt, 5));
4131 tree lhs = make_ssa_name (ctype);
4132 gimple_call_set_lhs (g, lhs);
4133 gimple_set_vdef (g, gimple_vdef (stmt));
4134 gimple_set_vuse (g, gimple_vuse (stmt));
4135 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4136 tree oldlhs = gimple_call_lhs (stmt);
4137 if (stmt_can_throw_internal (cfun, stmt))
 /* When the call can throw, follow-up statements must go on the
 fall-through edge rather than directly after the call. */
4139 throws = true;
4140 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4142 gimple_call_set_nothrow (as_a <gcall *> (g),
4143 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4144 gimple_call_set_lhs (stmt, NULL_TREE);
4145 gsi_replace (gsi, g, true);
4146 if (oldlhs)
4148 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4149 build1 (IMAGPART_EXPR, itype, lhs));
4150 if (throws)
4152 gsi_insert_on_edge_immediate (e, g);
4153 *gsi = gsi_for_stmt (g);
4155 else
4156 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4157 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4158 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4160 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4161 build1 (REALPART_EXPR, itype, lhs));
4162 if (throws && oldlhs == NULL_TREE)
4164 gsi_insert_on_edge_immediate (e, g);
4165 *gsi = gsi_for_stmt (g);
4167 else
4168 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4169 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4171 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4172 VIEW_CONVERT_EXPR,
4173 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4174 gimple_assign_lhs (g)));
4175 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4177 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4178 gsi_insert_after (gsi, g, GSI_NEW_STMT);
 /* Leave *GSI pointing at the statement that loaded EXPECTED. */
4179 *gsi = gsiret;
4182 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4183 doesn't fit into TYPE. The test for overflow should be regardless of
4184 -fwrapv, and even for unsigned types. */
4186 bool
4187 arith_overflowed_p (enum tree_code code, const_tree type,
4188 const_tree arg0, const_tree arg1)
4190 widest2_int warg0 = widest2_int_cst (arg0);
4191 widest2_int warg1 = widest2_int_cst (arg1);
4192 widest2_int wres;
4193 switch (code)
4195 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4196 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4197 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4198 default: gcc_unreachable ();
4200 signop sign = TYPE_SIGN (type);
4201 if (sign == UNSIGNED && wi::neg_p (wres))
4202 return true;
4203 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolved to a known fndecl; warn in
	     the dump if the type-inheritance graph disagrees with it.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  /* Devirtualize only when the target list is complete and has at
	     most one entry; zero entries means the call is unreachable.  */
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: replace the call with a trap via
		     __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise still fold *& in the chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE is the arithmetic operation an overflow-checking internal
	 function performs; CPLX_RESULT is true for the IFN_*_OVERFLOW
	 variants whose result is a complex {value, overflow-flag} pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* Drop the check when the object size is unknown (all-ones)
	       or the constant offset is provably within bounds.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the complex variants fold into the element type of
		 the complex lhs; no lhs means nothing to fold into.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* For the UBSAN checks an overflowing fold must be
		       kept as a call so the sanitizer can report it.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package value and overflow flag into the complex result
		 the IFN_*_OVERFLOW callers expect.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4535 /* Return true whether NAME has a use on STMT. */
4537 static bool
4538 has_use_on_stmt (tree name, gimple *stmt)
4540 imm_use_iterator iter;
4541 use_operand_p use_p;
4542 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4543 if (USE_STMT (use_p) == stmt)
4544 return true;
4545 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Likewise for operands wrapped inside a comparison tree.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Simplified to a constant: make the condition trivially
	     true or false.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result as a separate statement
	     and compare that against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* In place we may only shrink the rhs, never grow it.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same call, simplified arguments: update them in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; it is rewritten in
   place.  Return true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to get at the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used to look through SSA
   definitions while simplifying.  Return true if the statement at *GSI
   was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address is taken of and re-wrap
	         it in an ADDR_EXPR of the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5134 /* Valueziation callback that ends up not following SSA edges. */
5136 tree
5137 no_follow_ssa_edges (tree)
5139 return NULL_TREE;
5142 /* Valueization callback that ends up following single-use SSA edges only. */
5144 tree
5145 follow_single_use_edges (tree val)
5147 if (TREE_CODE (val) == SSA_NAME
5148 && !has_single_use (val))
5149 return NULL_TREE;
5150 return val;
5153 /* Valueization callback that follows all SSA edges. */
5155 tree
5156 follow_all_ssa_edges (tree val)
5158 return val;
5161 /* Fold the statement pointed to by GSI. In some cases, this function may
5162 replace the whole statement with a new one. Returns true iff folding
5163 makes any changes.
5164 The statement pointed to by GSI should be in valid gimple form but may
5165 be in unfolded state as resulting from for example constant propagation
5166 which can produce *&x = 0. */
5168 bool
5169 fold_stmt (gimple_stmt_iterator *gsi)
5171 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5174 bool
5175 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5177 return fold_stmt_1 (gsi, false, valueize);
5180 /* Perform the minimal folding on statement *GSI. Only operations like
5181 *&x created by constant propagation are handled. The statement cannot
5182 be replaced with a new one. Return true if the statement was
5183 changed, false otherwise.
5184 The statement *GSI should be in valid gimple form but may
5185 be in unfolded state as resulting from for example constant propagation
5186 which can produce *&x = 0. */
5188 bool
5189 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5191 gimple *stmt = gsi_stmt (*gsi);
5192 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5193 gcc_assert (gsi_stmt (*gsi) == stmt);
5194 return changed;
5197 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5198 if EXPR is null or we don't know how.
5199 If non-null, the result always has boolean type. */
5201 static tree
5202 canonicalize_bool (tree expr, bool invert)
5204 if (!expr)
5205 return NULL_TREE;
5206 else if (invert)
5208 if (integer_nonzerop (expr))
5209 return boolean_false_node;
5210 else if (integer_zerop (expr))
5211 return boolean_true_node;
5212 else if (TREE_CODE (expr) == SSA_NAME)
5213 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5214 build_int_cst (TREE_TYPE (expr), 0));
5215 else if (COMPARISON_CLASS_P (expr))
5216 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5217 boolean_type_node,
5218 TREE_OPERAND (expr, 0),
5219 TREE_OPERAND (expr, 1));
5220 else
5221 return NULL_TREE;
5223 else
5225 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5226 return expr;
5227 if (integer_nonzerop (expr))
5228 return boolean_true_node;
5229 else if (integer_zerop (expr))
5230 return boolean_false_node;
5231 else if (TREE_CODE (expr) == SSA_NAME)
5232 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5233 build_int_cst (TREE_TYPE (expr), 0));
5234 else if (COMPARISON_CLASS_P (expr))
5235 return fold_build2 (TREE_CODE (expr),
5236 boolean_type_node,
5237 TREE_OPERAND (expr, 0),
5238 TREE_OPERAND (expr, 1));
5239 else
5240 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and NAME == nonzero.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero) reduces to the comparison
	     defining NAME ...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ... and (name == 0) or (name != nonzero) to its inversion.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5302 /* Check to see if two boolean expressions OP1 and OP2 are logically
5303 equivalent. */
5305 static bool
5306 same_bool_result_p (const_tree op1, const_tree op2)
5308 /* Simple cases first. */
5309 if (operand_equal_p (op1, op2, 0))
5310 return true;
5312 /* Check the cases where at least one of the operands is a comparison.
5313 These are a bit smarter than operand_equal_p in that they apply some
5314 identifies on SSA_NAMEs. */
5315 if (COMPARISON_CLASS_P (op2)
5316 && same_bool_comparison_p (op1, TREE_CODE (op2),
5317 TREE_OPERAND (op2, 0),
5318 TREE_OPERAND (op2, 1)))
5319 return true;
5320 if (COMPARISON_CLASS_P (op1)
5321 && same_bool_comparison_p (op2, TREE_CODE (op1),
5322 TREE_OPERAND (op1, 0),
5323 TREE_OPERAND (op1, 1)))
5324 return true;
5326 /* Default case. */
5327 return false;
5330 /* Forward declarations for some mutually recursive functions. */
5332 static tree
5333 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5334 enum tree_code code2, tree op2a, tree op2b);
5335 static tree
5336 and_var_with_comparison (tree var, bool invert,
5337 enum tree_code code2, tree op2a, tree op2b);
5338 static tree
5339 and_var_with_comparison_1 (gimple *stmt,
5340 enum tree_code code2, tree op2a, tree op2b);
5341 static tree
5342 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5343 enum tree_code code2, tree op2a, tree op2b);
5344 static tree
5345 or_var_with_comparison (tree var, bool invert,
5346 enum tree_code code2, tree op2a, tree op2b);
5347 static tree
5348 or_var_with_comparison_1 (gimple *stmt,
5349 enum tree_code code2, tree op2a, tree op2b);
5351 /* Helper function for and_comparisons_1: try to simplify the AND of the
5352 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5353 If INVERT is true, invert the value of the VAR before doing the AND.
5354 Return NULL_EXPR if we can't simplify this to a single expression. */
5356 static tree
5357 and_var_with_comparison (tree var, bool invert,
5358 enum tree_code code2, tree op2a, tree op2b)
5360 tree t;
5361 gimple *stmt = SSA_NAME_DEF_STMT (var);
5363 /* We can only deal with variables whose definitions are assignments. */
5364 if (!is_gimple_assign (stmt))
5365 return NULL_TREE;
5367 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5368 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5369 Then we only have to consider the simpler non-inverted cases. */
5370 if (invert)
5371 t = or_var_with_comparison_1 (stmt,
5372 invert_tree_comparison (code2, false),
5373 op2a, op2b);
5374 else
5375 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5376 return canonicalize_bool (t, invert);
5379 /* Try to simplify the AND of the ssa variable defined by the assignment
5380 STMT with the comparison specified by (OP2A CODE2 OP2B).
5381 Return NULL_EXPR if we can't simplify this to a single expression. */
5383 static tree
5384 and_var_with_comparison_1 (gimple *stmt,
5385 enum tree_code code2, tree op2a, tree op2b)
5387 tree var = gimple_assign_lhs (stmt);
5388 tree true_test_var = NULL_TREE;
5389 tree false_test_var = NULL_TREE;
5390 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5392 /* Check for identities like (var AND (var == 0)) => false. */
5393 if (TREE_CODE (op2a) == SSA_NAME
5394 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5396 if ((code2 == NE_EXPR && integer_zerop (op2b))
5397 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5399 true_test_var = op2a;
5400 if (var == true_test_var)
5401 return var;
5403 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5404 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5406 false_test_var = op2a;
5407 if (var == false_test_var)
5408 return boolean_false_node;
5412 /* If the definition is a comparison, recurse on it. */
5413 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5415 tree t = and_comparisons_1 (innercode,
5416 gimple_assign_rhs1 (stmt),
5417 gimple_assign_rhs2 (stmt),
5418 code2,
5419 op2a,
5420 op2b);
5421 if (t)
5422 return t;
5425 /* If the definition is an AND or OR expression, we may be able to
5426 simplify by reassociating. */
5427 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5428 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5430 tree inner1 = gimple_assign_rhs1 (stmt);
5431 tree inner2 = gimple_assign_rhs2 (stmt);
5432 gimple *s;
5433 tree t;
5434 tree partial = NULL_TREE;
5435 bool is_and = (innercode == BIT_AND_EXPR);
5437 /* Check for boolean identities that don't require recursive examination
5438 of inner1/inner2:
5439 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5440 inner1 AND (inner1 OR inner2) => inner1
5441 !inner1 AND (inner1 AND inner2) => false
5442 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5443 Likewise for similar cases involving inner2. */
5444 if (inner1 == true_test_var)
5445 return (is_and ? var : inner1);
5446 else if (inner2 == true_test_var)
5447 return (is_and ? var : inner2);
5448 else if (inner1 == false_test_var)
5449 return (is_and
5450 ? boolean_false_node
5451 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5452 else if (inner2 == false_test_var)
5453 return (is_and
5454 ? boolean_false_node
5455 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5457 /* Next, redistribute/reassociate the AND across the inner tests.
5458 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5459 if (TREE_CODE (inner1) == SSA_NAME
5460 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5461 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5462 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5463 gimple_assign_rhs1 (s),
5464 gimple_assign_rhs2 (s),
5465 code2, op2a, op2b)))
5467 /* Handle the AND case, where we are reassociating:
5468 (inner1 AND inner2) AND (op2a code2 op2b)
5469 => (t AND inner2)
5470 If the partial result t is a constant, we win. Otherwise
5471 continue on to try reassociating with the other inner test. */
5472 if (is_and)
5474 if (integer_onep (t))
5475 return inner2;
5476 else if (integer_zerop (t))
5477 return boolean_false_node;
5480 /* Handle the OR case, where we are redistributing:
5481 (inner1 OR inner2) AND (op2a code2 op2b)
5482 => (t OR (inner2 AND (op2a code2 op2b))) */
5483 else if (integer_onep (t))
5484 return boolean_true_node;
5486 /* Save partial result for later. */
5487 partial = t;
5490 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5491 if (TREE_CODE (inner2) == SSA_NAME
5492 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5493 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5494 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5495 gimple_assign_rhs1 (s),
5496 gimple_assign_rhs2 (s),
5497 code2, op2a, op2b)))
5499 /* Handle the AND case, where we are reassociating:
5500 (inner1 AND inner2) AND (op2a code2 op2b)
5501 => (inner1 AND t) */
5502 if (is_and)
5504 if (integer_onep (t))
5505 return inner1;
5506 else if (integer_zerop (t))
5507 return boolean_false_node;
5508 /* If both are the same, we can apply the identity
5509 (x AND x) == x. */
5510 else if (partial && same_bool_result_p (t, partial))
5511 return t;
5514 /* Handle the OR case. where we are redistributing:
5515 (inner1 OR inner2) AND (op2a code2 op2b)
5516 => (t OR (inner1 AND (op2a code2 op2b)))
5517 => (t OR partial) */
5518 else
5520 if (integer_onep (t))
5521 return boolean_true_node;
5522 else if (partial)
5524 /* We already got a simplification for the other
5525 operand to the redistributed OR expression. The
5526 interesting case is when at least one is false.
5527 Or, if both are the same, we can apply the identity
5528 (x OR x) == x. */
5529 if (integer_zerop (partial))
5530 return t;
5531 else if (integer_zerop (t))
5532 return partial;
5533 else if (same_bool_result_p (t, partial))
5534 return t;
5539 return NULL_TREE;
5542 /* Try to simplify the AND of two comparisons defined by
5543 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5544 If this can be done without constructing an intermediate value,
5545 return the resulting tree; otherwise NULL_TREE is returned.
5546 This function is deliberately asymmetric as it recurses on SSA_DEFs
5547 in the first comparison but not the second. */
5549 static tree
5550 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5551 enum tree_code code2, tree op2a, tree op2b)
5553 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5555 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5556 if (operand_equal_p (op1a, op2a, 0)
5557 && operand_equal_p (op1b, op2b, 0))
5559 /* Result will be either NULL_TREE, or a combined comparison. */
5560 tree t = combine_comparisons (UNKNOWN_LOCATION,
5561 TRUTH_ANDIF_EXPR, code1, code2,
5562 truth_type, op1a, op1b);
5563 if (t)
5564 return t;
5567 /* Likewise the swapped case of the above. */
5568 if (operand_equal_p (op1a, op2b, 0)
5569 && operand_equal_p (op1b, op2a, 0))
5571 /* Result will be either NULL_TREE, or a combined comparison. */
5572 tree t = combine_comparisons (UNKNOWN_LOCATION,
5573 TRUTH_ANDIF_EXPR, code1,
5574 swap_tree_comparison (code2),
5575 truth_type, op1a, op1b);
5576 if (t)
5577 return t;
5580 /* If both comparisons are of the same value against constants, we might
5581 be able to merge them. */
5582 if (operand_equal_p (op1a, op2a, 0)
5583 && TREE_CODE (op1b) == INTEGER_CST
5584 && TREE_CODE (op2b) == INTEGER_CST)
5586 int cmp = tree_int_cst_compare (op1b, op2b);
5588 /* If we have (op1a == op1b), we should either be able to
5589 return that or FALSE, depending on whether the constant op1b
5590 also satisfies the other comparison against op2b. */
5591 if (code1 == EQ_EXPR)
5593 bool done = true;
5594 bool val;
5595 switch (code2)
5597 case EQ_EXPR: val = (cmp == 0); break;
5598 case NE_EXPR: val = (cmp != 0); break;
5599 case LT_EXPR: val = (cmp < 0); break;
5600 case GT_EXPR: val = (cmp > 0); break;
5601 case LE_EXPR: val = (cmp <= 0); break;
5602 case GE_EXPR: val = (cmp >= 0); break;
5603 default: done = false;
5605 if (done)
5607 if (val)
5608 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5609 else
5610 return boolean_false_node;
5613 /* Likewise if the second comparison is an == comparison. */
5614 else if (code2 == EQ_EXPR)
5616 bool done = true;
5617 bool val;
5618 switch (code1)
5620 case EQ_EXPR: val = (cmp == 0); break;
5621 case NE_EXPR: val = (cmp != 0); break;
5622 case LT_EXPR: val = (cmp > 0); break;
5623 case GT_EXPR: val = (cmp < 0); break;
5624 case LE_EXPR: val = (cmp >= 0); break;
5625 case GE_EXPR: val = (cmp <= 0); break;
5626 default: done = false;
5628 if (done)
5630 if (val)
5631 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5632 else
5633 return boolean_false_node;
5637 /* Same business with inequality tests. */
5638 else if (code1 == NE_EXPR)
5640 bool val;
5641 switch (code2)
5643 case EQ_EXPR: val = (cmp != 0); break;
5644 case NE_EXPR: val = (cmp == 0); break;
5645 case LT_EXPR: val = (cmp >= 0); break;
5646 case GT_EXPR: val = (cmp <= 0); break;
5647 case LE_EXPR: val = (cmp > 0); break;
5648 case GE_EXPR: val = (cmp < 0); break;
5649 default:
5650 val = false;
5652 if (val)
5653 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5655 else if (code2 == NE_EXPR)
5657 bool val;
5658 switch (code1)
5660 case EQ_EXPR: val = (cmp == 0); break;
5661 case NE_EXPR: val = (cmp != 0); break;
5662 case LT_EXPR: val = (cmp <= 0); break;
5663 case GT_EXPR: val = (cmp >= 0); break;
5664 case LE_EXPR: val = (cmp < 0); break;
5665 case GE_EXPR: val = (cmp > 0); break;
5666 default:
5667 val = false;
5669 if (val)
5670 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5673 /* Chose the more restrictive of two < or <= comparisons. */
5674 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5675 && (code2 == LT_EXPR || code2 == LE_EXPR))
5677 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5678 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5679 else
5680 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5683 /* Likewise chose the more restrictive of two > or >= comparisons. */
5684 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5685 && (code2 == GT_EXPR || code2 == GE_EXPR))
5687 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5688 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5689 else
5690 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5693 /* Check for singleton ranges. */
5694 else if (cmp == 0
5695 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5696 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5697 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5699 /* Check for disjoint ranges. */
5700 else if (cmp <= 0
5701 && (code1 == LT_EXPR || code1 == LE_EXPR)
5702 && (code2 == GT_EXPR || code2 == GE_EXPR))
5703 return boolean_false_node;
5704 else if (cmp >= 0
5705 && (code1 == GT_EXPR || code1 == GE_EXPR)
5706 && (code2 == LT_EXPR || code2 == LE_EXPR))
5707 return boolean_false_node;
5710 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5711 NAME's definition is a truth value. See if there are any simplifications
5712 that can be done against the NAME's definition. */
5713 if (TREE_CODE (op1a) == SSA_NAME
5714 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5715 && (integer_zerop (op1b) || integer_onep (op1b)))
5717 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5718 || (code1 == NE_EXPR && integer_onep (op1b)));
5719 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5720 switch (gimple_code (stmt))
5722 case GIMPLE_ASSIGN:
5723 /* Try to simplify by copy-propagating the definition. */
5724 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5726 case GIMPLE_PHI:
5727 /* If every argument to the PHI produces the same result when
5728 ANDed with the second comparison, we win.
5729 Do not do this unless the type is bool since we need a bool
5730 result here anyway. */
5731 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5733 tree result = NULL_TREE;
5734 unsigned i;
5735 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5737 tree arg = gimple_phi_arg_def (stmt, i);
5739 /* If this PHI has itself as an argument, ignore it.
5740 If all the other args produce the same result,
5741 we're still OK. */
5742 if (arg == gimple_phi_result (stmt))
5743 continue;
5744 else if (TREE_CODE (arg) == INTEGER_CST)
5746 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5748 if (!result)
5749 result = boolean_false_node;
5750 else if (!integer_zerop (result))
5751 return NULL_TREE;
5753 else if (!result)
5754 result = fold_build2 (code2, boolean_type_node,
5755 op2a, op2b);
5756 else if (!same_bool_comparison_p (result,
5757 code2, op2a, op2b))
5758 return NULL_TREE;
5760 else if (TREE_CODE (arg) == SSA_NAME
5761 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5763 tree temp;
5764 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5765 /* In simple cases we can look through PHI nodes,
5766 but we have to be careful with loops.
5767 See PR49073. */
5768 if (! dom_info_available_p (CDI_DOMINATORS)
5769 || gimple_bb (def_stmt) == gimple_bb (stmt)
5770 || dominated_by_p (CDI_DOMINATORS,
5771 gimple_bb (def_stmt),
5772 gimple_bb (stmt)))
5773 return NULL_TREE;
5774 temp = and_var_with_comparison (arg, invert, code2,
5775 op2a, op2b);
5776 if (!temp)
5777 return NULL_TREE;
5778 else if (!result)
5779 result = temp;
5780 else if (!same_bool_result_p (result, temp))
5781 return NULL_TREE;
5783 else
5784 return NULL_TREE;
5786 return result;
5789 default:
5790 break;
5793 return NULL_TREE;
5796 /* Try to simplify the AND of two comparisons, specified by
5797 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5798 If this can be simplified to a single expression (without requiring
5799 introducing more SSA variables to hold intermediate values),
5800 return the resulting tree. Otherwise return NULL_TREE.
5801 If the result expression is non-null, it has boolean type. */
5803 tree
5804 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5805 enum tree_code code2, tree op2a, tree op2b)
5807 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5808 if (t)
5809 return t;
5810 else
5811 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5814 /* Helper function for or_comparisons_1: try to simplify the OR of the
5815 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5816 If INVERT is true, invert the value of VAR before doing the OR.
5817 Return NULL_EXPR if we can't simplify this to a single expression. */
5819 static tree
5820 or_var_with_comparison (tree var, bool invert,
5821 enum tree_code code2, tree op2a, tree op2b)
5823 tree t;
5824 gimple *stmt = SSA_NAME_DEF_STMT (var);
5826 /* We can only deal with variables whose definitions are assignments. */
5827 if (!is_gimple_assign (stmt))
5828 return NULL_TREE;
5830 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5831 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5832 Then we only have to consider the simpler non-inverted cases. */
5833 if (invert)
5834 t = and_var_with_comparison_1 (stmt,
5835 invert_tree_comparison (code2, false),
5836 op2a, op2b);
5837 else
5838 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5839 return canonicalize_bool (t, invert);
5842 /* Try to simplify the OR of the ssa variable defined by the assignment
5843 STMT with the comparison specified by (OP2A CODE2 OP2B).
5844 Return NULL_EXPR if we can't simplify this to a single expression. */
5846 static tree
5847 or_var_with_comparison_1 (gimple *stmt,
5848 enum tree_code code2, tree op2a, tree op2b)
5850 tree var = gimple_assign_lhs (stmt);
5851 tree true_test_var = NULL_TREE;
5852 tree false_test_var = NULL_TREE;
5853 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5855 /* Check for identities like (var OR (var != 0)) => true . */
5856 if (TREE_CODE (op2a) == SSA_NAME
5857 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5859 if ((code2 == NE_EXPR && integer_zerop (op2b))
5860 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5862 true_test_var = op2a;
5863 if (var == true_test_var)
5864 return var;
5866 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5867 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5869 false_test_var = op2a;
5870 if (var == false_test_var)
5871 return boolean_true_node;
5875 /* If the definition is a comparison, recurse on it. */
5876 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5878 tree t = or_comparisons_1 (innercode,
5879 gimple_assign_rhs1 (stmt),
5880 gimple_assign_rhs2 (stmt),
5881 code2,
5882 op2a,
5883 op2b);
5884 if (t)
5885 return t;
5888 /* If the definition is an AND or OR expression, we may be able to
5889 simplify by reassociating. */
5890 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5891 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5893 tree inner1 = gimple_assign_rhs1 (stmt);
5894 tree inner2 = gimple_assign_rhs2 (stmt);
5895 gimple *s;
5896 tree t;
5897 tree partial = NULL_TREE;
5898 bool is_or = (innercode == BIT_IOR_EXPR);
5900 /* Check for boolean identities that don't require recursive examination
5901 of inner1/inner2:
5902 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5903 inner1 OR (inner1 AND inner2) => inner1
5904 !inner1 OR (inner1 OR inner2) => true
5905 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5907 if (inner1 == true_test_var)
5908 return (is_or ? var : inner1);
5909 else if (inner2 == true_test_var)
5910 return (is_or ? var : inner2);
5911 else if (inner1 == false_test_var)
5912 return (is_or
5913 ? boolean_true_node
5914 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5915 else if (inner2 == false_test_var)
5916 return (is_or
5917 ? boolean_true_node
5918 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5920 /* Next, redistribute/reassociate the OR across the inner tests.
5921 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5922 if (TREE_CODE (inner1) == SSA_NAME
5923 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5924 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5925 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5926 gimple_assign_rhs1 (s),
5927 gimple_assign_rhs2 (s),
5928 code2, op2a, op2b)))
5930 /* Handle the OR case, where we are reassociating:
5931 (inner1 OR inner2) OR (op2a code2 op2b)
5932 => (t OR inner2)
5933 If the partial result t is a constant, we win. Otherwise
5934 continue on to try reassociating with the other inner test. */
5935 if (is_or)
5937 if (integer_onep (t))
5938 return boolean_true_node;
5939 else if (integer_zerop (t))
5940 return inner2;
5943 /* Handle the AND case, where we are redistributing:
5944 (inner1 AND inner2) OR (op2a code2 op2b)
5945 => (t AND (inner2 OR (op2a code op2b))) */
5946 else if (integer_zerop (t))
5947 return boolean_false_node;
5949 /* Save partial result for later. */
5950 partial = t;
5953 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5954 if (TREE_CODE (inner2) == SSA_NAME
5955 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5956 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5957 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5958 gimple_assign_rhs1 (s),
5959 gimple_assign_rhs2 (s),
5960 code2, op2a, op2b)))
5962 /* Handle the OR case, where we are reassociating:
5963 (inner1 OR inner2) OR (op2a code2 op2b)
5964 => (inner1 OR t)
5965 => (t OR partial) */
5966 if (is_or)
5968 if (integer_zerop (t))
5969 return inner1;
5970 else if (integer_onep (t))
5971 return boolean_true_node;
5972 /* If both are the same, we can apply the identity
5973 (x OR x) == x. */
5974 else if (partial && same_bool_result_p (t, partial))
5975 return t;
5978 /* Handle the AND case, where we are redistributing:
5979 (inner1 AND inner2) OR (op2a code2 op2b)
5980 => (t AND (inner1 OR (op2a code2 op2b)))
5981 => (t AND partial) */
5982 else
5984 if (integer_zerop (t))
5985 return boolean_false_node;
5986 else if (partial)
5988 /* We already got a simplification for the other
5989 operand to the redistributed AND expression. The
5990 interesting case is when at least one is true.
5991 Or, if both are the same, we can apply the identity
5992 (x AND x) == x. */
5993 if (integer_onep (partial))
5994 return t;
5995 else if (integer_onep (t))
5996 return partial;
5997 else if (same_bool_result_p (t, partial))
5998 return t;
6003 return NULL_TREE;
6006 /* Try to simplify the OR of two comparisons defined by
6007 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6008 If this can be done without constructing an intermediate value,
6009 return the resulting tree; otherwise NULL_TREE is returned.
6010 This function is deliberately asymmetric as it recurses on SSA_DEFs
6011 in the first comparison but not the second. */
6013 static tree
6014 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
6015 enum tree_code code2, tree op2a, tree op2b)
6017 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6019 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6020 if (operand_equal_p (op1a, op2a, 0)
6021 && operand_equal_p (op1b, op2b, 0))
6023 /* Result will be either NULL_TREE, or a combined comparison. */
6024 tree t = combine_comparisons (UNKNOWN_LOCATION,
6025 TRUTH_ORIF_EXPR, code1, code2,
6026 truth_type, op1a, op1b);
6027 if (t)
6028 return t;
6031 /* Likewise the swapped case of the above. */
6032 if (operand_equal_p (op1a, op2b, 0)
6033 && operand_equal_p (op1b, op2a, 0))
6035 /* Result will be either NULL_TREE, or a combined comparison. */
6036 tree t = combine_comparisons (UNKNOWN_LOCATION,
6037 TRUTH_ORIF_EXPR, code1,
6038 swap_tree_comparison (code2),
6039 truth_type, op1a, op1b);
6040 if (t)
6041 return t;
6044 /* If both comparisons are of the same value against constants, we might
6045 be able to merge them. */
6046 if (operand_equal_p (op1a, op2a, 0)
6047 && TREE_CODE (op1b) == INTEGER_CST
6048 && TREE_CODE (op2b) == INTEGER_CST)
6050 int cmp = tree_int_cst_compare (op1b, op2b);
6052 /* If we have (op1a != op1b), we should either be able to
6053 return that or TRUE, depending on whether the constant op1b
6054 also satisfies the other comparison against op2b. */
6055 if (code1 == NE_EXPR)
6057 bool done = true;
6058 bool val;
6059 switch (code2)
6061 case EQ_EXPR: val = (cmp == 0); break;
6062 case NE_EXPR: val = (cmp != 0); break;
6063 case LT_EXPR: val = (cmp < 0); break;
6064 case GT_EXPR: val = (cmp > 0); break;
6065 case LE_EXPR: val = (cmp <= 0); break;
6066 case GE_EXPR: val = (cmp >= 0); break;
6067 default: done = false;
6069 if (done)
6071 if (val)
6072 return boolean_true_node;
6073 else
6074 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6077 /* Likewise if the second comparison is a != comparison. */
6078 else if (code2 == NE_EXPR)
6080 bool done = true;
6081 bool val;
6082 switch (code1)
6084 case EQ_EXPR: val = (cmp == 0); break;
6085 case NE_EXPR: val = (cmp != 0); break;
6086 case LT_EXPR: val = (cmp > 0); break;
6087 case GT_EXPR: val = (cmp < 0); break;
6088 case LE_EXPR: val = (cmp >= 0); break;
6089 case GE_EXPR: val = (cmp <= 0); break;
6090 default: done = false;
6092 if (done)
6094 if (val)
6095 return boolean_true_node;
6096 else
6097 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6101 /* See if an equality test is redundant with the other comparison. */
6102 else if (code1 == EQ_EXPR)
6104 bool val;
6105 switch (code2)
6107 case EQ_EXPR: val = (cmp == 0); break;
6108 case NE_EXPR: val = (cmp != 0); break;
6109 case LT_EXPR: val = (cmp < 0); break;
6110 case GT_EXPR: val = (cmp > 0); break;
6111 case LE_EXPR: val = (cmp <= 0); break;
6112 case GE_EXPR: val = (cmp >= 0); break;
6113 default:
6114 val = false;
6116 if (val)
6117 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6119 else if (code2 == EQ_EXPR)
6121 bool val;
6122 switch (code1)
6124 case EQ_EXPR: val = (cmp == 0); break;
6125 case NE_EXPR: val = (cmp != 0); break;
6126 case LT_EXPR: val = (cmp > 0); break;
6127 case GT_EXPR: val = (cmp < 0); break;
6128 case LE_EXPR: val = (cmp >= 0); break;
6129 case GE_EXPR: val = (cmp <= 0); break;
6130 default:
6131 val = false;
6133 if (val)
6134 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6137 /* Chose the less restrictive of two < or <= comparisons. */
6138 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6139 && (code2 == LT_EXPR || code2 == LE_EXPR))
6141 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6142 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6143 else
6144 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6147 /* Likewise chose the less restrictive of two > or >= comparisons. */
6148 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6149 && (code2 == GT_EXPR || code2 == GE_EXPR))
6151 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6152 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6153 else
6154 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6157 /* Check for singleton ranges. */
6158 else if (cmp == 0
6159 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6160 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6161 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6163 /* Check for less/greater pairs that don't restrict the range at all. */
6164 else if (cmp >= 0
6165 && (code1 == LT_EXPR || code1 == LE_EXPR)
6166 && (code2 == GT_EXPR || code2 == GE_EXPR))
6167 return boolean_true_node;
6168 else if (cmp <= 0
6169 && (code1 == GT_EXPR || code1 == GE_EXPR)
6170 && (code2 == LT_EXPR || code2 == LE_EXPR))
6171 return boolean_true_node;
6174 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6175 NAME's definition is a truth value. See if there are any simplifications
6176 that can be done against the NAME's definition. */
6177 if (TREE_CODE (op1a) == SSA_NAME
6178 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6179 && (integer_zerop (op1b) || integer_onep (op1b)))
6181 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6182 || (code1 == NE_EXPR && integer_onep (op1b)));
6183 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6184 switch (gimple_code (stmt))
6186 case GIMPLE_ASSIGN:
6187 /* Try to simplify by copy-propagating the definition. */
6188 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6190 case GIMPLE_PHI:
6191 /* If every argument to the PHI produces the same result when
6192 ORed with the second comparison, we win.
6193 Do not do this unless the type is bool since we need a bool
6194 result here anyway. */
6195 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6197 tree result = NULL_TREE;
6198 unsigned i;
6199 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6201 tree arg = gimple_phi_arg_def (stmt, i);
6203 /* If this PHI has itself as an argument, ignore it.
6204 If all the other args produce the same result,
6205 we're still OK. */
6206 if (arg == gimple_phi_result (stmt))
6207 continue;
6208 else if (TREE_CODE (arg) == INTEGER_CST)
6210 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6212 if (!result)
6213 result = boolean_true_node;
6214 else if (!integer_onep (result))
6215 return NULL_TREE;
6217 else if (!result)
6218 result = fold_build2 (code2, boolean_type_node,
6219 op2a, op2b);
6220 else if (!same_bool_comparison_p (result,
6221 code2, op2a, op2b))
6222 return NULL_TREE;
6224 else if (TREE_CODE (arg) == SSA_NAME
6225 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6227 tree temp;
6228 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6229 /* In simple cases we can look through PHI nodes,
6230 but we have to be careful with loops.
6231 See PR49073. */
6232 if (! dom_info_available_p (CDI_DOMINATORS)
6233 || gimple_bb (def_stmt) == gimple_bb (stmt)
6234 || dominated_by_p (CDI_DOMINATORS,
6235 gimple_bb (def_stmt),
6236 gimple_bb (stmt)))
6237 return NULL_TREE;
6238 temp = or_var_with_comparison (arg, invert, code2,
6239 op2a, op2b);
6240 if (!temp)
6241 return NULL_TREE;
6242 else if (!result)
6243 result = temp;
6244 else if (!same_bool_result_p (result, temp))
6245 return NULL_TREE;
6247 else
6248 return NULL_TREE;
6250 return result;
6253 default:
6254 break;
6257 return NULL_TREE;
6260 /* Try to simplify the OR of two comparisons, specified by
6261 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6262 If this can be simplified to a single expression (without requiring
6263 introducing more SSA variables to hold intermediate values),
6264 return the resulting tree. Otherwise return NULL_TREE.
6265 If the result expression is non-null, it has boolean type. */
6267 tree
6268 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6269 enum tree_code code2, tree op2a, tree op2b)
6271 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6272 if (t)
6273 return t;
6274 else
6275 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6279 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6281 Either NULL_TREE, a simplified but non-constant or a constant
6282 is returned.
/* GVALUEIZE is the valueization callback handed to gimple_simplify for
   following SSA def chains; VALUEIZE alone is used for the hand-written
   folding below.  */
6284 ??? This should go into a gimple-fold-inline.h file to be eventually
6285 privatized with the single valueize function used in the various TUs
6286 to avoid the indirect function call overhead. */
6288 tree
6289 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6290 tree (*gvalueize) (tree))
6292 gimple_match_op res_op;
6293 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6294 edges if there are intermediate VARYING defs. For this reason
6295 do not follow SSA edges here even though SCCVN can technically
6296 just deal fine with that. */
6297 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
/* Accept the match-and-simplify result only when it is already a GIMPLE
   value, or when the mprts_hook can materialize it as one.  */
6299 tree res = NULL_TREE;
6300 if (gimple_simplified_result_is_gimple_val (&res_op))
6301 res = res_op.ops[0];
6302 else if (mprts_hook)
6303 res = mprts_hook (&res_op);
6304 if (res)
6306 if (dump_file && dump_flags & TDF_DETAILS)
6308 fprintf (dump_file, "Match-and-simplified ");
6309 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6310 fprintf (dump_file, " to ");
6311 print_generic_expr (dump_file, res);
6312 fprintf (dump_file, "\n");
6314 return res;
/* Match-and-simplify did not help; fold the statement by hand,
   dispatching on the GIMPLE statement code.  */
6318 location_t loc = gimple_location (stmt);
6319 switch (gimple_code (stmt))
6321 case GIMPLE_ASSIGN:
6323 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6325 switch (get_gimple_rhs_class (subcode))
6327 case GIMPLE_SINGLE_RHS:
6329 tree rhs = gimple_assign_rhs1 (stmt);
6330 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6332 if (TREE_CODE (rhs) == SSA_NAME)
6334 /* If the RHS is an SSA_NAME, return its known constant value,
6335 if any. */
6336 return (*valueize) (rhs);
6338 /* Handle propagating invariant addresses into address
6339 operations. */
6340 else if (TREE_CODE (rhs) == ADDR_EXPR
6341 && !is_gimple_min_invariant (rhs))
6343 poly_int64 offset = 0;
6344 tree base;
6345 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6346 &offset,
6347 valueize);
6348 if (base
6349 && (CONSTANT_CLASS_P (base)
6350 || decl_address_invariant_p (base)))
6351 return build_invariant_address (TREE_TYPE (rhs),
6352 base, offset);
/* A vector CONSTRUCTOR with one value per lane: fold to a VECTOR_CST
   when every valueized element is a constant.  */
6354 else if (TREE_CODE (rhs) == CONSTRUCTOR
6355 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6356 && known_eq (CONSTRUCTOR_NELTS (rhs),
6357 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6359 unsigned i, nelts;
6360 tree val;
6362 nelts = CONSTRUCTOR_NELTS (rhs);
6363 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6364 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6366 val = (*valueize) (val);
6367 if (TREE_CODE (val) == INTEGER_CST
6368 || TREE_CODE (val) == REAL_CST
6369 || TREE_CODE (val) == FIXED_CST)
6370 vec.quick_push (val);
6371 else
6372 return NULL_TREE;
6375 return vec.build ();
6377 if (subcode == OBJ_TYPE_REF)
6379 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6380 /* If callee is constant, we can fold away the wrapper. */
6381 if (is_gimple_min_invariant (val))
6382 return val;
6385 if (kind == tcc_reference)
6387 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6388 || TREE_CODE (rhs) == REALPART_EXPR
6389 || TREE_CODE (rhs) == IMAGPART_EXPR)
6390 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6392 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6393 return fold_unary_loc (EXPR_LOCATION (rhs),
6394 TREE_CODE (rhs),
6395 TREE_TYPE (rhs), val);
6397 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6398 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6400 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6401 return fold_ternary_loc (EXPR_LOCATION (rhs),
6402 TREE_CODE (rhs),
6403 TREE_TYPE (rhs), val,
6404 TREE_OPERAND (rhs, 1),
6405 TREE_OPERAND (rhs, 2));
6407 else if (TREE_CODE (rhs) == MEM_REF
6408 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6410 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6411 if (TREE_CODE (val) == ADDR_EXPR
6412 && is_gimple_min_invariant (val))
6414 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6415 unshare_expr (val),
6416 TREE_OPERAND (rhs, 1));
6417 if (tem)
6418 rhs = tem;
6421 return fold_const_aggregate_ref_1 (rhs, valueize);
6423 else if (kind == tcc_declaration)
6424 return get_symbol_constant_value (rhs);
6425 return rhs;
6428 case GIMPLE_UNARY_RHS:
6429 return NULL_TREE;
6431 case GIMPLE_BINARY_RHS:
6432 /* Translate &x + CST into an invariant form suitable for
6433 further propagation. */
6434 if (subcode == POINTER_PLUS_EXPR)
6436 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6437 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6438 if (TREE_CODE (op0) == ADDR_EXPR
6439 && TREE_CODE (op1) == INTEGER_CST)
6441 tree off = fold_convert (ptr_type_node, op1);
6442 return build_fold_addr_expr_loc
6443 (loc,
6444 fold_build2 (MEM_REF,
6445 TREE_TYPE (TREE_TYPE (op0)),
6446 unshare_expr (op0), off));
6449 /* Canonicalize bool != 0 and bool == 0 appearing after
6450 valueization. While gimple_simplify handles this
6451 it can get confused by the ~X == 1 -> X == 0 transform
6452 which we can't reduce to a SSA name or a constant
6453 (and we have no way to tell gimple_simplify to not
6454 consider those transforms in the first place). */
6455 else if (subcode == EQ_EXPR
6456 || subcode == NE_EXPR)
6458 tree lhs = gimple_assign_lhs (stmt);
6459 tree op0 = gimple_assign_rhs1 (stmt);
6460 if (useless_type_conversion_p (TREE_TYPE (lhs),
6461 TREE_TYPE (op0)))
6463 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6464 op0 = (*valueize) (op0);
6465 if (TREE_CODE (op0) == INTEGER_CST)
6466 std::swap (op0, op1);
6467 if (TREE_CODE (op1) == INTEGER_CST
6468 && ((subcode == NE_EXPR && integer_zerop (op1))
6469 || (subcode == EQ_EXPR && integer_onep (op1))))
6470 return op0;
6473 return NULL_TREE;
6475 case GIMPLE_TERNARY_RHS:
6477 /* Handle ternary operators that can appear in GIMPLE form. */
6478 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6479 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6480 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6481 return fold_ternary_loc (loc, subcode,
6482 gimple_expr_type (stmt), op0, op1, op2);
6485 default:
6486 gcc_unreachable ();
6490 case GIMPLE_CALL:
6492 tree fn;
6493 gcall *call_stmt = as_a <gcall *> (stmt);
/* Internal calls: fold the UBSAN arithmetic checks and
   BUILT_IN_EXPECT when the arguments valueize to constants.  */
6495 if (gimple_call_internal_p (stmt))
6497 enum tree_code subcode = ERROR_MARK;
6498 switch (gimple_call_internal_fn (stmt))
6500 case IFN_UBSAN_CHECK_ADD:
6501 subcode = PLUS_EXPR;
6502 break;
6503 case IFN_UBSAN_CHECK_SUB:
6504 subcode = MINUS_EXPR;
6505 break;
6506 case IFN_UBSAN_CHECK_MUL:
6507 subcode = MULT_EXPR;
6508 break;
6509 case IFN_BUILTIN_EXPECT:
6511 tree arg0 = gimple_call_arg (stmt, 0);
6512 tree op0 = (*valueize) (arg0);
6513 if (TREE_CODE (op0) == INTEGER_CST)
6514 return op0;
6515 return NULL_TREE;
6517 default:
6518 return NULL_TREE;
6520 tree arg0 = gimple_call_arg (stmt, 0);
6521 tree arg1 = gimple_call_arg (stmt, 1);
6522 tree op0 = (*valueize) (arg0);
6523 tree op1 = (*valueize) (arg1);
/* With non-constant operands only a few identities that cannot
   overflow can be folded.  */
6525 if (TREE_CODE (op0) != INTEGER_CST
6526 || TREE_CODE (op1) != INTEGER_CST)
6528 switch (subcode)
6530 case MULT_EXPR:
6531 /* x * 0 = 0 * x = 0 without overflow. */
6532 if (integer_zerop (op0) || integer_zerop (op1))
6533 return build_zero_cst (TREE_TYPE (arg0));
6534 break;
6535 case MINUS_EXPR:
6536 /* y - y = 0 without overflow. */
6537 if (operand_equal_p (op0, op1, 0))
6538 return build_zero_cst (TREE_TYPE (arg0));
6539 break;
6540 default:
6541 break;
6544 tree res
6545 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6546 if (res
6547 && TREE_CODE (res) == INTEGER_CST
6548 && !TREE_OVERFLOW (res))
6549 return res;
6550 return NULL_TREE;
/* A regular call: fold calls to built-ins whose arguments all
   valueize to constants.  */
6553 fn = (*valueize) (gimple_call_fn (stmt));
6554 if (TREE_CODE (fn) == ADDR_EXPR
6555 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6556 && gimple_builtin_call_types_compatible_p (stmt,
6557 TREE_OPERAND (fn, 0)))
6559 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6560 tree retval;
6561 unsigned i;
6562 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6563 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6564 retval = fold_builtin_call_array (loc,
6565 gimple_call_return_type (call_stmt),
6566 fn, gimple_call_num_args (stmt), args);
6567 if (retval)
6569 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6570 STRIP_NOPS (retval);
6571 retval = fold_convert (gimple_call_return_type (call_stmt),
6572 retval);
6574 return retval;
6576 return NULL_TREE;
6579 default:
6580 return NULL_TREE;
6584 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6585 Returns NULL_TREE if folding to a constant is not possible, otherwise
6586 returns a constant according to is_gimple_min_invariant. */
6588 tree
6589 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6591 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6592 if (res && is_gimple_min_invariant (res))
6593 return res;
6594 return NULL_TREE;
6598 /* The following set of functions are supposed to fold references using
6599 their constant initializers. */
6601 /* See if we can find constructor defining value of BASE.
6602 When we know the constructor with constant offset (such as
6603 base is array[40] and we do know constructor of array), then
6604 BIT_OFFSET is adjusted accordingly.
6606 As a special case, return error_mark_node when constructor
6607 is not explicitly available, but it is known to be zero
6608 such as 'static const int a;'.
   VALUEIZE, when non-NULL, is used to look through SSA names to
   their known values.  */
6609 static tree
6610 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6611 tree (*valueize)(tree))
6613 poly_int64 bit_offset2, size, max_size;
6614 bool reverse;
/* For a MEM_REF fold its constant offset into *BIT_OFFSET and look
   through the (possibly valueized) address operand.  */
6616 if (TREE_CODE (base) == MEM_REF)
6618 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6619 if (!boff.to_shwi (bit_offset))
6620 return NULL_TREE;
6622 if (valueize
6623 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6624 base = valueize (TREE_OPERAND (base, 0));
6625 if (!base || TREE_CODE (base) != ADDR_EXPR)
6626 return NULL_TREE;
6627 base = TREE_OPERAND (base, 0);
6629 else if (valueize
6630 && TREE_CODE (base) == SSA_NAME)
6631 base = valueize (base);
6633 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6634 DECL_INITIAL. If BASE is a nested reference into another
6635 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6636 the inner reference. */
6637 switch (TREE_CODE (base))
6639 case VAR_DECL:
6640 case CONST_DECL:
6642 tree init = ctor_for_folding (base);
6644 /* Our semantic is exact opposite of ctor_for_folding;
6645 NULL means unknown, while error_mark_node is 0. */
6646 if (init == error_mark_node)
6647 return NULL_TREE;
6648 if (!init)
6649 return error_mark_node;
6650 return init;
6653 case VIEW_CONVERT_EXPR:
6654 return get_base_constructor (TREE_OPERAND (base, 0),
6655 bit_offset, valueize);
6657 case ARRAY_REF:
6658 case COMPONENT_REF:
6659 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6660 &reverse);
/* Give up on variable-sized or non-exact accesses.  */
6661 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6662 return NULL_TREE;
6663 *bit_offset += bit_offset2;
6664 return get_base_constructor (base, bit_offset, valueize);
6666 case CONSTRUCTOR:
6667 return base;
6669 default:
6670 if (CONSTANT_CLASS_P (base))
6671 return base;
6673 return NULL_TREE;
6677 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6678 to the memory at bit OFFSET. When non-null, TYPE is the expected
6679 type of the reference; otherwise the type of the referenced element
6680 is used instead. When SIZE is zero, attempt to fold a reference to
6681 the entire element which OFFSET refers to. Increment *SUBOFF by
6682 the bit offset of the accessed element. */
6684 static tree
6685 fold_array_ctor_reference (tree type, tree ctor,
6686 unsigned HOST_WIDE_INT offset,
6687 unsigned HOST_WIDE_INT size,
6688 tree from_decl,
6689 unsigned HOST_WIDE_INT *suboff)
6691 offset_int low_bound;
6692 offset_int elt_size;
6693 offset_int access_index;
6694 tree domain_type = NULL_TREE;
6695 HOST_WIDE_INT inner_offset;
6697 /* Compute low bound and elt size. */
6698 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6699 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6700 if (domain_type && TYPE_MIN_VALUE (domain_type))
6702 /* Static constructors for variably sized objects make no sense. */
6703 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6704 return NULL_TREE;
6705 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6707 else
6708 low_bound = 0;
6709 /* Static constructors for variably sized objects make no sense. */
6710 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6711 return NULL_TREE;
6712 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6714 /* When TYPE is non-null, verify that it specifies a constant-sized
6715 access not larger than the size of an array element. Avoid division
6716 by zero below when ELT_SIZE is zero, such as with the result of
6717 an initializer for a zero-length array or an empty struct. */
6718 if (elt_size == 0
6719 || (type
6720 && (!TYPE_SIZE_UNIT (type)
6721 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6722 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type)))))
6723 return NULL_TREE;
6725 /* Compute the array index we look for. */
6726 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6727 elt_size);
6728 access_index += low_bound;
6730 /* And offset within the access. */
6731 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6733 /* See if the array field is large enough to span whole access. We do not
6734 care to fold accesses spanning multiple array indexes. */
6735 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6736 return NULL_TREE;
6737 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6739 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6741 /* For the final reference to the entire accessed element
6742 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6743 may be null) in favor of the type of the element, and set
6744 SIZE to the size of the accessed element. */
6745 inner_offset = 0;
6746 type = TREE_TYPE (val);
6747 size = elt_size.to_uhwi () * BITS_PER_UNIT;
/* Record the element's bit position and recurse into its value.  */
6750 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6751 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6752 suboff);
6755 /* Memory not explicitly mentioned in constructor is 0 (or
6756 the reference is out of range). */
6757 return type ? build_zero_cst (type) : NULL_TREE;
6760 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6761 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6762 is the expected type of the reference; otherwise the type of
6763 the referenced member is used instead. When SIZE is zero,
6764 attempt to fold a reference to the entire member which OFFSET
6765 refers to in this case. Increment *SUBOFF by the bit offset
6766 of the accessed member. */
6768 static tree
6769 fold_nonarray_ctor_reference (tree type, tree ctor,
6770 unsigned HOST_WIDE_INT offset,
6771 unsigned HOST_WIDE_INT size,
6772 tree from_decl,
6773 unsigned HOST_WIDE_INT *suboff)
6775 unsigned HOST_WIDE_INT cnt;
6776 tree cfield, cval;
/* Walk the initialized fields looking for the one that covers the
   requested bit range.  */
6778 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6779 cval)
6781 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6782 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6783 tree field_size = DECL_SIZE (cfield);
6785 if (!field_size)
6787 /* Determine the size of the flexible array member from
6788 the size of the initializer provided for it. */
6789 field_size = TYPE_SIZE (TREE_TYPE (cval));
6792 /* Variable sized objects in static constructors make no sense,
6793 but field_size can be NULL for flexible array members. */
6794 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6795 && TREE_CODE (byte_offset) == INTEGER_CST
6796 && (field_size != NULL_TREE
6797 ? TREE_CODE (field_size) == INTEGER_CST
6798 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6800 /* Compute bit offset of the field. */
6801 offset_int bitoffset
6802 = (wi::to_offset (field_offset)
6803 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6804 /* Compute bit offset where the field ends. */
6805 offset_int bitoffset_end;
6806 if (field_size != NULL_TREE)
6807 bitoffset_end = bitoffset + wi::to_offset (field_size);
6808 else
6809 bitoffset_end = 0;
6811 /* Compute the bit offset of the end of the desired access.
6812 As a special case, if the size of the desired access is
6813 zero, assume the access is to the entire field (and let
6814 the caller make any necessary adjustments by storing
6815 the actual bounds of the field in FIELDBOUNDS). */
6816 offset_int access_end = offset_int (offset);
6817 if (size)
6818 access_end += size;
6819 else
6820 access_end = bitoffset_end;
6822 /* Is there any overlap between the desired access at
6823 [OFFSET, OFFSET+SIZE) and the offset of the field within
6824 the object at [BITOFFSET, BITOFFSET_END)? */
6825 if (wi::cmps (access_end, bitoffset) > 0
6826 && (field_size == NULL_TREE
6827 || wi::lts_p (offset, bitoffset_end)))
6829 *suboff += bitoffset.to_uhwi ();
6831 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6833 /* For the final reference to the entire accessed member
6834 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6835 be null) in favor of the type of the member, and set
6836 SIZE to the size of the accessed member. */
6837 offset = bitoffset.to_uhwi ();
6838 type = TREE_TYPE (cval);
6839 size = (bitoffset_end - bitoffset).to_uhwi ();
6842 /* We do have overlap. Now see if the field is large enough
6843 to cover the access. Give up for accesses that extend
6844 beyond the end of the object or that span multiple fields. */
6845 if (wi::cmps (access_end, bitoffset_end) > 0)
6846 return NULL_TREE;
6847 if (offset < bitoffset)
6848 return NULL_TREE;
/* Recurse into the field's own initializer with the offset rebased
   to the start of the field.  */
6850 offset_int inner_offset = offset_int (offset) - bitoffset;
6851 return fold_ctor_reference (type, cval,
6852 inner_offset.to_uhwi (), size,
6853 from_decl, suboff);
6856 /* Memory not explicitly mentioned in constructor is 0. */
6857 return type ? build_zero_cst (type) : NULL_TREE;
6860 /* CTOR is value initializing memory. Fold a reference of TYPE and
6861 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6862 is zero, attempt to fold a reference to the entire subobject
6863 which OFFSET refers to. This is used when folding accesses to
6864 string members of aggregates. When non-null, set *SUBOFF to
6865 the bit offset of the accessed subobject. */
6867 tree
6868 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6869 const poly_uint64 &poly_size, tree from_decl,
6870 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6872 tree ret;
6874 /* We found the field with exact match. */
6875 if (type
6876 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6877 && known_eq (poly_offset, 0U))
6878 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6880 /* The remaining optimizations need a constant size and offset. */
6881 unsigned HOST_WIDE_INT size, offset;
6882 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6883 return NULL_TREE;
6885 /* We are at the end of walk, see if we can view convert the
6886 result. */
6887 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6888 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6889 && !compare_tree_int (TYPE_SIZE (type), size)
6890 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size)
6892 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6893 if (ret)
6895 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6896 if (ret)
6897 STRIP_USELESS_TYPE_CONVERSION (ret);
6899 return ret;
6901 /* For constants and byte-aligned/sized reads try to go through
6902 native_encode/interpret. */
6903 if (CONSTANT_CLASS_P (ctor)
6904 && BITS_PER_UNIT == 8
6905 && offset % BITS_PER_UNIT == 0
6906 && size % BITS_PER_UNIT == 0
6907 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6909 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6910 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6911 offset / BITS_PER_UNIT);
6912 if (len > 0)
6913 return native_interpret_expr (type, buf, len);
/* Recurse into CONSTRUCTOR nodes, dispatching on array/vector
   vs. record/union form.  A local dummy stands in for SUBOFF when
   the caller did not provide one (it defaults to NULL).  */
6915 if (TREE_CODE (ctor) == CONSTRUCTOR)
6917 unsigned HOST_WIDE_INT dummy = 0;
6918 if (!suboff)
6919 suboff = &dummy;
6921 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6922 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6923 return fold_array_ctor_reference (type, ctor, offset, size,
6924 from_decl, suboff);
6926 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6927 from_decl, suboff);
6930 return NULL_TREE;
6933 /* Return the tree representing the element referenced by T if T is an
6934 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6935 names using VALUEIZE. Return NULL_TREE otherwise. */
6937 tree
6938 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6940 tree ctor, idx, base;
6941 poly_int64 offset, size, max_size;
6942 tree tem;
6943 bool reverse;
/* Never fold away a volatile access.  */
6945 if (TREE_THIS_VOLATILE (t))
6946 return NULL_TREE;
6948 if (DECL_P (t))
6949 return get_symbol_constant_value (t);
6951 tem = fold_read_from_constant_string (t);
6952 if (tem)
6953 return tem;
6955 switch (TREE_CODE (t))
6957 case ARRAY_REF:
6958 case ARRAY_RANGE_REF:
6959 /* Constant indexes are handled well by get_base_constructor.
6960 Only special case variable offsets.
6961 FIXME: This code can't handle nested references with variable indexes
6962 (they will be handled only by iteration of ccp). Perhaps we can bring
6963 get_ref_base_and_extent here and make it use a valueize callback. */
6964 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6965 && valueize
6966 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6967 && poly_int_tree_p (idx))
6969 tree low_bound, unit_size;
6971 /* If the resulting bit-offset is constant, track it. */
6972 if ((low_bound = array_ref_low_bound (t),
6973 poly_int_tree_p (low_bound))
6974 && (unit_size = array_ref_element_size (t),
6975 tree_fits_uhwi_p (unit_size)))
6977 poly_offset_int woffset
6978 = wi::sext (wi::to_poly_offset (idx)
6979 - wi::to_poly_offset (low_bound),
6980 TYPE_PRECISION (TREE_TYPE (idx)));
6981 woffset *= tree_to_uhwi (unit_size);
6982 woffset *= BITS_PER_UNIT;
6983 if (woffset.to_shwi (&offset))
6985 base = TREE_OPERAND (t, 0);
6986 ctor = get_base_constructor (base, &offset, valueize);
6987 /* Empty constructor. Always fold to 0. */
6988 if (ctor == error_mark_node)
6989 return build_zero_cst (TREE_TYPE (t));
6990 /* Out of bound array access. Value is undefined,
6991 but don't fold. */
6992 if (maybe_lt (offset, 0))
6993 return NULL_TREE;
6994 /* We cannot determine ctor. */
6995 if (!ctor)
6996 return NULL_TREE;
6997 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6998 tree_to_uhwi (unit_size)
6999 * BITS_PER_UNIT,
7000 base);
7004 /* Fallthru. */
7006 case COMPONENT_REF:
7007 case BIT_FIELD_REF:
7008 case TARGET_MEM_REF:
7009 case MEM_REF:
7010 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7011 ctor = get_base_constructor (base, &offset, valueize);
7013 /* Empty constructor. Always fold to 0. */
7014 if (ctor == error_mark_node)
7015 return build_zero_cst (TREE_TYPE (t));
7016 /* We do not know precise address. */
7017 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7018 return NULL_TREE;
7019 /* We cannot determine ctor. */
7020 if (!ctor)
7021 return NULL_TREE;
7023 /* Out of bound array access. Value is undefined, but don't fold. */
7024 if (maybe_lt (offset, 0))
7025 return NULL_TREE;
7027 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7028 base);
/* __real__/__imag__ of a reference that folds to a COMPLEX_CST.  */
7030 case REALPART_EXPR:
7031 case IMAGPART_EXPR:
7033 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7034 if (c && TREE_CODE (c) == COMPLEX_CST)
7035 return fold_build1_loc (EXPR_LOCATION (t),
7036 TREE_CODE (t), TREE_TYPE (t), c);
7037 break;
7040 default:
7041 break;
7044 return NULL_TREE;
7047 tree
7048 fold_const_aggregate_ref (tree t)
7050 return fold_const_aggregate_ref_1 (t, NULL);
7053 /* Lookup virtual method with index TOKEN in a virtual table V
7054 at OFFSET.
7055 Set CAN_REFER if non-NULL to false if method
7056 is not referable or if the virtual table is ill-formed (such as rewritten
7057 by non-C++ produced symbol). Otherwise just return NULL in that case. */
7059 tree
7060 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7061 tree v,
7062 unsigned HOST_WIDE_INT offset,
7063 bool *can_refer)
7065 tree vtable = v, init, fn;
7066 unsigned HOST_WIDE_INT size;
7067 unsigned HOST_WIDE_INT elt_size, access_index;
7068 tree domain_type;
7070 if (can_refer)
7071 *can_refer = true;
7073 /* First of all double check we have virtual table. */
7074 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7076 /* Pass down that we lost track of the target. */
7077 if (can_refer)
7078 *can_refer = false;
7079 return NULL_TREE;
7082 init = ctor_for_folding (v);
7084 /* The virtual tables should always be born with constructors
7085 and we always should assume that they are available for
7086 folding. At the moment we do not stream them in all cases,
7087 but it should never happen that ctor seem unreachable. */
7088 gcc_assert (init);
7089 if (init == error_mark_node)
7091 /* Pass down that we lost track of the target. */
7092 if (can_refer)
7093 *can_refer = false;
7094 return NULL_TREE;
/* Compute the bit position of entry TOKEN within the vtable array.  */
7096 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7097 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7098 offset *= BITS_PER_UNIT;
7099 offset += token * size;
7101 /* Lookup the value in the constructor that is assumed to be array.
7102 This is equivalent to
7103 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7104 offset, size, NULL);
7105 but in a constant time. We expect that frontend produced a simple
7106 array without indexed initializers. */
7108 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7109 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7110 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7111 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7113 access_index = offset / BITS_PER_UNIT / elt_size;
7114 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7116 /* The C++ FE can now produce indexed fields, and we check if the indexes
7117 match. */
7118 if (access_index < CONSTRUCTOR_NELTS (init))
7120 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7121 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7122 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7123 STRIP_NOPS (fn);
7125 else
7126 fn = NULL;
7128 /* For type inconsistent program we may end up looking up virtual method
7129 in virtual table that does not contain TOKEN entries. We may overrun
7130 the virtual table and pick up a constant or RTTI info pointer.
7131 In any case the call is undefined. */
7132 if (!fn
7133 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7134 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7135 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7136 else
7138 fn = TREE_OPERAND (fn, 0);
7140 /* When cgraph node is missing and function is not public, we cannot
7141 devirtualize. This can happen in WHOPR when the actual method
7142 ends up in other partition, because we found devirtualization
7143 possibility too late. */
7144 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7146 if (can_refer)
7148 *can_refer = false;
7149 return fn;
7151 return NULL_TREE;
7155 /* Make sure we create a cgraph node for functions we'll reference.
7156 They can be non-existent if the reference comes from an entry
7157 of an external vtable for example. */
7158 cgraph_node::get_create (fn);
7160 return fn;
7163 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7164 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7165 KNOWN_BINFO carries the binfo describing the true type of
7166 OBJ_TYPE_REF_OBJECT(REF).
7167 Set CAN_REFER if non-NULL to false if method
7168 is not referable or if the virtual table is ill-formed (such as rewriten
7169 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7171 tree
7172 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7173 bool *can_refer)
7175 unsigned HOST_WIDE_INT offset;
7176 tree v;
7178 v = BINFO_VTABLE (known_binfo);
7179 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7180 if (!v)
7181 return NULL_TREE;
7183 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7185 if (can_refer)
7186 *can_refer = false;
7187 return NULL_TREE;
7189 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7192 /* Given a pointer value T, return a simplified version of an
7193 indirection through T, or NULL_TREE if no simplification is
7194 possible. Note that the resulting type may be different from
7195 the type pointed to in the sense that it is still compatible
7196 from the langhooks point of view. */
7198 tree
7199 gimple_fold_indirect_ref (tree t)
7201 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7202 tree sub = t;
7203 tree subtype;
/* Look through no-op conversions on the pointer value.  */
7205 STRIP_NOPS (sub);
7206 subtype = TREE_TYPE (sub);
7207 if (!POINTER_TYPE_P (subtype)
7208 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7209 return NULL_TREE;
7211 if (TREE_CODE (sub) == ADDR_EXPR)
7213 tree op = TREE_OPERAND (sub, 0);
7214 tree optype = TREE_TYPE (op);
7215 /* *&p => p */
7216 if (useless_type_conversion_p (type, optype))
7217 return op;
7219 /* *(foo *)&fooarray => fooarray[0] */
7220 if (TREE_CODE (optype) == ARRAY_TYPE
7221 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7222 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7224 tree type_domain = TYPE_DOMAIN (optype);
7225 tree min_val = size_zero_node;
7226 if (type_domain && TYPE_MIN_VALUE (type_domain))
7227 min_val = TYPE_MIN_VALUE (type_domain);
7228 if (TREE_CODE (min_val) == INTEGER_CST)
7229 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7231 /* *(foo *)&complexfoo => __real__ complexfoo */
7232 else if (TREE_CODE (optype) == COMPLEX_TYPE
7233 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7234 return fold_build1 (REALPART_EXPR, type, op);
7235 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7236 else if (TREE_CODE (optype) == VECTOR_TYPE
7237 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7239 tree part_width = TYPE_SIZE (type);
7240 tree index = bitsize_int (0);
7241 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7245 /* *(p + CST) -> ... */
7246 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7247 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7249 tree addr = TREE_OPERAND (sub, 0);
7250 tree off = TREE_OPERAND (sub, 1);
7251 tree addrtype;
7253 STRIP_NOPS (addr);
7254 addrtype = TREE_TYPE (addr);
7256 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7257 if (TREE_CODE (addr) == ADDR_EXPR
7258 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7259 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7260 && tree_fits_uhwi_p (off))
7262 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7263 tree part_width = TYPE_SIZE (type);
7264 unsigned HOST_WIDE_INT part_widthi
7265 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7266 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7267 tree index = bitsize_int (indexi);
/* Only fold the access when it stays within the vector.  */
7268 if (known_lt (offset / part_widthi,
7269 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7270 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7271 part_width, index);
7274 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7275 if (TREE_CODE (addr) == ADDR_EXPR
7276 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7277 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7279 tree size = TYPE_SIZE_UNIT (type);
7280 if (tree_int_cst_equal (size, off))
7281 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7284 /* *(p + CST) -> MEM_REF <p, CST>. */
7285 if (TREE_CODE (addr) != ADDR_EXPR
7286 || DECL_P (TREE_OPERAND (addr, 0)))
7287 return fold_build2 (MEM_REF, type,
7288 addr,
7289 wide_int_to_tree (ptype, wi::to_wide (off)));
7292 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7293 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7294 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7295 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7297 tree type_domain;
7298 tree min_val = size_zero_node;
7299 tree osub = sub;
7300 sub = gimple_fold_indirect_ref (sub);
7301 if (! sub)
7302 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7303 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7304 if (type_domain && TYPE_MIN_VALUE (type_domain))
7305 min_val = TYPE_MIN_VALUE (type_domain);
7306 if (TREE_CODE (min_val) == INTEGER_CST)
7307 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7310 return NULL_TREE;
7313 /* Return true if CODE is an operation that when operating on signed
7314 integer types involves undefined behavior on overflow and the
7315 operation can be expressed with unsigned arithmetic. */
7317 bool
7318 arith_code_with_undefined_signed_overflow (tree_code code)
7320 switch (code)
7322 case PLUS_EXPR:
7323 case MINUS_EXPR:
7324 case MULT_EXPR:
7325 case NEGATE_EXPR:
7326 case POINTER_PLUS_EXPR:
7327 return true;
7328 default:
7329 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* Carry out the arithmetic in the unsigned variant of the LHS type,
     where overflow has defined wrapping semantics.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand to TYPE (op 0 is the LHS, so start at 1),
     accumulating any needed conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh SSA name of the unsigned type as its result.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on non-pointer types; once the
     operands are unsigned integers it becomes a plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Finally convert the unsigned result back to the original LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7371 /* The valueization hook we use for the gimple_build API simplification.
7372 This makes us match fold_buildN behavior by only combining with
7373 statements in the sequence(s) we are currently building. */
7375 static tree
7376 gimple_build_valueize (tree op)
7378 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7379 return op;
7380 return NULL_TREE;
7383 /* Build the expression CODE OP0 of type TYPE with location LOC,
7384 simplifying it first if possible. Returns the built
7385 expression value and appends statements possibly defining it
7386 to SEQ. */
7388 tree
7389 gimple_build (gimple_seq *seq, location_t loc,
7390 enum tree_code code, tree type, tree op0)
7392 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7393 if (!res)
7395 res = create_tmp_reg_or_ssa_name (type);
7396 gimple *stmt;
7397 if (code == REALPART_EXPR
7398 || code == IMAGPART_EXPR
7399 || code == VIEW_CONVERT_EXPR)
7400 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7401 else
7402 stmt = gimple_build_assign (res, code, op0);
7403 gimple_set_location (stmt, loc);
7404 gimple_seq_add_stmt_without_update (seq, stmt);
7406 return res;
7409 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7410 simplifying it first if possible. Returns the built
7411 expression value and appends statements possibly defining it
7412 to SEQ. */
7414 tree
7415 gimple_build (gimple_seq *seq, location_t loc,
7416 enum tree_code code, tree type, tree op0, tree op1)
7418 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7419 if (!res)
7421 res = create_tmp_reg_or_ssa_name (type);
7422 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7423 gimple_set_location (stmt, loc);
7424 gimple_seq_add_stmt_without_update (seq, stmt);
7426 return res;
7429 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7430 simplifying it first if possible. Returns the built
7431 expression value and appends statements possibly defining it
7432 to SEQ. */
7434 tree
7435 gimple_build (gimple_seq *seq, location_t loc,
7436 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7438 tree res = gimple_simplify (code, type, op0, op1, op2,
7439 seq, gimple_build_valueize);
7440 if (!res)
7442 res = create_tmp_reg_or_ssa_name (type);
7443 gimple *stmt;
7444 if (code == BIT_FIELD_REF)
7445 stmt = gimple_build_assign (res, code,
7446 build3 (code, type, op0, op1, op2));
7447 else
7448 stmt = gimple_build_assign (res, code, op0, op1, op2);
7449 gimple_set_location (stmt, loc);
7450 gimple_seq_add_stmt_without_update (seq, stmt);
7452 return res;
7455 /* Build the call FN (ARG0) with a result of type TYPE
7456 (or no result if TYPE is void) with location LOC,
7457 simplifying it first if possible. Returns the built
7458 expression value (or NULL_TREE if TYPE is void) and appends
7459 statements possibly defining it to SEQ. */
7461 tree
7462 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7463 tree type, tree arg0)
7465 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7466 if (!res)
7468 gcall *stmt;
7469 if (internal_fn_p (fn))
7470 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7471 else
7473 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7474 stmt = gimple_build_call (decl, 1, arg0);
7476 if (!VOID_TYPE_P (type))
7478 res = create_tmp_reg_or_ssa_name (type);
7479 gimple_call_set_lhs (stmt, res);
7481 gimple_set_location (stmt, loc);
7482 gimple_seq_add_stmt_without_update (seq, stmt);
7484 return res;
7487 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7488 (or no result if TYPE is void) with location LOC,
7489 simplifying it first if possible. Returns the built
7490 expression value (or NULL_TREE if TYPE is void) and appends
7491 statements possibly defining it to SEQ. */
7493 tree
7494 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7495 tree type, tree arg0, tree arg1)
7497 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7498 if (!res)
7500 gcall *stmt;
7501 if (internal_fn_p (fn))
7502 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7503 else
7505 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7506 stmt = gimple_build_call (decl, 2, arg0, arg1);
7508 if (!VOID_TYPE_P (type))
7510 res = create_tmp_reg_or_ssa_name (type);
7511 gimple_call_set_lhs (stmt, res);
7513 gimple_set_location (stmt, loc);
7514 gimple_seq_add_stmt_without_update (seq, stmt);
7516 return res;
7519 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7520 (or no result if TYPE is void) with location LOC,
7521 simplifying it first if possible. Returns the built
7522 expression value (or NULL_TREE if TYPE is void) and appends
7523 statements possibly defining it to SEQ. */
7525 tree
7526 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7527 tree type, tree arg0, tree arg1, tree arg2)
7529 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7530 seq, gimple_build_valueize);
7531 if (!res)
7533 gcall *stmt;
7534 if (internal_fn_p (fn))
7535 stmt = gimple_build_call_internal (as_internal_fn (fn),
7536 3, arg0, arg1, arg2);
7537 else
7539 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7540 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7542 if (!VOID_TYPE_P (type))
7544 res = create_tmp_reg_or_ssa_name (type);
7545 gimple_call_set_lhs (stmt, res);
7547 gimple_set_location (stmt, loc);
7548 gimple_seq_add_stmt_without_update (seq, stmt);
7550 return res;
7553 /* Build the conversion (TYPE) OP with a result of type TYPE
7554 with location LOC if such conversion is neccesary in GIMPLE,
7555 simplifying it first.
7556 Returns the built expression value and appends
7557 statements possibly defining it to SEQ. */
7559 tree
7560 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7562 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7563 return op;
7564 return gimple_build (seq, loc, NOP_EXPR, type, op);
7567 /* Build the conversion (ptrofftype) OP with a result of a type
7568 compatible with ptrofftype with location LOC if such conversion
7569 is neccesary in GIMPLE, simplifying it first.
7570 Returns the built expression value and appends
7571 statements possibly defining it to SEQ. */
7573 tree
7574 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7576 if (ptrofftype_p (TREE_TYPE (op)))
7577 return op;
7578 return gimple_convert (seq, loc, sizetype, op);
7581 /* Build a vector of type TYPE in which each element has the value OP.
7582 Return a gimple value for the result, appending any new statements
7583 to SEQ. */
7585 tree
7586 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7587 tree op)
7589 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7590 && !CONSTANT_CLASS_P (op))
7591 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7593 tree res, vec = build_vector_from_val (type, op);
7594 if (is_gimple_val (vec))
7595 return vec;
7596 if (gimple_in_ssa_p (cfun))
7597 res = make_ssa_name (type);
7598 else
7599 res = create_tmp_reg (type);
7600 gimple *stmt = gimple_build_assign (res, vec);
7601 gimple_set_location (stmt, loc);
7602 gimple_seq_add_stmt_without_update (seq, stmt);
7603 return res;
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scanning the encoded elements suffices: every full element is a
     repetition of one of them.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant, so a VECTOR_CST cannot
	   be used; emit a CONSTRUCTOR of all NELTS elements instead and
	   assign it to a fresh result.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* NOTE: this deliberately reuses the outer loop counter I; the
	   function returns below before the outer loop resumes.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements were constant: build the VECTOR_CST directly.  */
  return builder->build ();
}
7644 /* Return true if the result of assignment STMT is known to be non-negative.
7645 If the return value is based on the assumption that signed overflow is
7646 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7647 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7649 static bool
7650 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7651 int depth)
7653 enum tree_code code = gimple_assign_rhs_code (stmt);
7654 switch (get_gimple_rhs_class (code))
7656 case GIMPLE_UNARY_RHS:
7657 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7658 gimple_expr_type (stmt),
7659 gimple_assign_rhs1 (stmt),
7660 strict_overflow_p, depth);
7661 case GIMPLE_BINARY_RHS:
7662 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7663 gimple_expr_type (stmt),
7664 gimple_assign_rhs1 (stmt),
7665 gimple_assign_rhs2 (stmt),
7666 strict_overflow_p, depth);
7667 case GIMPLE_TERNARY_RHS:
7668 return false;
7669 case GIMPLE_SINGLE_RHS:
7670 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7671 strict_overflow_p, depth);
7672 case GIMPLE_INVALID_RHS:
7673 break;
7675 gcc_unreachable ();
7678 /* Return true if return value of call STMT is known to be non-negative.
7679 If the return value is based on the assumption that signed overflow is
7680 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7681 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7683 static bool
7684 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7685 int depth)
7687 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7688 gimple_call_arg (stmt, 0) : NULL_TREE;
7689 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7690 gimple_call_arg (stmt, 1) : NULL_TREE;
7692 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7693 gimple_call_combined_fn (stmt),
7694 arg0,
7695 arg1,
7696 strict_overflow_p, depth);
7699 /* Return true if return value of call STMT is known to be non-negative.
7700 If the return value is based on the assumption that signed overflow is
7701 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7702 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7704 static bool
7705 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7706 int depth)
7708 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7710 tree arg = gimple_phi_arg_def (stmt, i);
7711 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7712 return false;
7714 return true;
7717 /* Return true if STMT is known to compute a non-negative value.
7718 If the return value is based on the assumption that signed overflow is
7719 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7720 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7722 bool
7723 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7724 int depth)
7726 switch (gimple_code (stmt))
7728 case GIMPLE_ASSIGN:
7729 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7730 depth);
7731 case GIMPLE_CALL:
7732 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7733 depth);
7734 case GIMPLE_PHI:
7735 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7736 depth);
7737 default:
7738 return false;
7742 /* Return true if the floating-point value computed by assignment STMT
7743 is known to have an integer value. We also allow +Inf, -Inf and NaN
7744 to be considered integer values. Return false for signaling NaN.
7746 DEPTH is the current nesting depth of the query. */
7748 static bool
7749 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7751 enum tree_code code = gimple_assign_rhs_code (stmt);
7752 switch (get_gimple_rhs_class (code))
7754 case GIMPLE_UNARY_RHS:
7755 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7756 gimple_assign_rhs1 (stmt), depth);
7757 case GIMPLE_BINARY_RHS:
7758 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7759 gimple_assign_rhs1 (stmt),
7760 gimple_assign_rhs2 (stmt), depth);
7761 case GIMPLE_TERNARY_RHS:
7762 return false;
7763 case GIMPLE_SINGLE_RHS:
7764 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7765 case GIMPLE_INVALID_RHS:
7766 break;
7768 gcc_unreachable ();
7771 /* Return true if the floating-point value computed by call STMT is known
7772 to have an integer value. We also allow +Inf, -Inf and NaN to be
7773 considered integer values. Return false for signaling NaN.
7775 DEPTH is the current nesting depth of the query. */
7777 static bool
7778 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7780 tree arg0 = (gimple_call_num_args (stmt) > 0
7781 ? gimple_call_arg (stmt, 0)
7782 : NULL_TREE);
7783 tree arg1 = (gimple_call_num_args (stmt) > 1
7784 ? gimple_call_arg (stmt, 1)
7785 : NULL_TREE);
7786 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7787 arg0, arg1, depth);
7790 /* Return true if the floating-point result of phi STMT is known to have
7791 an integer value. We also allow +Inf, -Inf and NaN to be considered
7792 integer values. Return false for signaling NaN.
7794 DEPTH is the current nesting depth of the query. */
7796 static bool
7797 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7799 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7801 tree arg = gimple_phi_arg_def (stmt, i);
7802 if (!integer_valued_real_single_p (arg, depth + 1))
7803 return false;
7805 return true;
7808 /* Return true if the floating-point value computed by STMT is known
7809 to have an integer value. We also allow +Inf, -Inf and NaN to be
7810 considered integer values. Return false for signaling NaN.
7812 DEPTH is the current nesting depth of the query. */
7814 bool
7815 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7817 switch (gimple_code (stmt))
7819 case GIMPLE_ASSIGN:
7820 return gimple_assign_integer_valued_real_p (stmt, depth);
7821 case GIMPLE_CALL:
7822 return gimple_call_integer_valued_real_p (stmt, depth);
7823 case GIMPLE_PHI:
7824 return gimple_phi_integer_valued_real_p (stmt, depth);
7825 default:
7826 return false;