parse.c (decode_statement): Suppress "Unclassifiable statement" error if previous...
[official-gcc.git] / gcc / gimple-fold.c
blob688daf921542659fb96d74a8737b9ed7ce7c19d5
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
/* Flavors of string-length computation performed by get_range_strlen.  */

enum strlen_range_kind {
  /* Determine the exact constant length of the string.  */
  SRK_STRLEN,
  /* Determine the maximum constant length of the string.  */
  SRK_STRLENMAX,
  /* Determine a range of lengths bounded by object sizes.  When the
     length of a string cannot be determined, use the size of the
     enclosing object the string may be a member or element of as the
     upper bound.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Temporary until the rest of Martin's strlen range work is integrated.  */
  SRK_LENRANGE_2,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
86 static bool
87 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
89 /* Return true when DECL can be referenced from current unit.
90 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
91 We can get declarations that are not possible to reference for various
92 reasons:
94 1) When analyzing C++ virtual tables.
95 C++ virtual tables do have known constructors even
96 when they are keyed to other compilation unit.
97 Those tables can contain pointers to methods and vars
98 in other units. Those methods have both STATIC and EXTERNAL
99 set.
100 2) In WHOPR mode devirtualization might lead to reference
101 to method that was partitioned elsehwere.
102 In this case we have static VAR_DECL or FUNCTION_DECL
103 that has no corresponding callgraph/varpool node
104 declaring the body.
105 3) COMDAT functions referred by external vtables that
106 we devirtualize only during final compilation stage.
107 At this time we already decided that we will not output
108 the function body and thus we can't reference the symbol
109 directly. */
111 static bool
112 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
114 varpool_node *vnode;
115 struct cgraph_node *node;
116 symtab_node *snode;
118 if (DECL_ABSTRACT_P (decl))
119 return false;
121 /* We are concerned only about static/external vars and functions. */
122 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
123 || !VAR_OR_FUNCTION_DECL_P (decl))
124 return true;
126 /* Static objects can be referred only if they was not optimized out yet. */
127 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
129 /* Before we start optimizing unreachable code we can be sure all
130 static objects are defined. */
131 if (symtab->function_flags_ready)
132 return true;
133 snode = symtab_node::get (decl);
134 if (!snode || !snode->definition)
135 return false;
136 node = dyn_cast <cgraph_node *> (snode);
137 return !node || !node->global.inlined_to;
140 /* We will later output the initializer, so we can refer to it.
141 So we are concerned only when DECL comes from initializer of
142 external var or var that has been optimized out. */
143 if (!from_decl
144 || !VAR_P (from_decl)
145 || (!DECL_EXTERNAL (from_decl)
146 && (vnode = varpool_node::get (from_decl)) != NULL
147 && vnode->definition)
148 || (flag_ltrans
149 && (vnode = varpool_node::get (from_decl)) != NULL
150 && vnode->in_other_partition))
151 return true;
152 /* We are folding reference from external vtable. The vtable may reffer
153 to a symbol keyed to other compilation unit. The other compilation
154 unit may be in separate DSO and the symbol may be hidden. */
155 if (DECL_VISIBILITY_SPECIFIED (decl)
156 && DECL_EXTERNAL (decl)
157 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
158 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
159 return false;
160 /* When function is public, we always can introduce new reference.
161 Exception are the COMDAT functions where introducing a direct
162 reference imply need to include function body in the curren tunit. */
163 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
164 return true;
165 /* We have COMDAT. We are going to check if we still have definition
166 or if the definition is going to be output in other partition.
167 Bypass this when gimplifying; all needed functions will be produced.
169 As observed in PR20991 for already optimized out comdat virtual functions
170 it may be tempting to not necessarily give up because the copy will be
171 output elsewhere when corresponding vtable is output.
172 This is however not possible - ABI specify that COMDATs are output in
173 units where they are used and when the other unit was compiled with LTO
174 it is possible that vtable was kept public while the function itself
175 was privatized. */
176 if (!symtab->function_flags_ready)
177 return true;
179 snode = symtab_node::get (decl);
180 if (!snode
181 || ((!snode->definition || DECL_EXTERNAL (decl))
182 && (!snode->in_other_partition
183 || (!snode->forced_by_abi && !snode->force_output))))
184 return false;
185 node = dyn_cast <cgraph_node *> (snode);
186 return !node || !node->global.inlined_to;
189 /* Create a temporary for TYPE for a statement STMT. If the current function
190 is in SSA form, a SSA name is created. Otherwise a temporary register
191 is made. */
193 tree
194 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
196 if (gimple_in_ssa_p (cfun))
197 return make_ssa_name (type, stmt);
198 else
199 return create_tmp_reg (type);
202 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
203 acceptable form for is_gimple_min_invariant.
204 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
206 tree
207 canonicalize_constructor_val (tree cval, tree from_decl)
209 tree orig_cval = cval;
210 STRIP_NOPS (cval);
211 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
212 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
214 tree ptr = TREE_OPERAND (cval, 0);
215 if (is_gimple_min_invariant (ptr))
216 cval = build1_loc (EXPR_LOCATION (cval),
217 ADDR_EXPR, TREE_TYPE (ptr),
218 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
219 ptr,
220 fold_convert (ptr_type_node,
221 TREE_OPERAND (cval, 1))));
223 if (TREE_CODE (cval) == ADDR_EXPR)
225 tree base = NULL_TREE;
226 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
228 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
229 if (base)
230 TREE_OPERAND (cval, 0) = base;
232 else
233 base = get_base_address (TREE_OPERAND (cval, 0));
234 if (!base)
235 return NULL_TREE;
237 if (VAR_OR_FUNCTION_DECL_P (base)
238 && !can_refer_decl_in_current_unit_p (base, from_decl))
239 return NULL_TREE;
240 if (TREE_TYPE (base) == error_mark_node)
241 return NULL_TREE;
242 if (VAR_P (base))
243 TREE_ADDRESSABLE (base) = 1;
244 else if (TREE_CODE (base) == FUNCTION_DECL)
246 /* Make sure we create a cgraph node for functions we'll reference.
247 They can be non-existent if the reference comes from an entry
248 of an external vtable for example. */
249 cgraph_node::get_create (base);
251 /* Fixup types in global initializers. */
252 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
253 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
255 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
256 cval = fold_convert (TREE_TYPE (orig_cval), cval);
257 return cval;
259 if (TREE_OVERFLOW_P (cval))
260 return drop_tree_overflow (cval);
261 return orig_cval;
264 /* If SYM is a constant variable with known value, return the value.
265 NULL_TREE is returned otherwise. */
267 tree
268 get_symbol_constant_value (tree sym)
270 tree val = ctor_for_folding (sym);
271 if (val != error_mark_node)
273 if (val)
275 val = canonicalize_constructor_val (unshare_expr (val), sym);
276 if (val && is_gimple_min_invariant (val))
277 return val;
278 else
279 return NULL_TREE;
281 /* Variables declared 'const' without an initializer
282 have zero as the initializer if they may not be
283 overridden at link or run time. */
284 if (!val
285 && is_gimple_reg_type (TREE_TYPE (sym)))
286 return build_zero_cst (TREE_TYPE (sym));
289 return NULL_TREE;
294 /* Subroutine of fold_stmt. We perform several simplifications of the
295 memory reference tree EXPR and make sure to re-gimplify them properly
296 after propagation of constant addresses. IS_LHS is true if the
297 reference is supposed to be an lvalue. */
299 static tree
300 maybe_fold_reference (tree expr, bool is_lhs)
302 tree result;
304 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
305 || TREE_CODE (expr) == REALPART_EXPR
306 || TREE_CODE (expr) == IMAGPART_EXPR)
307 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
308 return fold_unary_loc (EXPR_LOCATION (expr),
309 TREE_CODE (expr),
310 TREE_TYPE (expr),
311 TREE_OPERAND (expr, 0));
312 else if (TREE_CODE (expr) == BIT_FIELD_REF
313 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
314 return fold_ternary_loc (EXPR_LOCATION (expr),
315 TREE_CODE (expr),
316 TREE_TYPE (expr),
317 TREE_OPERAND (expr, 0),
318 TREE_OPERAND (expr, 1),
319 TREE_OPERAND (expr, 2));
321 if (!is_lhs
322 && (result = fold_const_aggregate_ref (expr))
323 && is_gimple_min_invariant (result))
324 return result;
326 return NULL_TREE;
330 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
331 replacement rhs for the statement or NULL_TREE if no simplification
332 could be made. It is assumed that the operands have been previously
333 folded. */
335 static tree
336 fold_gimple_assign (gimple_stmt_iterator *si)
338 gimple *stmt = gsi_stmt (*si);
339 enum tree_code subcode = gimple_assign_rhs_code (stmt);
340 location_t loc = gimple_location (stmt);
342 tree result = NULL_TREE;
344 switch (get_gimple_rhs_class (subcode))
346 case GIMPLE_SINGLE_RHS:
348 tree rhs = gimple_assign_rhs1 (stmt);
350 if (TREE_CLOBBER_P (rhs))
351 return NULL_TREE;
353 if (REFERENCE_CLASS_P (rhs))
354 return maybe_fold_reference (rhs, false);
356 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
358 tree val = OBJ_TYPE_REF_EXPR (rhs);
359 if (is_gimple_min_invariant (val))
360 return val;
361 else if (flag_devirtualize && virtual_method_call_p (rhs))
363 bool final;
364 vec <cgraph_node *>targets
365 = possible_polymorphic_call_targets (rhs, stmt, &final);
366 if (final && targets.length () <= 1 && dbg_cnt (devirt))
368 if (dump_enabled_p ())
370 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
371 "resolving virtual function address "
372 "reference to function %s\n",
373 targets.length () == 1
374 ? targets[0]->name ()
375 : "NULL");
377 if (targets.length () == 1)
379 val = fold_convert (TREE_TYPE (val),
380 build_fold_addr_expr_loc
381 (loc, targets[0]->decl));
382 STRIP_USELESS_TYPE_CONVERSION (val);
384 else
385 /* We can not use __builtin_unreachable here because it
386 can not have address taken. */
387 val = build_int_cst (TREE_TYPE (val), 0);
388 return val;
393 else if (TREE_CODE (rhs) == ADDR_EXPR)
395 tree ref = TREE_OPERAND (rhs, 0);
396 tree tem = maybe_fold_reference (ref, true);
397 if (tem
398 && TREE_CODE (tem) == MEM_REF
399 && integer_zerop (TREE_OPERAND (tem, 1)))
400 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
401 else if (tem)
402 result = fold_convert (TREE_TYPE (rhs),
403 build_fold_addr_expr_loc (loc, tem));
404 else if (TREE_CODE (ref) == MEM_REF
405 && integer_zerop (TREE_OPERAND (ref, 1)))
406 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
408 if (result)
410 /* Strip away useless type conversions. Both the
411 NON_LVALUE_EXPR that may have been added by fold, and
412 "useless" type conversions that might now be apparent
413 due to propagation. */
414 STRIP_USELESS_TYPE_CONVERSION (result);
416 if (result != rhs && valid_gimple_rhs_p (result))
417 return result;
421 else if (TREE_CODE (rhs) == CONSTRUCTOR
422 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
424 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
425 unsigned i;
426 tree val;
428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
429 if (! CONSTANT_CLASS_P (val))
430 return NULL_TREE;
432 return build_vector_from_ctor (TREE_TYPE (rhs),
433 CONSTRUCTOR_ELTS (rhs));
436 else if (DECL_P (rhs))
437 return get_symbol_constant_value (rhs);
439 break;
441 case GIMPLE_UNARY_RHS:
442 break;
444 case GIMPLE_BINARY_RHS:
445 break;
447 case GIMPLE_TERNARY_RHS:
448 result = fold_ternary_loc (loc, subcode,
449 TREE_TYPE (gimple_assign_lhs (stmt)),
450 gimple_assign_rhs1 (stmt),
451 gimple_assign_rhs2 (stmt),
452 gimple_assign_rhs3 (stmt));
454 if (result)
456 STRIP_USELESS_TYPE_CONVERSION (result);
457 if (valid_gimple_rhs_p (result))
458 return result;
460 break;
462 case GIMPLE_INVALID_RHS:
463 gcc_unreachable ();
466 return NULL_TREE;
470 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
471 adjusting the replacement stmts location and virtual operands.
472 If the statement has a lhs the last stmt in the sequence is expected
473 to assign to that lhs. */
475 static void
476 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
478 gimple *stmt = gsi_stmt (*si_p);
480 if (gimple_has_location (stmt))
481 annotate_all_with_location (stmts, gimple_location (stmt));
483 /* First iterate over the replacement statements backward, assigning
484 virtual operands to their defining statements. */
485 gimple *laststore = NULL;
486 for (gimple_stmt_iterator i = gsi_last (stmts);
487 !gsi_end_p (i); gsi_prev (&i))
489 gimple *new_stmt = gsi_stmt (i);
490 if ((gimple_assign_single_p (new_stmt)
491 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
492 || (is_gimple_call (new_stmt)
493 && (gimple_call_flags (new_stmt)
494 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
496 tree vdef;
497 if (!laststore)
498 vdef = gimple_vdef (stmt);
499 else
500 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
501 gimple_set_vdef (new_stmt, vdef);
502 if (vdef && TREE_CODE (vdef) == SSA_NAME)
503 SSA_NAME_DEF_STMT (vdef) = new_stmt;
504 laststore = new_stmt;
508 /* Second iterate over the statements forward, assigning virtual
509 operands to their uses. */
510 tree reaching_vuse = gimple_vuse (stmt);
511 for (gimple_stmt_iterator i = gsi_start (stmts);
512 !gsi_end_p (i); gsi_next (&i))
514 gimple *new_stmt = gsi_stmt (i);
515 /* If the new statement possibly has a VUSE, update it with exact SSA
516 name we know will reach this one. */
517 if (gimple_has_mem_ops (new_stmt))
518 gimple_set_vuse (new_stmt, reaching_vuse);
519 gimple_set_modified (new_stmt, true);
520 if (gimple_vdef (new_stmt))
521 reaching_vuse = gimple_vdef (new_stmt);
524 /* If the new sequence does not do a store release the virtual
525 definition of the original statement. */
526 if (reaching_vuse
527 && reaching_vuse == gimple_vuse (stmt))
529 tree vdef = gimple_vdef (stmt);
530 if (vdef
531 && TREE_CODE (vdef) == SSA_NAME)
533 unlink_stmt_vdef (stmt);
534 release_ssa_name (vdef);
538 /* Finally replace the original statement with the sequence. */
539 gsi_replace_with_seq (si_p, stmts, false);
542 /* Convert EXPR into a GIMPLE value suitable for substitution on the
543 RHS of an assignment. Insert the necessary statements before
544 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
545 is replaced. If the call is expected to produces a result, then it
546 is replaced by an assignment of the new RHS to the result variable.
547 If the result is to be ignored, then the call is replaced by a
548 GIMPLE_NOP. A proper VDEF chain is retained by making the first
549 VUSE and the last VDEF of the whole sequence be the same as the replaced
550 statement and using new SSA names for stores in between. */
552 void
553 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
555 tree lhs;
556 gimple *stmt, *new_stmt;
557 gimple_stmt_iterator i;
558 gimple_seq stmts = NULL;
560 stmt = gsi_stmt (*si_p);
562 gcc_assert (is_gimple_call (stmt));
564 push_gimplify_context (gimple_in_ssa_p (cfun));
566 lhs = gimple_call_lhs (stmt);
567 if (lhs == NULL_TREE)
569 gimplify_and_add (expr, &stmts);
570 /* We can end up with folding a memcpy of an empty class assignment
571 which gets optimized away by C++ gimplification. */
572 if (gimple_seq_empty_p (stmts))
574 pop_gimplify_context (NULL);
575 if (gimple_in_ssa_p (cfun))
577 unlink_stmt_vdef (stmt);
578 release_defs (stmt);
580 gsi_replace (si_p, gimple_build_nop (), false);
581 return;
584 else
586 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
587 new_stmt = gimple_build_assign (lhs, tmp);
588 i = gsi_last (stmts);
589 gsi_insert_after_without_update (&i, new_stmt,
590 GSI_CONTINUE_LINKING);
593 pop_gimplify_context (NULL);
595 gsi_replace_with_seq_vops (si_p, stmts);
599 /* Replace the call at *GSI with the gimple value VAL. */
601 void
602 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
604 gimple *stmt = gsi_stmt (*gsi);
605 tree lhs = gimple_call_lhs (stmt);
606 gimple *repl;
607 if (lhs)
609 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
610 val = fold_convert (TREE_TYPE (lhs), val);
611 repl = gimple_build_assign (lhs, val);
613 else
614 repl = gimple_build_nop ();
615 tree vdef = gimple_vdef (stmt);
616 if (vdef && TREE_CODE (vdef) == SSA_NAME)
618 unlink_stmt_vdef (stmt);
619 release_ssa_name (vdef);
621 gsi_replace (gsi, repl, false);
624 /* Replace the call at *GSI with the new call REPL and fold that
625 again. */
627 static void
628 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
630 gimple *stmt = gsi_stmt (*gsi);
631 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
632 gimple_set_location (repl, gimple_location (stmt));
633 if (gimple_vdef (stmt)
634 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
636 gimple_set_vdef (repl, gimple_vdef (stmt));
637 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
639 if (gimple_vuse (stmt))
640 gimple_set_vuse (repl, gimple_vuse (stmt));
641 gsi_replace (gsi, repl, false);
642 fold_stmt (gsi);
645 /* Return true if VAR is a VAR_DECL or a component thereof. */
647 static bool
648 var_decl_component_p (tree var)
650 tree inner = var;
651 while (handled_component_p (inner))
652 inner = TREE_OPERAND (inner, 0);
653 return (DECL_P (inner)
654 || (TREE_CODE (inner) == MEM_REF
655 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
658 /* Return TRUE if the SIZE argument, representing the size of an
659 object, is in a range of values of which exactly zero is valid. */
661 static bool
662 size_must_be_zero_p (tree size)
664 if (integer_zerop (size))
665 return true;
667 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
668 return false;
670 tree type = TREE_TYPE (size);
671 int prec = TYPE_PRECISION (type);
673 /* Compute the value of SSIZE_MAX, the largest positive value that
674 can be stored in ssize_t, the signed counterpart of size_t. */
675 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
676 value_range valid_range (VR_RANGE,
677 build_int_cst (type, 0),
678 wide_int_to_tree (type, ssize_max));
679 value_range vr;
680 get_range_info (size, vr);
681 vr.intersect (&valid_range);
682 return vr.zero_p ();
685 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
686 diagnose (otherwise undefined) overlapping copies without preventing
687 folding. When folded, GCC guarantees that overlapping memcpy has
688 the same semantics as memmove. Call to the library memcpy need not
689 provide the same guarantee. Return false if no simplification can
690 be made. */
692 static bool
693 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
694 tree dest, tree src, int endp)
696 gimple *stmt = gsi_stmt (*gsi);
697 tree lhs = gimple_call_lhs (stmt);
698 tree len = gimple_call_arg (stmt, 2);
699 tree destvar, srcvar;
700 location_t loc = gimple_location (stmt);
702 bool nowarn = gimple_no_warning_p (stmt);
704 /* If the LEN parameter is a constant zero or in range where
705 the only valid value is zero, return DEST. */
706 if (size_must_be_zero_p (len))
708 gimple *repl;
709 if (gimple_call_lhs (stmt))
710 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
711 else
712 repl = gimple_build_nop ();
713 tree vdef = gimple_vdef (stmt);
714 if (vdef && TREE_CODE (vdef) == SSA_NAME)
716 unlink_stmt_vdef (stmt);
717 release_ssa_name (vdef);
719 gsi_replace (gsi, repl, false);
720 return true;
723 /* If SRC and DEST are the same (and not volatile), return
724 DEST{,+LEN,+LEN-1}. */
725 if (operand_equal_p (src, dest, 0))
727 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
728 It's safe and may even be emitted by GCC itself (see bug
729 32667). */
730 unlink_stmt_vdef (stmt);
731 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
732 release_ssa_name (gimple_vdef (stmt));
733 if (!lhs)
735 gsi_replace (gsi, gimple_build_nop (), false);
736 return true;
738 goto done;
740 else
742 tree srctype, desttype;
743 unsigned int src_align, dest_align;
744 tree off0;
745 const char *tmp_str;
746 unsigned HOST_WIDE_INT tmp_len;
748 /* Build accesses at offset zero with a ref-all character type. */
749 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
750 ptr_mode, true), 0);
752 /* If we can perform the copy efficiently with first doing all loads
753 and then all stores inline it that way. Currently efficiently
754 means that we can load all the memory into a single integer
755 register which is what MOVE_MAX gives us. */
756 src_align = get_pointer_alignment (src);
757 dest_align = get_pointer_alignment (dest);
758 if (tree_fits_uhwi_p (len)
759 && compare_tree_int (len, MOVE_MAX) <= 0
760 /* ??? Don't transform copies from strings with known length this
761 confuses the tree-ssa-strlen.c. This doesn't handle
762 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
763 reason. */
764 && !c_strlen (src, 2)
765 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
766 && memchr (tmp_str, 0, tmp_len) == NULL))
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
829 gsi_replace (gsi, new_stmt, false);
830 return true;
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
839 if (endp == 3)
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
904 else
905 return false;
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
941 return false;
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1016 if (srcvar == NULL_TREE)
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1029 else if (destvar == NULL_TREE)
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1048 gimple *new_stmt;
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080 set_vop_and_replace:
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1122 if (!fn)
1123 return false;
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1135 return true;
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1146 if (!fn)
1147 return false;
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1162 return true;
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1173 if (!fn)
1174 return false;
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1188 return true;
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1271 else
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1278 return true;
1281 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1283 static bool
1284 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1285 c_strlen_data *pdata, unsigned eltsize)
1287 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1289 /* The length computed by this invocation of the function. */
1290 tree val = NULL_TREE;
1292 /* True if VAL is an optimistic (tight) bound determined from
1293 the size of the character array in which the string may be
1294 stored. In that case, the computed VAL is used to set
1295 PDATA->MAXBOUND. */
1296 bool tight_bound = false;
1298 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1299 if (TREE_CODE (arg) == ADDR_EXPR
1300 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1302 tree op = TREE_OPERAND (arg, 0);
1303 if (integer_zerop (TREE_OPERAND (op, 1)))
1305 tree aop0 = TREE_OPERAND (op, 0);
1306 if (TREE_CODE (aop0) == INDIRECT_REF
1307 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1308 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1309 pdata, eltsize);
1311 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1312 && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
1314 /* Fail if an array is the last member of a struct object
1315 since it could be treated as a (fake) flexible array
1316 member. */
1317 tree idx = TREE_OPERAND (op, 1);
1319 arg = TREE_OPERAND (op, 0);
1320 tree optype = TREE_TYPE (arg);
1321 if (tree dom = TYPE_DOMAIN (optype))
1322 if (tree bound = TYPE_MAX_VALUE (dom))
1323 if (TREE_CODE (bound) == INTEGER_CST
1324 && TREE_CODE (idx) == INTEGER_CST
1325 && tree_int_cst_lt (bound, idx))
1326 return false;
1330 if (rkind == SRK_INT_VALUE)
1332 /* We are computing the maximum value (not string length). */
1333 val = arg;
1334 if (TREE_CODE (val) != INTEGER_CST
1335 || tree_int_cst_sgn (val) < 0)
1336 return false;
1338 else
1340 c_strlen_data lendata = { };
1341 val = c_strlen (arg, 1, &lendata, eltsize);
1343 if (!val && lendata.decl)
1345 /* ARG refers to an unterminated const character array.
1346 DATA.DECL with size DATA.LEN. */
1347 val = lendata.minlen;
1348 pdata->decl = lendata.decl;
1352 if (!val && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
1354 if (TREE_CODE (arg) == ADDR_EXPR)
1355 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1356 pdata, eltsize);
1358 if (TREE_CODE (arg) == ARRAY_REF)
1360 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1362 /* Determine the "innermost" array type. */
1363 while (TREE_CODE (optype) == ARRAY_TYPE
1364 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1365 optype = TREE_TYPE (optype);
1367 /* Avoid arrays of pointers. */
1368 tree eltype = TREE_TYPE (optype);
1369 if (TREE_CODE (optype) != ARRAY_TYPE
1370 || !INTEGRAL_TYPE_P (eltype))
1371 return false;
1373 /* Fail when the array bound is unknown or zero. */
1374 val = TYPE_SIZE_UNIT (optype);
1375 if (!val || integer_zerop (val))
1376 return false;
1378 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1379 integer_one_node);
1381 /* Set the minimum size to zero since the string in
1382 the array could have zero length. */
1383 pdata->minlen = ssize_int (0);
1385 tight_bound = true;
1387 else if (TREE_CODE (arg) == COMPONENT_REF
1388 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1389 == ARRAY_TYPE))
1391 /* Use the type of the member array to determine the upper
1392 bound on the length of the array. This may be overly
1393 optimistic if the array itself isn't NUL-terminated and
1394 the caller relies on the subsequent member to contain
1395 the NUL but that would only be considered valid if
1396 the array were the last member of a struct. */
1398 tree fld = TREE_OPERAND (arg, 1);
1400 tree optype = TREE_TYPE (fld);
1402 /* Determine the "innermost" array type. */
1403 while (TREE_CODE (optype) == ARRAY_TYPE
1404 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1405 optype = TREE_TYPE (optype);
1407 /* Fail when the array bound is unknown or zero. */
1408 val = TYPE_SIZE_UNIT (optype);
1409 if (!val || integer_zerop (val))
1410 return false;
1411 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1412 integer_one_node);
1414 /* Set the minimum size to zero since the string in
1415 the array could have zero length. */
1416 pdata->minlen = ssize_int (0);
1418 /* The array size determined above is an optimistic bound
1419 on the length. If the array isn't nul-terminated the
1420 length computed by the library function would be greater.
1421 Even though using strlen to cross the subobject boundary
1422 is undefined, avoid drawing conclusions from the member
1423 type about the length here. */
1424 tight_bound = true;
1426 else if (VAR_P (arg))
1428 /* Avoid handling pointers to arrays. GCC might misuse
1429 a pointer to an array of one bound to point to an array
1430 object of a greater bound. */
1431 tree argtype = TREE_TYPE (arg);
1432 if (TREE_CODE (argtype) == ARRAY_TYPE)
1434 val = TYPE_SIZE_UNIT (argtype);
1435 if (!val
1436 || TREE_CODE (val) != INTEGER_CST
1437 || integer_zerop (val))
1438 return false;
1439 val = wide_int_to_tree (TREE_TYPE (val),
1440 wi::sub (wi::to_wide (val), 1));
1442 /* Set the minimum size to zero since the string in
1443 the array could have zero length. */
1444 pdata->minlen = ssize_int (0);
1449 if (!val)
1450 return false;
1452 /* Adjust the lower bound on the string length as necessary. */
1453 if (!pdata->minlen
1454 || (rkind != SRK_STRLEN
1455 && TREE_CODE (pdata->minlen) == INTEGER_CST
1456 && TREE_CODE (val) == INTEGER_CST
1457 && tree_int_cst_lt (val, pdata->minlen)))
1458 pdata->minlen = val;
1460 if (pdata->maxbound)
1462 /* Adjust the tighter (more optimistic) string length bound
1463 if necessary and proceed to adjust the more conservative
1464 bound. */
1465 if (TREE_CODE (val) == INTEGER_CST)
1467 if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
1469 if (tree_int_cst_lt (pdata->maxbound, val))
1470 pdata->maxbound = val;
1472 else
1473 pdata->maxbound = build_all_ones_cst (size_type_node);
1475 else
1476 pdata->maxbound = val;
1478 else
1479 pdata->maxbound = val;
1481 if (tight_bound)
1483 /* VAL computed above represents an optimistically tight bound
1484 on the length of the string based on the referenced object's
1485 or subobject's type. Determine the conservative upper bound
1486 based on the enclosing object's size if possible. */
1487 if (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2)
1489 poly_int64 offset;
1490 tree base = get_addr_base_and_unit_offset (arg, &offset);
1491 if (!base)
1493 /* When the call above fails due to a non-constant offset
1494 assume the offset is zero and use the size of the whole
1495 enclosing object instead. */
1496 base = get_base_address (arg);
1497 offset = 0;
1499 /* If the base object is a pointer no upper bound on the length
1500 can be determined. Otherwise the maximum length is equal to
1501 the size of the enclosing object minus the offset of
1502 the referenced subobject minus 1 (for the terminating nul). */
1503 tree type = TREE_TYPE (base);
1504 if (TREE_CODE (type) == POINTER_TYPE
1505 || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
1506 val = build_all_ones_cst (size_type_node);
1507 else
1509 val = DECL_SIZE_UNIT (base);
1510 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1511 size_int (offset + 1));
1514 else
1515 return false;
1518 if (pdata->maxlen)
1520 /* Adjust the more conservative bound if possible/necessary
1521 and fail otherwise. */
1522 if (rkind != SRK_STRLEN)
1524 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1525 || TREE_CODE (val) != INTEGER_CST)
1526 return false;
1528 if (tree_int_cst_lt (pdata->maxlen, val))
1529 pdata->maxlen = val;
1530 return true;
1532 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1534 /* Fail if the length of this ARG is different from that
1535 previously determined from another ARG. */
1536 return false;
1540 pdata->maxlen = val;
1541 return rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2 || !integer_all_onesp (val);
1544 /* For an ARG referencing one or more strings, try to obtain the range
1545 of their lengths, or the size of the largest array ARG referes to if
1546 the range of lengths cannot be determined, and store all in *PDATA.
1547 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1548 the maximum constant value.
1549 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1550 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1551 length or if we are unable to determine the length, return false.
1552 VISITED is a bitmap of visited variables.
1553 RKIND determines the kind of value or range to obtain (see
1554 strlen_range_kind).
1555 Set PDATA->DECL if ARG refers to an unterminated constant array.
1556 On input, set ELTSIZE to 1 for normal single byte character strings,
1557 and either 2 or 4 for wide characer strings (the size of wchar_t).
1558 Return true if *PDATA was successfully populated and false otherwise. */
1560 static bool
1561 get_range_strlen (tree arg, bitmap *visited,
1562 strlen_range_kind rkind,
1563 c_strlen_data *pdata, unsigned eltsize)
1566 if (TREE_CODE (arg) != SSA_NAME)
1567 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1569 /* If ARG is registered for SSA update we cannot look at its defining
1570 statement. */
1571 if (name_registered_for_update_p (arg))
1572 return false;
1574 /* If we were already here, break the infinite cycle. */
1575 if (!*visited)
1576 *visited = BITMAP_ALLOC (NULL);
1577 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1578 return true;
1580 tree var = arg;
1581 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1583 switch (gimple_code (def_stmt))
1585 case GIMPLE_ASSIGN:
1586 /* The RHS of the statement defining VAR must either have a
1587 constant length or come from another SSA_NAME with a constant
1588 length. */
1589 if (gimple_assign_single_p (def_stmt)
1590 || gimple_assign_unary_nop_p (def_stmt))
1592 tree rhs = gimple_assign_rhs1 (def_stmt);
1593 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1595 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1597 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1598 gimple_assign_rhs3 (def_stmt) };
1600 for (unsigned int i = 0; i < 2; i++)
1601 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1603 if (rkind != SRK_LENRANGE_2)
1604 return false;
1605 /* Set the upper bound to the maximum to prevent
1606 it from being adjusted in the next iteration but
1607 leave MINLEN and the more conservative MAXBOUND
1608 determined so far alone (or leave them null if
1609 they haven't been set yet). That the MINLEN is
1610 in fact zero can be determined from MAXLEN being
1611 unbounded but the discovered minimum is used for
1612 diagnostics. */
1613 pdata->maxlen = build_all_ones_cst (size_type_node);
1615 return true;
1617 return false;
1619 case GIMPLE_PHI:
1620 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1621 must have a constant length. */
1622 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1624 tree arg = gimple_phi_arg (def_stmt, i)->def;
1626 /* If this PHI has itself as an argument, we cannot
1627 determine the string length of this argument. However,
1628 if we can find a constant string length for the other
1629 PHI args then we can still be sure that this is a
1630 constant string length. So be optimistic and just
1631 continue with the next argument. */
1632 if (arg == gimple_phi_result (def_stmt))
1633 continue;
1635 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1637 if (rkind != SRK_LENRANGE_2)
1638 return false;
1639 /* Set the upper bound to the maximum to prevent
1640 it from being adjusted in the next iteration but
1641 leave MINLEN and the more conservative MAXBOUND
1642 determined so far alone (or leave them null if
1643 they haven't been set yet). That the MINLEN is
1644 in fact zero can be determined from MAXLEN being
1645 unbounded but the discovered minimum is used for
1646 diagnostics. */
1647 pdata->maxlen = build_all_ones_cst (size_type_node);
1650 return true;
1652 default:
1653 return false;
1657 /* Determine the minimum and maximum value or string length that ARG
1658 refers to and store each in the first two elements of MINMAXLEN.
1659 For expressions that point to strings of unknown lengths that are
1660 character arrays, use the upper bound of the array as the maximum
1661 length. For example, given an expression like 'x ? array : "xyz"'
1662 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1663 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1664 stored in array.
1665 Return true if the range of the string lengths has been obtained
1666 from the upper bound of an array at the end of a struct. Such
1667 an array may hold a string that's longer than its upper bound
1668 due to it being used as a poor-man's flexible array member.
1670 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1671 and false if PHIs and COND_EXPRs are to be handled optimistically,
1672 if we can determine string length minimum and maximum; it will use
1673 the minimum from the ones where it can be determined.
1674 STRICT false should be only used for warning code.
1675 When non-null, clear *NONSTR if ARG refers to a constant array
1676 that is known not be nul-terminated. Otherwise set it to
1677 the declaration of the constant non-terminated array.
1679 ELTSIZE is 1 for normal single byte character strings, and 2 or
1680 4 for wide characer strings. ELTSIZE is by default 1. */
1682 bool
1683 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize, bool strict)
1685 bitmap visited = NULL;
1687 if (!get_range_strlen (arg, &visited, strict ? SRK_LENRANGE : SRK_LENRANGE_2,
1688 pdata, eltsize))
1690 /* On failure extend the length range to an impossible maximum
1691 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1692 members can stay unchanged regardless. */
1693 pdata->minlen = ssize_int (0);
1694 pdata->maxlen = build_all_ones_cst (size_type_node);
1696 else if (!pdata->minlen)
1697 pdata->minlen = ssize_int (0);
1699 /* Unless its null, leave the more conservative MAXBOUND unchanged. */
1700 if (!pdata->maxbound)
1701 pdata->maxbound = pdata->maxlen;
1703 if (visited)
1704 BITMAP_FREE (visited);
1706 return !integer_all_onesp (pdata->maxlen);
1709 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1710 For ARG of pointer types, NONSTR indicates if the caller is prepared
1711 to handle unterminated strings. For integer ARG and when RKIND ==
1712 SRK_INT_VALUE, NONSTR must be null.
1714 If an unterminated array is discovered and our caller handles
1715 unterminated arrays, then bubble up the offending DECL and
1716 return the maximum size. Otherwise return NULL. */
1718 static tree
1719 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1721 /* A non-null NONSTR is meaningless when determining the maximum
1722 value of an integer ARG. */
1723 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1724 /* ARG must have an integral type when RKIND says so. */
1725 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1727 bitmap visited = NULL;
1729 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1730 is unbounded. */
1731 c_strlen_data lendata = { };
1732 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1733 lendata.maxlen = NULL_TREE;
1734 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1735 lendata.maxlen = NULL_TREE;
1737 if (visited)
1738 BITMAP_FREE (visited);
1740 if (nonstr)
1742 /* For callers prepared to handle unterminated arrays set
1743 *NONSTR to point to the declaration of the array and return
1744 the maximum length/size. */
1745 *nonstr = lendata.decl;
1746 return lendata.maxlen;
1749 /* Fail if the constant array isn't nul-terminated. */
1750 return lendata.decl ? NULL_TREE : lendata.maxlen;
1754 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1755 If LEN is not NULL, it represents the length of the string to be
1756 copied. Return NULL_TREE if no simplification can be made. */
1758 static bool
1759 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1760 tree dest, tree src)
1762 gimple *stmt = gsi_stmt (*gsi);
1763 location_t loc = gimple_location (stmt);
1764 tree fn;
1766 /* If SRC and DEST are the same (and not volatile), return DEST. */
1767 if (operand_equal_p (src, dest, 0))
1769 /* Issue -Wrestrict unless the pointers are null (those do
1770 not point to objects and so do not indicate an overlap;
1771 such calls could be the result of sanitization and jump
1772 threading). */
1773 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1775 tree func = gimple_call_fndecl (stmt);
1777 warning_at (loc, OPT_Wrestrict,
1778 "%qD source argument is the same as destination",
1779 func);
1782 replace_call_with_value (gsi, dest);
1783 return true;
1786 if (optimize_function_for_size_p (cfun))
1787 return false;
1789 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1790 if (!fn)
1791 return false;
1793 /* Set to non-null if ARG refers to an unterminated array. */
1794 tree nonstr = NULL;
1795 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1797 if (nonstr)
1799 /* Avoid folding calls with unterminated arrays. */
1800 if (!gimple_no_warning_p (stmt))
1801 warn_string_no_nul (loc, "strcpy", src, nonstr);
1802 gimple_set_no_warning (stmt, true);
1803 return false;
1806 if (!len)
1807 return false;
1809 len = fold_convert_loc (loc, size_type_node, len);
1810 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1811 len = force_gimple_operand_gsi (gsi, len, true,
1812 NULL_TREE, true, GSI_SAME_STMT);
1813 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1814 replace_call_with_call_and_fold (gsi, repl);
1815 return true;
1818 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1819 If SLEN is not NULL, it represents the length of the source string.
1820 Return NULL_TREE if no simplification can be made. */
1822 static bool
1823 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1824 tree dest, tree src, tree len)
1826 gimple *stmt = gsi_stmt (*gsi);
1827 location_t loc = gimple_location (stmt);
1828 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1830 /* If the LEN parameter is zero, return DEST. */
1831 if (integer_zerop (len))
1833 /* Avoid warning if the destination refers to a an array/pointer
1834 decorate with attribute nonstring. */
1835 if (!nonstring)
1837 tree fndecl = gimple_call_fndecl (stmt);
1839 /* Warn about the lack of nul termination: the result is not
1840 a (nul-terminated) string. */
1841 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1842 if (slen && !integer_zerop (slen))
1843 warning_at (loc, OPT_Wstringop_truncation,
1844 "%G%qD destination unchanged after copying no bytes "
1845 "from a string of length %E",
1846 stmt, fndecl, slen);
1847 else
1848 warning_at (loc, OPT_Wstringop_truncation,
1849 "%G%qD destination unchanged after copying no bytes",
1850 stmt, fndecl);
1853 replace_call_with_value (gsi, dest);
1854 return true;
1857 /* We can't compare slen with len as constants below if len is not a
1858 constant. */
1859 if (TREE_CODE (len) != INTEGER_CST)
1860 return false;
1862 /* Now, we must be passed a constant src ptr parameter. */
1863 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1864 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1865 return false;
1867 /* The size of the source string including the terminating nul. */
1868 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1870 /* We do not support simplification of this case, though we do
1871 support it when expanding trees into RTL. */
1872 /* FIXME: generate a call to __builtin_memset. */
1873 if (tree_int_cst_lt (ssize, len))
1874 return false;
1876 /* Diagnose truncation that leaves the copy unterminated. */
1877 maybe_diag_stxncpy_trunc (*gsi, src, len);
1879 /* OK transform into builtin memcpy. */
1880 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1881 if (!fn)
1882 return false;
1884 len = fold_convert_loc (loc, size_type_node, len);
1885 len = force_gimple_operand_gsi (gsi, len, true,
1886 NULL_TREE, true, GSI_SAME_STMT);
1887 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1888 replace_call_with_call_and_fold (gsi, repl);
1890 return true;
1893 /* Fold function call to builtin strchr or strrchr.
1894 If both arguments are constant, evaluate and fold the result,
1895 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1896 In general strlen is significantly faster than strchr
1897 due to being a simpler operation. */
1898 static bool
1899 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1901 gimple *stmt = gsi_stmt (*gsi);
1902 tree str = gimple_call_arg (stmt, 0);
1903 tree c = gimple_call_arg (stmt, 1);
1904 location_t loc = gimple_location (stmt);
1905 const char *p;
1906 char ch;
1908 if (!gimple_call_lhs (stmt))
1909 return false;
1911 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1913 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1915 if (p1 == NULL)
1917 replace_call_with_value (gsi, integer_zero_node);
1918 return true;
1921 tree len = build_int_cst (size_type_node, p1 - p);
1922 gimple_seq stmts = NULL;
1923 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1924 POINTER_PLUS_EXPR, str, len);
1925 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1926 gsi_replace_with_seq_vops (gsi, stmts);
1927 return true;
1930 if (!integer_zerop (c))
1931 return false;
1933 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1934 if (is_strrchr && optimize_function_for_size_p (cfun))
1936 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1938 if (strchr_fn)
1940 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1941 replace_call_with_call_and_fold (gsi, repl);
1942 return true;
1945 return false;
1948 tree len;
1949 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1951 if (!strlen_fn)
1952 return false;
1954 /* Create newstr = strlen (str). */
1955 gimple_seq stmts = NULL;
1956 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1957 gimple_set_location (new_stmt, loc);
1958 len = create_tmp_reg_or_ssa_name (size_type_node);
1959 gimple_call_set_lhs (new_stmt, len);
1960 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1962 /* Create (str p+ strlen (str)). */
1963 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1964 POINTER_PLUS_EXPR, str, len);
1965 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1966 gsi_replace_with_seq_vops (gsi, stmts);
1967 /* gsi now points at the assignment to the lhs, get a
1968 stmt iterator to the strlen.
1969 ??? We can't use gsi_for_stmt as that doesn't work when the
1970 CFG isn't built yet. */
1971 gimple_stmt_iterator gsi2 = *gsi;
1972 gsi_prev (&gsi2);
1973 fold_stmt (&gsi2);
1974 return true;
1977 /* Fold function call to builtin strstr.
1978 If both arguments are constant, evaluate and fold the result,
1979 additionally fold strstr (x, "") into x and strstr (x, "c")
1980 into strchr (x, 'c'). */
1981 static bool
1982 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1984 gimple *stmt = gsi_stmt (*gsi);
1985 tree haystack = gimple_call_arg (stmt, 0);
1986 tree needle = gimple_call_arg (stmt, 1);
1987 const char *p, *q;
1989 if (!gimple_call_lhs (stmt))
1990 return false;
1992 q = c_getstr (needle);
1993 if (q == NULL)
1994 return false;
1996 if ((p = c_getstr (haystack)))
1998 const char *r = strstr (p, q);
2000 if (r == NULL)
2002 replace_call_with_value (gsi, integer_zero_node);
2003 return true;
2006 tree len = build_int_cst (size_type_node, r - p);
2007 gimple_seq stmts = NULL;
2008 gimple *new_stmt
2009 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2010 haystack, len);
2011 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2012 gsi_replace_with_seq_vops (gsi, stmts);
2013 return true;
2016 /* For strstr (x, "") return x. */
2017 if (q[0] == '\0')
2019 replace_call_with_value (gsi, haystack);
2020 return true;
2023 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2024 if (q[1] == '\0')
2026 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2027 if (strchr_fn)
2029 tree c = build_int_cst (integer_type_node, q[0]);
2030 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2031 replace_call_with_call_and_fold (gsi, repl);
2032 return true;
2036 return false;
2039 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2040 to the call.
2042 Return NULL_TREE if no simplification was possible, otherwise return the
2043 simplified form of the call as a tree.
2045 The simplified form may be a constant or other expression which
2046 computes the same value, but in a more efficient manner (including
2047 calls to other builtin functions).
2049 The call may contain arguments which need to be evaluated, but
2050 which are not useful to determine the result of the call. In
2051 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2052 COMPOUND_EXPR will be an argument which must be evaluated.
2053 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2054 COMPOUND_EXPR in the chain will contain the tree for the simplified
2055 form of the builtin function call. */
2057 static bool
2058 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2060 gimple *stmt = gsi_stmt (*gsi);
2061 location_t loc = gimple_location (stmt);
2063 const char *p = c_getstr (src);
2065 /* If the string length is zero, return the dst parameter. */
2066 if (p && *p == '\0')
2068 replace_call_with_value (gsi, dst);
2069 return true;
2072 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2073 return false;
2075 /* See if we can store by pieces into (dst + strlen(dst)). */
2076 tree newdst;
2077 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2078 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2080 if (!strlen_fn || !memcpy_fn)
2081 return false;
2083 /* If the length of the source string isn't computable don't
2084 split strcat into strlen and memcpy. */
2085 tree len = get_maxval_strlen (src, SRK_STRLEN);
2086 if (! len)
2087 return false;
2089 /* Create strlen (dst). */
2090 gimple_seq stmts = NULL, stmts2;
2091 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2092 gimple_set_location (repl, loc);
2093 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2094 gimple_call_set_lhs (repl, newdst);
2095 gimple_seq_add_stmt_without_update (&stmts, repl);
2097 /* Create (dst p+ strlen (dst)). */
2098 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2099 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2100 gimple_seq_add_seq_without_update (&stmts, stmts2);
2102 len = fold_convert_loc (loc, size_type_node, len);
2103 len = size_binop_loc (loc, PLUS_EXPR, len,
2104 build_int_cst (size_type_node, 1));
2105 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2106 gimple_seq_add_seq_without_update (&stmts, stmts2);
2108 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2109 gimple_seq_add_stmt_without_update (&stmts, repl);
2110 if (gimple_call_lhs (stmt))
2112 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2113 gimple_seq_add_stmt_without_update (&stmts, repl);
2114 gsi_replace_with_seq_vops (gsi, stmts);
2115 /* gsi now points at the assignment to the lhs, get a
2116 stmt iterator to the memcpy call.
2117 ??? We can't use gsi_for_stmt as that doesn't work when the
2118 CFG isn't built yet. */
2119 gimple_stmt_iterator gsi2 = *gsi;
2120 gsi_prev (&gsi2);
2121 fold_stmt (&gsi2);
2123 else
2125 gsi_replace_with_seq_vops (gsi, stmts);
2126 fold_stmt (gsi);
2128 return true;
2131 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2132 are the arguments to the call. */
2134 static bool
2135 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2137 gimple *stmt = gsi_stmt (*gsi);
2138 tree dest = gimple_call_arg (stmt, 0);
2139 tree src = gimple_call_arg (stmt, 1);
2140 tree size = gimple_call_arg (stmt, 2);
2141 tree fn;
2142 const char *p;
2145 p = c_getstr (src);
2146 /* If the SRC parameter is "", return DEST. */
2147 if (p && *p == '\0')
2149 replace_call_with_value (gsi, dest);
2150 return true;
2153 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2154 return false;
2156 /* If __builtin_strcat_chk is used, assume strcat is available. */
2157 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2158 if (!fn)
2159 return false;
2161 gimple *repl = gimple_build_call (fn, 2, dest, src);
2162 replace_call_with_call_and_fold (gsi, repl);
2163 return true;
2166 /* Simplify a call to the strncat builtin. */
2168 static bool
2169 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2171 gimple *stmt = gsi_stmt (*gsi);
2172 tree dst = gimple_call_arg (stmt, 0);
2173 tree src = gimple_call_arg (stmt, 1);
2174 tree len = gimple_call_arg (stmt, 2);
2176 const char *p = c_getstr (src);
2178 /* If the requested length is zero, or the src parameter string
2179 length is zero, return the dst parameter. */
2180 if (integer_zerop (len) || (p && *p == '\0'))
2182 replace_call_with_value (gsi, dst);
2183 return true;
2186 if (TREE_CODE (len) != INTEGER_CST || !p)
2187 return false;
2189 unsigned srclen = strlen (p);
2191 int cmpsrc = compare_tree_int (len, srclen);
2193 /* Return early if the requested len is less than the string length.
2194 Warnings will be issued elsewhere later. */
2195 if (cmpsrc < 0)
2196 return false;
2198 unsigned HOST_WIDE_INT dstsize;
2200 bool nowarn = gimple_no_warning_p (stmt);
2202 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2204 int cmpdst = compare_tree_int (len, dstsize);
2206 if (cmpdst >= 0)
2208 tree fndecl = gimple_call_fndecl (stmt);
2210 /* Strncat copies (at most) LEN bytes and always appends
2211 the terminating NUL so the specified bound should never
2212 be equal to (or greater than) the size of the destination.
2213 If it is, the copy could overflow. */
2214 location_t loc = gimple_location (stmt);
2215 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2216 cmpdst == 0
2217 ? G_("%G%qD specified bound %E equals "
2218 "destination size")
2219 : G_("%G%qD specified bound %E exceeds "
2220 "destination size %wu"),
2221 stmt, fndecl, len, dstsize);
2222 if (nowarn)
2223 gimple_set_no_warning (stmt, true);
2227 if (!nowarn && cmpsrc == 0)
2229 tree fndecl = gimple_call_fndecl (stmt);
2230 location_t loc = gimple_location (stmt);
2232 /* To avoid possible overflow the specified bound should also
2233 not be equal to the length of the source, even when the size
2234 of the destination is unknown (it's not an uncommon mistake
2235 to specify as the bound to strncpy the length of the source). */
2236 if (warning_at (loc, OPT_Wstringop_overflow_,
2237 "%G%qD specified bound %E equals source length",
2238 stmt, fndecl, len))
2239 gimple_set_no_warning (stmt, true);
2242 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2244 /* If the replacement _DECL isn't initialized, don't do the
2245 transformation. */
2246 if (!fn)
2247 return false;
2249 /* Otherwise, emit a call to strcat. */
2250 gcall *repl = gimple_build_call (fn, 2, dst, src);
2251 replace_call_with_call_and_fold (gsi, repl);
2252 return true;
2255 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2256 LEN, and SIZE. */
2258 static bool
2259 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2261 gimple *stmt = gsi_stmt (*gsi);
2262 tree dest = gimple_call_arg (stmt, 0);
2263 tree src = gimple_call_arg (stmt, 1);
2264 tree len = gimple_call_arg (stmt, 2);
2265 tree size = gimple_call_arg (stmt, 3);
2266 tree fn;
2267 const char *p;
2269 p = c_getstr (src);
2270 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2271 if ((p && *p == '\0')
2272 || integer_zerop (len))
2274 replace_call_with_value (gsi, dest);
2275 return true;
2278 if (! tree_fits_uhwi_p (size))
2279 return false;
2281 if (! integer_all_onesp (size))
2283 tree src_len = c_strlen (src, 1);
2284 if (src_len
2285 && tree_fits_uhwi_p (src_len)
2286 && tree_fits_uhwi_p (len)
2287 && ! tree_int_cst_lt (len, src_len))
2289 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2290 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2291 if (!fn)
2292 return false;
2294 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2295 replace_call_with_call_and_fold (gsi, repl);
2296 return true;
2298 return false;
2301 /* If __builtin_strncat_chk is used, assume strncat is available. */
2302 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2303 if (!fn)
2304 return false;
2306 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2307 replace_call_with_call_and_fold (gsi, repl);
2308 return true;
2311 /* Build and append gimple statements to STMTS that would load a first
2312 character of a memory location identified by STR. LOC is location
2313 of the statement. */
2315 static tree
2316 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2318 tree var;
2320 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2321 tree cst_uchar_ptr_node
2322 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2323 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2325 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2326 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2327 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2329 gimple_assign_set_lhs (stmt, var);
2330 gimple_seq_add_stmt_without_update (stmts, stmt);
2332 return var;
2335 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2336 FCODE is the name of the builtin. */
2338 static bool
2339 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2341 gimple *stmt = gsi_stmt (*gsi);
2342 tree callee = gimple_call_fndecl (stmt);
2343 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2345 tree type = integer_type_node;
2346 tree str1 = gimple_call_arg (stmt, 0);
2347 tree str2 = gimple_call_arg (stmt, 1);
2348 tree lhs = gimple_call_lhs (stmt);
2349 HOST_WIDE_INT length = -1;
2351 /* Handle strncmp and strncasecmp functions. */
2352 if (gimple_call_num_args (stmt) == 3)
2354 tree len = gimple_call_arg (stmt, 2);
2355 if (tree_fits_uhwi_p (len))
2356 length = tree_to_uhwi (len);
2359 /* If the LEN parameter is zero, return zero. */
2360 if (length == 0)
2362 replace_call_with_value (gsi, integer_zero_node);
2363 return true;
2366 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2367 if (operand_equal_p (str1, str2, 0))
2369 replace_call_with_value (gsi, integer_zero_node);
2370 return true;
2373 const char *p1 = c_getstr (str1);
2374 const char *p2 = c_getstr (str2);
2376 /* For known strings, return an immediate value. */
2377 if (p1 && p2)
2379 int r = 0;
2380 bool known_result = false;
2382 switch (fcode)
2384 case BUILT_IN_STRCMP:
2385 case BUILT_IN_STRCMP_EQ:
2387 r = strcmp (p1, p2);
2388 known_result = true;
2389 break;
2391 case BUILT_IN_STRNCMP:
2392 case BUILT_IN_STRNCMP_EQ:
2394 if (length == -1)
2395 break;
2396 r = strncmp (p1, p2, length);
2397 known_result = true;
2398 break;
2400 /* Only handleable situation is where the string are equal (result 0),
2401 which is already handled by operand_equal_p case. */
2402 case BUILT_IN_STRCASECMP:
2403 break;
2404 case BUILT_IN_STRNCASECMP:
2406 if (length == -1)
2407 break;
2408 r = strncmp (p1, p2, length);
2409 if (r == 0)
2410 known_result = true;
2411 break;
2413 default:
2414 gcc_unreachable ();
2417 if (known_result)
2419 replace_call_with_value (gsi, build_cmp_result (type, r));
2420 return true;
2424 bool nonzero_length = length >= 1
2425 || fcode == BUILT_IN_STRCMP
2426 || fcode == BUILT_IN_STRCMP_EQ
2427 || fcode == BUILT_IN_STRCASECMP;
2429 location_t loc = gimple_location (stmt);
2431 /* If the second arg is "", return *(const unsigned char*)arg1. */
2432 if (p2 && *p2 == '\0' && nonzero_length)
2434 gimple_seq stmts = NULL;
2435 tree var = gimple_load_first_char (loc, str1, &stmts);
2436 if (lhs)
2438 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2439 gimple_seq_add_stmt_without_update (&stmts, stmt);
2442 gsi_replace_with_seq_vops (gsi, stmts);
2443 return true;
2446 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2447 if (p1 && *p1 == '\0' && nonzero_length)
2449 gimple_seq stmts = NULL;
2450 tree var = gimple_load_first_char (loc, str2, &stmts);
2452 if (lhs)
2454 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2455 stmt = gimple_build_assign (c, NOP_EXPR, var);
2456 gimple_seq_add_stmt_without_update (&stmts, stmt);
2458 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2459 gimple_seq_add_stmt_without_update (&stmts, stmt);
2462 gsi_replace_with_seq_vops (gsi, stmts);
2463 return true;
2466 /* If len parameter is one, return an expression corresponding to
2467 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2468 if (fcode == BUILT_IN_STRNCMP && length == 1)
2470 gimple_seq stmts = NULL;
2471 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2472 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2474 if (lhs)
2476 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2477 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2478 gimple_seq_add_stmt_without_update (&stmts, convert1);
2480 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2481 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2482 gimple_seq_add_stmt_without_update (&stmts, convert2);
2484 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2485 gimple_seq_add_stmt_without_update (&stmts, stmt);
2488 gsi_replace_with_seq_vops (gsi, stmts);
2489 return true;
2492 /* If length is larger than the length of one constant string,
2493 replace strncmp with corresponding strcmp */
2494 if (fcode == BUILT_IN_STRNCMP
2495 && length > 0
2496 && ((p2 && (size_t) length > strlen (p2))
2497 || (p1 && (size_t) length > strlen (p1))))
2499 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2500 if (!fn)
2501 return false;
2502 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2503 replace_call_with_call_and_fold (gsi, repl);
2504 return true;
2507 return false;
2510 /* Fold a call to the memchr pointed by GSI iterator. */
2512 static bool
2513 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2515 gimple *stmt = gsi_stmt (*gsi);
2516 tree lhs = gimple_call_lhs (stmt);
2517 tree arg1 = gimple_call_arg (stmt, 0);
2518 tree arg2 = gimple_call_arg (stmt, 1);
2519 tree len = gimple_call_arg (stmt, 2);
2521 /* If the LEN parameter is zero, return zero. */
2522 if (integer_zerop (len))
2524 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2525 return true;
2528 char c;
2529 if (TREE_CODE (arg2) != INTEGER_CST
2530 || !tree_fits_uhwi_p (len)
2531 || !target_char_cst_p (arg2, &c))
2532 return false;
2534 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2535 unsigned HOST_WIDE_INT string_length;
2536 const char *p1 = c_getstr (arg1, &string_length);
2538 if (p1)
2540 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2541 if (r == NULL)
2543 if (length <= string_length)
2545 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2546 return true;
2549 else
2551 unsigned HOST_WIDE_INT offset = r - p1;
2552 gimple_seq stmts = NULL;
2553 if (lhs != NULL_TREE)
2555 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2556 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2557 arg1, offset_cst);
2558 gimple_seq_add_stmt_without_update (&stmts, stmt);
2560 else
2561 gimple_seq_add_stmt_without_update (&stmts,
2562 gimple_build_nop ());
2564 gsi_replace_with_seq_vops (gsi, stmts);
2565 return true;
2569 return false;
2572 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2573 to the call. IGNORE is true if the value returned
2574 by the builtin will be ignored. UNLOCKED is true is true if this
2575 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2576 the known length of the string. Return NULL_TREE if no simplification
2577 was possible. */
2579 static bool
2580 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2581 tree arg0, tree arg1,
2582 bool unlocked)
2584 gimple *stmt = gsi_stmt (*gsi);
2586 /* If we're using an unlocked function, assume the other unlocked
2587 functions exist explicitly. */
2588 tree const fn_fputc = (unlocked
2589 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2590 : builtin_decl_implicit (BUILT_IN_FPUTC));
2591 tree const fn_fwrite = (unlocked
2592 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2593 : builtin_decl_implicit (BUILT_IN_FWRITE));
2595 /* If the return value is used, don't do the transformation. */
2596 if (gimple_call_lhs (stmt))
2597 return false;
2599 /* Get the length of the string passed to fputs. If the length
2600 can't be determined, punt. */
2601 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2602 if (!len
2603 || TREE_CODE (len) != INTEGER_CST)
2604 return false;
2606 switch (compare_tree_int (len, 1))
2608 case -1: /* length is 0, delete the call entirely . */
2609 replace_call_with_value (gsi, integer_zero_node);
2610 return true;
2612 case 0: /* length is 1, call fputc. */
2614 const char *p = c_getstr (arg0);
2615 if (p != NULL)
2617 if (!fn_fputc)
2618 return false;
2620 gimple *repl = gimple_build_call (fn_fputc, 2,
2621 build_int_cst
2622 (integer_type_node, p[0]), arg1);
2623 replace_call_with_call_and_fold (gsi, repl);
2624 return true;
2627 /* FALLTHROUGH */
2628 case 1: /* length is greater than 1, call fwrite. */
2630 /* If optimizing for size keep fputs. */
2631 if (optimize_function_for_size_p (cfun))
2632 return false;
2633 /* New argument list transforming fputs(string, stream) to
2634 fwrite(string, 1, len, stream). */
2635 if (!fn_fwrite)
2636 return false;
2638 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2639 size_one_node, len, arg1);
2640 replace_call_with_call_and_fold (gsi, repl);
2641 return true;
2643 default:
2644 gcc_unreachable ();
2646 return false;
2649 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2650 DEST, SRC, LEN, and SIZE are the arguments to the call.
2651 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2652 code of the builtin. If MAXLEN is not NULL, it is maximum length
2653 passed as third argument. */
2655 static bool
2656 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2657 tree dest, tree src, tree len, tree size,
2658 enum built_in_function fcode)
2660 gimple *stmt = gsi_stmt (*gsi);
2661 location_t loc = gimple_location (stmt);
2662 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2663 tree fn;
2665 /* If SRC and DEST are the same (and not volatile), return DEST
2666 (resp. DEST+LEN for __mempcpy_chk). */
2667 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2669 if (fcode != BUILT_IN_MEMPCPY_CHK)
2671 replace_call_with_value (gsi, dest);
2672 return true;
2674 else
2676 gimple_seq stmts = NULL;
2677 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2678 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2679 TREE_TYPE (dest), dest, len);
2680 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2681 replace_call_with_value (gsi, temp);
2682 return true;
2686 if (! tree_fits_uhwi_p (size))
2687 return false;
2689 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2690 if (! integer_all_onesp (size))
2692 if (! tree_fits_uhwi_p (len))
2694 /* If LEN is not constant, try MAXLEN too.
2695 For MAXLEN only allow optimizing into non-_ocs function
2696 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2697 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2699 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2701 /* (void) __mempcpy_chk () can be optimized into
2702 (void) __memcpy_chk (). */
2703 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2704 if (!fn)
2705 return false;
2707 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2708 replace_call_with_call_and_fold (gsi, repl);
2709 return true;
2711 return false;
2714 else
2715 maxlen = len;
2717 if (tree_int_cst_lt (size, maxlen))
2718 return false;
2721 fn = NULL_TREE;
2722 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2723 mem{cpy,pcpy,move,set} is available. */
2724 switch (fcode)
2726 case BUILT_IN_MEMCPY_CHK:
2727 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2728 break;
2729 case BUILT_IN_MEMPCPY_CHK:
2730 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2731 break;
2732 case BUILT_IN_MEMMOVE_CHK:
2733 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2734 break;
2735 case BUILT_IN_MEMSET_CHK:
2736 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2737 break;
2738 default:
2739 break;
2742 if (!fn)
2743 return false;
2745 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2746 replace_call_with_call_and_fold (gsi, repl);
2747 return true;
2750 /* Fold a call to the __st[rp]cpy_chk builtin.
2751 DEST, SRC, and SIZE are the arguments to the call.
2752 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2753 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2754 strings passed as second argument. */
2756 static bool
2757 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2758 tree dest,
2759 tree src, tree size,
2760 enum built_in_function fcode)
2762 gimple *stmt = gsi_stmt (*gsi);
2763 location_t loc = gimple_location (stmt);
2764 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2765 tree len, fn;
2767 /* If SRC and DEST are the same (and not volatile), return DEST. */
2768 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2770 /* Issue -Wrestrict unless the pointers are null (those do
2771 not point to objects and so do not indicate an overlap;
2772 such calls could be the result of sanitization and jump
2773 threading). */
2774 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2776 tree func = gimple_call_fndecl (stmt);
2778 warning_at (loc, OPT_Wrestrict,
2779 "%qD source argument is the same as destination",
2780 func);
2783 replace_call_with_value (gsi, dest);
2784 return true;
2787 if (! tree_fits_uhwi_p (size))
2788 return false;
2790 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2791 if (! integer_all_onesp (size))
2793 len = c_strlen (src, 1);
2794 if (! len || ! tree_fits_uhwi_p (len))
2796 /* If LEN is not constant, try MAXLEN too.
2797 For MAXLEN only allow optimizing into non-_ocs function
2798 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2799 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2801 if (fcode == BUILT_IN_STPCPY_CHK)
2803 if (! ignore)
2804 return false;
2806 /* If return value of __stpcpy_chk is ignored,
2807 optimize into __strcpy_chk. */
2808 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2809 if (!fn)
2810 return false;
2812 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2813 replace_call_with_call_and_fold (gsi, repl);
2814 return true;
2817 if (! len || TREE_SIDE_EFFECTS (len))
2818 return false;
2820 /* If c_strlen returned something, but not a constant,
2821 transform __strcpy_chk into __memcpy_chk. */
2822 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2823 if (!fn)
2824 return false;
2826 gimple_seq stmts = NULL;
2827 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2828 len = gimple_convert (&stmts, loc, size_type_node, len);
2829 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2830 build_int_cst (size_type_node, 1));
2831 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2832 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2833 replace_call_with_call_and_fold (gsi, repl);
2834 return true;
2837 else
2838 maxlen = len;
2840 if (! tree_int_cst_lt (maxlen, size))
2841 return false;
2844 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2845 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2846 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2847 if (!fn)
2848 return false;
2850 gimple *repl = gimple_build_call (fn, 2, dest, src);
2851 replace_call_with_call_and_fold (gsi, repl);
2852 return true;
2855 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2856 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2857 length passed as third argument. IGNORE is true if return value can be
2858 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2860 static bool
2861 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2862 tree dest, tree src,
2863 tree len, tree size,
2864 enum built_in_function fcode)
2866 gimple *stmt = gsi_stmt (*gsi);
2867 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2868 tree fn;
2870 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2872 /* If return value of __stpncpy_chk is ignored,
2873 optimize into __strncpy_chk. */
2874 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2875 if (fn)
2877 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2878 replace_call_with_call_and_fold (gsi, repl);
2879 return true;
2883 if (! tree_fits_uhwi_p (size))
2884 return false;
2886 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2887 if (! integer_all_onesp (size))
2889 if (! tree_fits_uhwi_p (len))
2891 /* If LEN is not constant, try MAXLEN too.
2892 For MAXLEN only allow optimizing into non-_ocs function
2893 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2894 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2895 return false;
2897 else
2898 maxlen = len;
2900 if (tree_int_cst_lt (size, maxlen))
2901 return false;
2904 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2905 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2906 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2907 if (!fn)
2908 return false;
2910 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2911 replace_call_with_call_and_fold (gsi, repl);
2912 return true;
2915 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2916 Return NULL_TREE if no simplification can be made. */
2918 static bool
2919 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2921 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2922 location_t loc = gimple_location (stmt);
2923 tree dest = gimple_call_arg (stmt, 0);
2924 tree src = gimple_call_arg (stmt, 1);
2925 tree fn, lenp1;
2927 /* If the result is unused, replace stpcpy with strcpy. */
2928 if (gimple_call_lhs (stmt) == NULL_TREE)
2930 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2931 if (!fn)
2932 return false;
2933 gimple_call_set_fndecl (stmt, fn);
2934 fold_stmt (gsi);
2935 return true;
2938 /* Set to non-null if ARG refers to an unterminated array. */
2939 c_strlen_data data = { };
2940 tree len = c_strlen (src, 1, &data, 1);
2941 if (!len
2942 || TREE_CODE (len) != INTEGER_CST)
2944 data.decl = unterminated_array (src);
2945 if (!data.decl)
2946 return false;
2949 if (data.decl)
2951 /* Avoid folding calls with unterminated arrays. */
2952 if (!gimple_no_warning_p (stmt))
2953 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2954 gimple_set_no_warning (stmt, true);
2955 return false;
2958 if (optimize_function_for_size_p (cfun)
2959 /* If length is zero it's small enough. */
2960 && !integer_zerop (len))
2961 return false;
2963 /* If the source has a known length replace stpcpy with memcpy. */
2964 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2965 if (!fn)
2966 return false;
2968 gimple_seq stmts = NULL;
2969 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2970 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2971 tem, build_int_cst (size_type_node, 1));
2972 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2973 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2974 gimple_set_vuse (repl, gimple_vuse (stmt));
2975 gimple_set_vdef (repl, gimple_vdef (stmt));
2976 if (gimple_vdef (repl)
2977 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2978 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2979 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2980 /* Replace the result with dest + len. */
2981 stmts = NULL;
2982 tem = gimple_convert (&stmts, loc, sizetype, len);
2983 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2984 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2985 POINTER_PLUS_EXPR, dest, tem);
2986 gsi_replace (gsi, ret, false);
2987 /* Finally fold the memcpy call. */
2988 gimple_stmt_iterator gsi2 = *gsi;
2989 gsi_prev (&gsi2);
2990 fold_stmt (&gsi2);
2991 return true;
2994 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2995 NULL_TREE if a normal call should be emitted rather than expanding
2996 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2997 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2998 passed as second argument. */
3000 static bool
3001 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3002 enum built_in_function fcode)
3004 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3005 tree dest, size, len, fn, fmt, flag;
3006 const char *fmt_str;
3008 /* Verify the required arguments in the original call. */
3009 if (gimple_call_num_args (stmt) < 5)
3010 return false;
3012 dest = gimple_call_arg (stmt, 0);
3013 len = gimple_call_arg (stmt, 1);
3014 flag = gimple_call_arg (stmt, 2);
3015 size = gimple_call_arg (stmt, 3);
3016 fmt = gimple_call_arg (stmt, 4);
3018 if (! tree_fits_uhwi_p (size))
3019 return false;
3021 if (! integer_all_onesp (size))
3023 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3024 if (! tree_fits_uhwi_p (len))
3026 /* If LEN is not constant, try MAXLEN too.
3027 For MAXLEN only allow optimizing into non-_ocs function
3028 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3029 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3030 return false;
3032 else
3033 maxlen = len;
3035 if (tree_int_cst_lt (size, maxlen))
3036 return false;
3039 if (!init_target_chars ())
3040 return false;
3042 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3043 or if format doesn't contain % chars or is "%s". */
3044 if (! integer_zerop (flag))
3046 fmt_str = c_getstr (fmt);
3047 if (fmt_str == NULL)
3048 return false;
3049 if (strchr (fmt_str, target_percent) != NULL
3050 && strcmp (fmt_str, target_percent_s))
3051 return false;
3054 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3055 available. */
3056 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3057 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3058 if (!fn)
3059 return false;
3061 /* Replace the called function and the first 5 argument by 3 retaining
3062 trailing varargs. */
3063 gimple_call_set_fndecl (stmt, fn);
3064 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3065 gimple_call_set_arg (stmt, 0, dest);
3066 gimple_call_set_arg (stmt, 1, len);
3067 gimple_call_set_arg (stmt, 2, fmt);
3068 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3069 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3070 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3071 fold_stmt (gsi);
3072 return true;
3075 /* Fold the __{,v}sprintf_chk call at *GSI into a plain {,v}sprintf
3076 when the object-size check can be discharged at compile time.
3077 Return true if the call was simplified, false if a normal call
3078 should be emitted instead. FCODE is either BUILT_IN_SPRINTF_CHK
or BUILT_IN_VSPRINTF_CHK. */
3080 static bool
3081 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3082 enum built_in_function fcode)
3084 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3085 tree dest, size, len, fn, fmt, flag;
3086 const char *fmt_str;
3087 unsigned nargs = gimple_call_num_args (stmt);
3089 /* Verify the required arguments in the original call. */
3090 if (nargs < 4)
3091 return false;
3092 dest = gimple_call_arg (stmt, 0);
3093 flag = gimple_call_arg (stmt, 1);
3094 size = gimple_call_arg (stmt, 2);
3095 fmt = gimple_call_arg (stmt, 3);
3097 if (! tree_fits_uhwi_p (size))
3098 return false;
3100 len = NULL_TREE;
3102 if (!init_target_chars ())
3103 return false;
3105 /* Check whether the format is a literal string constant. */
3106 fmt_str = c_getstr (fmt);
3107 if (fmt_str != NULL)
3109 /* If the format doesn't contain % args or %%, we know the size. */
3110 if (strchr (fmt_str, target_percent) == 0)
3112 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3113 len = build_int_cstu (size_type_node, strlen (fmt_str))
3115 /* If the format is "%s" and first ... argument is a string literal,
3116 we know the size too. */
3117 else if (fcode == BUILT_IN_SPRINTF_CHK
3118 && strcmp (fmt_str, target_percent_s) == 0)
3120 tree arg;
3122 if (nargs == 5)
3124 arg = gimple_call_arg (stmt, 4);
3125 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3127 len = c_strlen (arg, 1);
3128 if (! len || ! tree_fits_uhwi_p (len))
3129 len = NULL_TREE;
/* Unless SIZE is the "unlimited" (size_t)-1 sentinel, we must be able
to prove the output fits, i.e. LEN < SIZE. */
3135 if (! integer_all_onesp (size))
3137 if (! len || ! tree_int_cst_lt (len, size))
3138 return false;
3141 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3142 or if format doesn't contain % chars or is "%s". */
3143 if (! integer_zerop (flag))
3145 if (fmt_str == NULL)
3146 return false;
3147 if (strchr (fmt_str, target_percent) != NULL
3148 && strcmp (fmt_str, target_percent_s))
3149 return false;
3152 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3153 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3154 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3155 if (!fn)
3156 return false;
3158 /* Replace the called function and the first 4 argument by 2 retaining
3159 trailing varargs. */
3160 gimple_call_set_fndecl (stmt, fn);
3161 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3162 gimple_call_set_arg (stmt, 0, dest);
3163 gimple_call_set_arg (stmt, 1, fmt);
3164 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3165 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3166 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3167 fold_stmt (gsi);
3168 return true;
3171 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3172 ORIG may be null if this is a 2-argument call. We don't attempt to
3173 simplify calls with more than 3 arguments.
3175 Return true if simplification was possible, otherwise false. */
3177 bool
3178 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3180 gimple *stmt = gsi_stmt (*gsi);
3181 tree dest = gimple_call_arg (stmt, 0);
3182 tree fmt = gimple_call_arg (stmt, 1);
3183 tree orig = NULL_TREE;
3184 const char *fmt_str = NULL;
3186 /* Verify the required arguments in the original call. We deal with two
3187 types of sprintf() calls: 'sprintf (str, fmt)' and
3188 'sprintf (dest, "%s", orig)'. */
3189 if (gimple_call_num_args (stmt) > 3)
3190 return false;
3192 if (gimple_call_num_args (stmt) == 3)
3193 orig = gimple_call_arg (stmt, 2);
3195 /* Check whether the format is a literal string constant. */
3196 fmt_str = c_getstr (fmt);
3197 if (fmt_str == NULL)
3198 return false;
3200 if (!init_target_chars ())
3201 return false;
3203 /* If the format doesn't contain % args or %%, use strcpy. */
3204 if (strchr (fmt_str, target_percent) == NULL)
3206 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3208 if (!fn)
3209 return false;
3211 /* Don't optimize sprintf (buf, "abc", ptr++). */
3212 if (orig)
3213 return false;
3215 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3216 'format' is known to contain no % formats. */
3217 gimple_seq stmts = NULL;
3218 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3220 /* Propagate the NO_WARNING bit to avoid issuing the same
3221 warning more than once. */
3222 if (gimple_no_warning_p (stmt))
3223 gimple_set_no_warning (repl, true);
3225 gimple_seq_add_stmt_without_update (&stmts, repl);
3226 if (gimple_call_lhs (stmt))
3228 repl = gimple_build_assign (gimple_call_lhs (stmt),
3229 build_int_cst (integer_type_node,
3230 strlen (fmt_str)));
3231 gimple_seq_add_stmt_without_update (&stmts, repl);
3232 gsi_replace_with_seq_vops (gsi, stmts);
3233 /* gsi now points at the assignment to the lhs, get a
3234 stmt iterator to the strcpy call.
3235 ??? We can't use gsi_for_stmt as that doesn't work when the
3236 CFG isn't built yet. */
3237 gimple_stmt_iterator gsi2 = *gsi;
3238 gsi_prev (&gsi2);
3239 fold_stmt (&gsi2);
3241 else
3243 gsi_replace_with_seq_vops (gsi, stmts);
3244 fold_stmt (gsi);
3246 return true;
3249 /* If the format is "%s", use strcpy if the result isn't used. */
3250 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3252 tree fn;
3253 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3255 if (!fn)
3256 return false;
3258 /* Don't crash on sprintf (str1, "%s"). */
3259 if (!orig)
3260 return false;
3262 tree orig_len = NULL_TREE;
3263 if (gimple_call_lhs (stmt))
/* The lhs is the return value (chars written), so we need a
compile-time length for ORIG to materialize it. */
3265 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3266 if (!orig_len)
3267 return false;
3270 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3271 gimple_seq stmts = NULL;
3272 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3274 /* Propagate the NO_WARNING bit to avoid issuing the same
3275 warning more than once. */
3276 if (gimple_no_warning_p (stmt))
3277 gimple_set_no_warning (repl, true);
3279 gimple_seq_add_stmt_without_update (&stmts, repl);
3280 if (gimple_call_lhs (stmt))
3282 if (!useless_type_conversion_p (integer_type_node,
3283 TREE_TYPE (orig_len)))
3284 orig_len = fold_convert (integer_type_node, orig_len);
3285 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3286 gimple_seq_add_stmt_without_update (&stmts, repl);
3287 gsi_replace_with_seq_vops (gsi, stmts);
3288 /* gsi now points at the assignment to the lhs, get a
3289 stmt iterator to the strcpy call.
3290 ??? We can't use gsi_for_stmt as that doesn't work when the
3291 CFG isn't built yet. */
3292 gimple_stmt_iterator gsi2 = *gsi;
3293 gsi_prev (&gsi2);
3294 fold_stmt (&gsi2);
3296 else
3298 gsi_replace_with_seq_vops (gsi, stmts);
3299 fold_stmt (gsi);
3301 return true;
3303 return false;
3306 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3307 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3308 attempt to simplify calls with more than 4 arguments.
3310 Return true if simplification was possible, otherwise false. */
3312 bool
3313 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3315 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3316 tree dest = gimple_call_arg (stmt, 0);
3317 tree destsize = gimple_call_arg (stmt, 1);
3318 tree fmt = gimple_call_arg (stmt, 2);
3319 tree orig = NULL_TREE;
3320 const char *fmt_str = NULL;
3322 if (gimple_call_num_args (stmt) > 4)
3323 return false;
3325 if (gimple_call_num_args (stmt) == 4)
3326 orig = gimple_call_arg (stmt, 3);
/* DESTSIZE must be a compile-time constant for the truncation
check below. */
3328 if (!tree_fits_uhwi_p (destsize))
3329 return false;
3330 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3332 /* Check whether the format is a literal string constant. */
3333 fmt_str = c_getstr (fmt);
3334 if (fmt_str == NULL)
3335 return false;
3337 if (!init_target_chars ())
3338 return false;
3340 /* If the format doesn't contain % args or %%, use strcpy. */
3341 if (strchr (fmt_str, target_percent) == NULL)
3343 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3344 if (!fn)
3345 return false;
3347 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3348 if (orig)
3349 return false;
3351 /* We could expand this as
3352 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3353 or to
3354 memcpy (str, fmt_with_nul_at_cstm1, cst);
3355 but in the former case that might increase code size
3356 and in the latter case grow .rodata section too much.
3357 So punt for now. */
3358 size_t len = strlen (fmt_str);
3359 if (len >= destlen)
3360 return false;
3362 gimple_seq stmts = NULL;
3363 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3364 gimple_seq_add_stmt_without_update (&stmts, repl);
3365 if (gimple_call_lhs (stmt))
3367 repl = gimple_build_assign (gimple_call_lhs (stmt),
3368 build_int_cst (integer_type_node, len));
3369 gimple_seq_add_stmt_without_update (&stmts, repl);
3370 gsi_replace_with_seq_vops (gsi, stmts);
3371 /* gsi now points at the assignment to the lhs, get a
3372 stmt iterator to the strcpy call.
3373 ??? We can't use gsi_for_stmt as that doesn't work when the
3374 CFG isn't built yet. */
3375 gimple_stmt_iterator gsi2 = *gsi;
3376 gsi_prev (&gsi2);
3377 fold_stmt (&gsi2);
3379 else
3381 gsi_replace_with_seq_vops (gsi, stmts);
3382 fold_stmt (gsi);
3384 return true;
3387 /* If the format is "%s", use strcpy if the result isn't used. */
3388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3390 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3391 if (!fn)
3392 return false;
3394 /* Don't crash on snprintf (str1, cst, "%s"). */
3395 if (!orig)
3396 return false;
/* Need an exact constant length for ORIG both for the truncation
check and to materialize the return value. */
3398 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3399 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3400 return false;
3402 /* We could expand this as
3403 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3404 or to
3405 memcpy (str1, str2_with_nul_at_cstm1, cst);
3406 but in the former case that might increase code size
3407 and in the latter case grow .rodata section too much.
3408 So punt for now. */
3409 if (compare_tree_int (orig_len, destlen) >= 0)
3410 return false;
3412 /* Convert snprintf (str1, cst, "%s", str2) into
3413 strcpy (str1, str2) if strlen (str2) < cst. */
3414 gimple_seq stmts = NULL;
3415 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3416 gimple_seq_add_stmt_without_update (&stmts, repl);
3417 if (gimple_call_lhs (stmt))
3419 if (!useless_type_conversion_p (integer_type_node,
3420 TREE_TYPE (orig_len)))
3421 orig_len = fold_convert (integer_type_node, orig_len);
3422 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3423 gimple_seq_add_stmt_without_update (&stmts, repl);
3424 gsi_replace_with_seq_vops (gsi, stmts);
3425 /* gsi now points at the assignment to the lhs, get a
3426 stmt iterator to the strcpy call.
3427 ??? We can't use gsi_for_stmt as that doesn't work when the
3428 CFG isn't built yet. */
3429 gimple_stmt_iterator gsi2 = *gsi;
3430 gsi_prev (&gsi2);
3431 fold_stmt (&gsi2);
3433 else
3435 gsi_replace_with_seq_vops (gsi, stmts);
3436 fold_stmt (gsi);
3438 return true;
3440 return false;
3443 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3444 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3445 more than 3 arguments, and ARG may be null in the 2-argument case.
3447 Return true if the call was simplified into fputc/fputs (or removed
3448 entirely), false if a normal call should be emitted. FCODE is the
3449 BUILT_IN_* code of the function to be simplified. */
3451 static bool
3452 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3453 tree fp, tree fmt, tree arg,
3454 enum built_in_function fcode)
3456 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3457 tree fn_fputc, fn_fputs;
3458 const char *fmt_str = NULL;
3460 /* If the return value is used, don't do the transformation. */
3461 if (gimple_call_lhs (stmt) != NULL_TREE)
3462 return false;
3464 /* Check whether the format is a literal string constant. */
3465 fmt_str = c_getstr (fmt);
3466 if (fmt_str == NULL)
3467 return false;
3469 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3471 /* If we're using an unlocked function, assume the other
3472 unlocked functions exist explicitly. */
3473 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3474 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3476 else
3478 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3479 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3482 if (!init_target_chars ())
3483 return false;
3485 /* If the format doesn't contain % args or %%, use fputs. */
3486 if (strchr (fmt_str, target_percent) == NULL)
3488 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3489 && arg)
3490 return false;
3492 /* If the format specifier was "", fprintf does nothing. */
3493 if (fmt_str[0] == '\0')
3495 replace_call_with_value (gsi, NULL_TREE);
3496 return true;
3499 /* When "string" doesn't contain %, replace all cases of
3500 fprintf (fp, string) with fputs (string, fp). The fputs
3501 builtin will take care of special cases like length == 1. */
3502 if (fn_fputs)
3504 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3505 replace_call_with_call_and_fold (gsi, repl);
3506 return true;
3510 /* The other optimizations can be done only on the non-va_list variants. */
3511 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3512 return false;
3514 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3515 else if (strcmp (fmt_str, target_percent_s) == 0)
3517 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3518 return false;
3519 if (fn_fputs)
3521 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3522 replace_call_with_call_and_fold (gsi, repl);
3523 return true;
3527 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3528 else if (strcmp (fmt_str, target_percent_c) == 0)
3530 if (!arg
3531 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3532 return false;
3533 if (fn_fputc)
3535 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3536 replace_call_with_call_and_fold (gsi, repl);
3537 return true;
3541 return false;
3544 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3545 FMT and ARG are the arguments to the call; we don't fold cases with
3546 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3548 Return true if the call was simplified into putchar/puts (or removed
3549 entirely), false if a normal call should be emitted. FCODE is the
3550 BUILT_IN_* code of the function to be simplified. */
3552 static bool
3553 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3554 tree arg, enum built_in_function fcode)
3556 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3557 tree fn_putchar, fn_puts, newarg;
3558 const char *fmt_str = NULL;
3560 /* If the return value is used, don't do the transformation. */
3561 if (gimple_call_lhs (stmt) != NULL_TREE)
3562 return false;
3564 /* Check whether the format is a literal string constant. */
3565 fmt_str = c_getstr (fmt);
3566 if (fmt_str == NULL)
3567 return false;
3569 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3571 /* If we're using an unlocked function, assume the other
3572 unlocked functions exist explicitly. */
3573 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3574 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3576 else
3578 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3579 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3582 if (!init_target_chars ())
3583 return false;
/* Handle both printf ("%s", str-literal) and printf (no-%-format)
uniformly: either way STR below is the known output string. */
3585 if (strcmp (fmt_str, target_percent_s) == 0
3586 || strchr (fmt_str, target_percent) == NULL)
3588 const char *str;
3590 if (strcmp (fmt_str, target_percent_s) == 0)
3592 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3593 return false;
3595 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3596 return false;
3598 str = c_getstr (arg);
3599 if (str == NULL)
3600 return false;
3602 else
3604 /* The format specifier doesn't contain any '%' characters. */
3605 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3606 && arg)
3607 return false;
3608 str = fmt_str;
3611 /* If the string was "", printf does nothing. */
3612 if (str[0] == '\0')
3614 replace_call_with_value (gsi, NULL_TREE);
3615 return true;
3618 /* If the string has length of 1, call putchar. */
3619 if (str[1] == '\0')
3621 /* Given printf("c"), (where c is any one character,)
3622 convert "c"[0] to an int and pass that to the replacement
3623 function. */
3624 newarg = build_int_cst (integer_type_node, str[0]);
3625 if (fn_putchar)
3627 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3628 replace_call_with_call_and_fold (gsi, repl);
3629 return true;
3632 else
3634 /* If the string was "string\n", call puts("string"). */
3635 size_t len = strlen (str);
3636 if ((unsigned char)str[len - 1] == target_newline
3637 && (size_t) (int) len == len
3638 && (int) len > 0)
3640 char *newstr;
3642 /* Create a NUL-terminated string that's one char shorter
3643 than the original, stripping off the trailing '\n'. */
3644 newstr = xstrdup (str);
3645 newstr[len - 1] = '\0';
3646 newarg = build_string_literal (len, newstr);
3647 free (newstr);
3648 if (fn_puts)
3650 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3651 replace_call_with_call_and_fold (gsi, repl);
3652 return true;
3655 else
3656 /* We'd like to arrange to call fputs(string,stdout) here,
3657 but we need stdout and don't have a way to get it yet. */
3658 return false;
3662 /* The other optimizations can be done only on the non-va_list variants. */
3663 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3664 return false;
3666 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3667 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3669 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3670 return false;
3671 if (fn_puts)
3673 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3674 replace_call_with_call_and_fold (gsi, repl);
3675 return true;
3679 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3680 else if (strcmp (fmt_str, target_percent_c) == 0)
3682 if (!arg || ! useless_type_conversion_p (integer_type_node,
3683 TREE_TYPE (arg)))
3684 return false;
3685 if (fn_putchar)
3687 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3688 replace_call_with_call_and_fold (gsi, repl);
3689 return true;
3693 return false;
3698 /* Fold a call to __builtin_strlen at *GSI: replace it with a constant
when the argument's length is known exactly, otherwise record the
computed [min, max] length range on the call's SSA result.
Return true only when the call itself was replaced. */
3700 static bool
3701 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3703 gimple *stmt = gsi_stmt (*gsi);
3704 tree arg = gimple_call_arg (stmt, 0);
3706 wide_int minlen;
3707 wide_int maxlen;
3709 c_strlen_data lendata = { };
3710 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3711 && !lendata.decl
3712 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3713 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3715 /* The range of lengths refers to either a single constant
3716 string or to the longest and shortest constant string
3717 referenced by the argument of the strlen() call, or to
3718 the strings that can possibly be stored in the arrays
3719 the argument refers to. */
3720 minlen = wi::to_wide (lendata.minlen);
3721 maxlen = wi::to_wide (lendata.maxlen);
3723 else
3725 unsigned prec = TYPE_PRECISION (sizetype);
/* Nothing known: fall back to the universal strlen range
[0, max_object_size () - 2]. */
3727 minlen = wi::shwi (0, prec);
3728 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3731 if (minlen == maxlen)
3733 /* Fold the strlen call to a constant. */
3734 tree type = TREE_TYPE (lendata.minlen);
3735 tree len = force_gimple_operand_gsi (gsi,
3736 wide_int_to_tree (type, minlen),
3737 true, NULL, true, GSI_SAME_STMT);
3738 replace_call_with_value (gsi, len);
3739 return true;
3742 if (tree lhs = gimple_call_lhs (stmt))
3743 if (TREE_CODE (lhs) == SSA_NAME
3744 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3745 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3747 return false;
3750 /* Fold a call to __builtin_acc_on_device (ARG0) into the boolean
expression ARG0 == VAL_HOST || ARG0 == VAL_DEV, where the two
constants depend on whether this is the host or the accelerator
(offload) compiler. Return true if the call was replaced. */
3752 static bool
3753 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3755 /* Defer folding until we know which compiler we're in. */
3756 if (symtab->state != EXPANSION)
3757 return false;
3759 unsigned val_host = GOMP_DEVICE_HOST;
3760 unsigned val_dev = GOMP_DEVICE_NONE;
3762 #ifdef ACCEL_COMPILER
3763 val_host = GOMP_DEVICE_NOT_HOST;
3764 val_dev = ACCEL_COMPILER_acc_device;
3765 #endif
3767 location_t loc = gimple_location (gsi_stmt (*gsi));
/* host_eq = (arg0 == val_host) */
3769 tree host_eq = make_ssa_name (boolean_type_node);
3770 gimple *host_ass = gimple_build_assign
3771 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3772 gimple_set_location (host_ass, loc);
3773 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
/* dev_eq = (arg0 == val_dev) */
3775 tree dev_eq = make_ssa_name (boolean_type_node);
3776 gimple *dev_ass = gimple_build_assign
3777 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3778 gimple_set_location (dev_ass, loc);
3779 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
/* result = host_eq | dev_eq */
3781 tree result = make_ssa_name (boolean_type_node);
3782 gimple *result_ass = gimple_build_assign
3783 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3784 gimple_set_location (result_ass, loc);
3785 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3787 replace_call_with_value (gsi, result);
3789 return true;
3792 /* Fold realloc (0, n) -> malloc (n). Only a literal null first
argument is handled; return true if the call was replaced. */
3794 static bool
3795 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3797 gimple *stmt = gsi_stmt (*gsi);
3798 tree arg = gimple_call_arg (stmt, 0);
3799 tree size = gimple_call_arg (stmt, 1);
3801 if (operand_equal_p (arg, null_pointer_node, 0))
3803 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3804 if (fn_malloc)
3806 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3807 replace_call_with_call_and_fold (gsi, repl);
3808 return true;
3811 return false;
3814 /* Fold the non-target builtin at *GSI and return whether any simplification
3815 was made. Dispatches to the per-builtin folders above; falls back to
the generic tree-level folder when no specific handler matched.
NOTE(review): assumes the call's fndecl is a BUILT_IN_NORMAL builtin
(DECL_FUNCTION_CODE below) — callers appear to guarantee this. */
3817 static bool
3818 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3820 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3821 tree callee = gimple_call_fndecl (stmt);
3823 /* Give up for always_inline inline builtins until they are
3824 inlined. */
3825 if (avoid_folding_inline_builtin (callee))
3826 return false;
3828 unsigned n = gimple_call_num_args (stmt);
3829 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3830 switch (fcode)
3832 case BUILT_IN_BCMP:
3833 return gimple_fold_builtin_bcmp (gsi);
3834 case BUILT_IN_BCOPY:
3835 return gimple_fold_builtin_bcopy (gsi);
3836 case BUILT_IN_BZERO:
3837 return gimple_fold_builtin_bzero (gsi);
3839 case BUILT_IN_MEMSET:
3840 return gimple_fold_builtin_memset (gsi,
3841 gimple_call_arg (stmt, 1),
3842 gimple_call_arg (stmt, 2));
3843 case BUILT_IN_MEMCPY:
3844 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3845 gimple_call_arg (stmt, 1), 0);
3846 case BUILT_IN_MEMPCPY:
3847 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3848 gimple_call_arg (stmt, 1), 1);
3849 case BUILT_IN_MEMMOVE:
3850 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3851 gimple_call_arg (stmt, 1), 3);
3852 case BUILT_IN_SPRINTF_CHK:
3853 case BUILT_IN_VSPRINTF_CHK:
3854 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3855 case BUILT_IN_STRCAT_CHK:
3856 return gimple_fold_builtin_strcat_chk (gsi);
3857 case BUILT_IN_STRNCAT_CHK:
3858 return gimple_fold_builtin_strncat_chk (gsi);
3859 case BUILT_IN_STRLEN:
3860 return gimple_fold_builtin_strlen (gsi);
3861 case BUILT_IN_STRCPY:
3862 return gimple_fold_builtin_strcpy (gsi,
3863 gimple_call_arg (stmt, 0),
3864 gimple_call_arg (stmt, 1));
3865 case BUILT_IN_STRNCPY:
3866 return gimple_fold_builtin_strncpy (gsi,
3867 gimple_call_arg (stmt, 0),
3868 gimple_call_arg (stmt, 1),
3869 gimple_call_arg (stmt, 2));
3870 case BUILT_IN_STRCAT:
3871 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3872 gimple_call_arg (stmt, 1));
3873 case BUILT_IN_STRNCAT:
3874 return gimple_fold_builtin_strncat (gsi);
3875 case BUILT_IN_INDEX:
3876 case BUILT_IN_STRCHR:
3877 return gimple_fold_builtin_strchr (gsi, false);
3878 case BUILT_IN_RINDEX:
3879 case BUILT_IN_STRRCHR:
3880 return gimple_fold_builtin_strchr (gsi, true);
3881 case BUILT_IN_STRSTR:
3882 return gimple_fold_builtin_strstr (gsi);
3883 case BUILT_IN_STRCMP:
3884 case BUILT_IN_STRCMP_EQ:
3885 case BUILT_IN_STRCASECMP:
3886 case BUILT_IN_STRNCMP:
3887 case BUILT_IN_STRNCMP_EQ:
3888 case BUILT_IN_STRNCASECMP:
3889 return gimple_fold_builtin_string_compare (gsi);
3890 case BUILT_IN_MEMCHR:
3891 return gimple_fold_builtin_memchr (gsi);
3892 case BUILT_IN_FPUTS:
3893 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3894 gimple_call_arg (stmt, 1), false);
3895 case BUILT_IN_FPUTS_UNLOCKED:
3896 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3897 gimple_call_arg (stmt, 1), true);
3898 case BUILT_IN_MEMCPY_CHK:
3899 case BUILT_IN_MEMPCPY_CHK:
3900 case BUILT_IN_MEMMOVE_CHK:
3901 case BUILT_IN_MEMSET_CHK:
3902 return gimple_fold_builtin_memory_chk (gsi,
3903 gimple_call_arg (stmt, 0),
3904 gimple_call_arg (stmt, 1),
3905 gimple_call_arg (stmt, 2),
3906 gimple_call_arg (stmt, 3),
3907 fcode);
3908 case BUILT_IN_STPCPY:
3909 return gimple_fold_builtin_stpcpy (gsi);
3910 case BUILT_IN_STRCPY_CHK:
3911 case BUILT_IN_STPCPY_CHK:
3912 return gimple_fold_builtin_stxcpy_chk (gsi,
3913 gimple_call_arg (stmt, 0),
3914 gimple_call_arg (stmt, 1),
3915 gimple_call_arg (stmt, 2),
3916 fcode);
3917 case BUILT_IN_STRNCPY_CHK:
3918 case BUILT_IN_STPNCPY_CHK:
3919 return gimple_fold_builtin_stxncpy_chk (gsi,
3920 gimple_call_arg (stmt, 0),
3921 gimple_call_arg (stmt, 1),
3922 gimple_call_arg (stmt, 2),
3923 gimple_call_arg (stmt, 3),
3924 fcode);
3925 case BUILT_IN_SNPRINTF_CHK:
3926 case BUILT_IN_VSNPRINTF_CHK:
3927 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
/* The printf-family folders only handle fixed argument counts;
anything else falls through to the generic folder below. */
3929 case BUILT_IN_FPRINTF:
3930 case BUILT_IN_FPRINTF_UNLOCKED:
3931 case BUILT_IN_VFPRINTF:
3932 if (n == 2 || n == 3)
3933 return gimple_fold_builtin_fprintf (gsi,
3934 gimple_call_arg (stmt, 0),
3935 gimple_call_arg (stmt, 1),
3936 n == 3
3937 ? gimple_call_arg (stmt, 2)
3938 : NULL_TREE,
3939 fcode);
3940 break;
3941 case BUILT_IN_FPRINTF_CHK:
3942 case BUILT_IN_VFPRINTF_CHK:
3943 if (n == 3 || n == 4)
3944 return gimple_fold_builtin_fprintf (gsi,
3945 gimple_call_arg (stmt, 0),
3946 gimple_call_arg (stmt, 2),
3947 n == 4
3948 ? gimple_call_arg (stmt, 3)
3949 : NULL_TREE,
3950 fcode);
3951 break;
3952 case BUILT_IN_PRINTF:
3953 case BUILT_IN_PRINTF_UNLOCKED:
3954 case BUILT_IN_VPRINTF:
3955 if (n == 1 || n == 2)
3956 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3957 n == 2
3958 ? gimple_call_arg (stmt, 1)
3959 : NULL_TREE, fcode);
3960 break;
3961 case BUILT_IN_PRINTF_CHK:
3962 case BUILT_IN_VPRINTF_CHK:
3963 if (n == 2 || n == 3)
3964 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3965 n == 3
3966 ? gimple_call_arg (stmt, 2)
3967 : NULL_TREE, fcode);
3968 break;
3969 case BUILT_IN_ACC_ON_DEVICE:
3970 return gimple_fold_builtin_acc_on_device (gsi,
3971 gimple_call_arg (stmt, 0));
3972 case BUILT_IN_REALLOC:
3973 return gimple_fold_builtin_realloc (gsi);
3975 default:;
3978 /* Try the generic builtin folder. */
3979 bool ignore = (gimple_call_lhs (stmt) == NULL);
3980 tree result = fold_call_stmt (stmt, ignore);
3981 if (result)
3983 if (ignore)
3984 STRIP_NOPS (result);
3985 else
3986 result = fold_convert (gimple_call_return_type (stmt), result);
3987 if (!update_call_from_tree (gsi, result))
3988 gimplify_and_update_call_from_tree (gsi, result);
3989 return true;
3992 return false;
3995 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3996 function calls to constants, where possible. Return the constant
replacement value, or NULL_TREE if the dimension is dynamic. */
3998 static tree
3999 fold_internal_goacc_dim (const gimple *call)
4001 int axis = oacc_get_ifn_dim_arg (call);
4002 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4003 tree result = NULL_TREE;
4004 tree type = TREE_TYPE (gimple_call_lhs (call));
4006 switch (gimple_call_internal_fn (call))
4008 case IFN_GOACC_DIM_POS:
4009 /* If the size is 1, we know the answer. */
4010 if (size == 1)
4011 result = build_int_cst (type, 0);
4012 break;
4013 case IFN_GOACC_DIM_SIZE:
4014 /* If the size is not dynamic, we know the answer. */
4015 if (size)
4016 result = build_int_cst (type, size);
4017 break;
4018 default:
4019 break;
4022 return result;
4025 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4026 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4027 &var where var is only addressable because of such calls. */
4029 bool
4030 optimize_atomic_compare_exchange_p (gimple *stmt)
4032 if (gimple_call_num_args (stmt) != 6
4033 || !flag_inline_atomics
4034 || !optimize
4035 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4036 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4037 || !gimple_vdef (stmt)
4038 || !gimple_vuse (stmt))
4039 return false;
4041 tree fndecl = gimple_call_fndecl (stmt);
4042 switch (DECL_FUNCTION_CODE (fndecl))
4044 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4045 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4046 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4047 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4048 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4049 break;
4050 default:
4051 return false;
/* Arg 1 ("expected") must be the address of a local variable we can
promote to a register. */
4054 tree expected = gimple_call_arg (stmt, 1);
4055 if (TREE_CODE (expected) != ADDR_EXPR
4056 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4057 return false;
4059 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4060 if (!is_gimple_reg_type (etype)
4061 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4062 || TREE_THIS_VOLATILE (etype)
4063 || VECTOR_TYPE_P (etype)
4064 || TREE_CODE (etype) == COMPLEX_TYPE
4065 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4066 might not preserve all the bits. See PR71716. */
4067 || SCALAR_FLOAT_TYPE_P (etype)
4068 || maybe_ne (TYPE_PRECISION (etype),
4069 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4070 return false;
/* The "weak" argument must be a literal 0 or 1 so the fold can
encode it into the internal-fn flag operand. */
4072 tree weak = gimple_call_arg (stmt, 3);
4073 if (!integer_zerop (weak) && !integer_onep (weak))
4074 return false;
4076 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4077 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4078 machine_mode mode = TYPE_MODE (itype);
/* The target must provide a compare-and-swap pattern for this mode. */
4080 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4081 == CODE_FOR_nothing
4082 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4083 return false;
4085 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4086 return false;
4088 return true;
4091 /* Fold
4092 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4093 into
4094 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4095 i = IMAGPART_EXPR <t>;
4096 r = (_Bool) i;
4097 e = REALPART_EXPR <t>; */
4099 void
4100 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4102 gimple *stmt = gsi_stmt (*gsi);
4103 tree fndecl = gimple_call_fndecl (stmt);
4104 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4105 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4106 tree ctype = build_complex_type (itype);
4107 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4108 bool throws = false;
4109 edge e = NULL;
/* Load the current value of the "expected" variable into an SSA name. */
4110 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4111 expected);
4112 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4113 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4114 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4116 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4117 build1 (VIEW_CONVERT_EXPR, itype,
4118 gimple_assign_lhs (g)));
4119 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode weak into bit 8 and the access size N into the low byte,
matching the "w * 256 + N" scheme documented above. */
4121 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4122 + int_size_in_bytes (itype);
4123 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4124 gimple_call_arg (stmt, 0),
4125 gimple_assign_lhs (g),
4126 gimple_call_arg (stmt, 2),
4127 build_int_cst (integer_type_node, flag),
4128 gimple_call_arg (stmt, 4),
4129 gimple_call_arg (stmt, 5));
4130 tree lhs = make_ssa_name (ctype);
4131 gimple_call_set_lhs (g, lhs);
4132 gimple_set_vdef (g, gimple_vdef (stmt));
4133 gimple_set_vuse (g, gimple_vuse (stmt));
4134 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4135 tree oldlhs = gimple_call_lhs (stmt);
4136 if (stmt_can_throw_internal (cfun, stmt))
4138 throws = true;
4139 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4141 gimple_call_set_nothrow (as_a <gcall *> (g),
4142 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4143 gimple_call_set_lhs (stmt, NULL_TREE);
4144 gsi_replace (gsi, g, true);
4145 if (oldlhs)
/* r = (_Bool) IMAGPART_EXPR <t>; the imaginary part carries the
success flag. */
4147 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4148 build1 (IMAGPART_EXPR, itype, lhs));
4149 if (throws)
4151 gsi_insert_on_edge_immediate (e, g);
4152 *gsi = gsi_for_stmt (g);
4154 else
4155 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4156 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4157 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>; the real part carries the value observed. */
4159 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4160 build1 (REALPART_EXPR, itype, lhs));
4161 if (throws && oldlhs == NULL_TREE)
4163 gsi_insert_on_edge_immediate (e, g);
4164 *gsi = gsi_for_stmt (g);
4166 else
4167 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4168 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4170 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4171 VIEW_CONVERT_EXPR,
4172 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4173 gimple_assign_lhs (g)));
4174 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4176 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4177 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4178 *gsi = gsiret;
4181 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4182 doesn't fit into TYPE. The test for overflow should be regardless of
4183 -fwrapv, and even for unsigned types. */
4185 bool
4186 arith_overflowed_p (enum tree_code code, const_tree type,
4187 const_tree arg0, const_tree arg1)
4189 widest2_int warg0 = widest2_int_cst (arg0);
4190 widest2_int warg1 = widest2_int_cst (arg1);
4191 widest2_int wres;
4192 switch (code)
4194 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4195 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4196 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4197 default: gcc_unreachable ();
4199 signop sign = TYPE_SIGN (type);
4200 if (sign == UNSIGNED && wi::neg_p (wres))
4201 return true;
4202 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4205 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4206 The statement may be replaced by another statement, e.g., if the call
4207 simplifies to a constant value. Return true if any changes were made.
4208 It is assumed that the operands have been previously folded. */
4210 static bool
4211 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4213 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4214 tree callee;
4215 bool changed = false;
4216 unsigned i;
4218 /* Fold *& in call arguments. */
4219 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4220 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4222 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4223 if (tmp)
4225 gimple_call_set_arg (stmt, i, tmp);
4226 changed = true;
4230 /* Check for virtual calls that became direct calls. */
4231 callee = gimple_call_fn (stmt);
4232 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
/* If the OBJ_TYPE_REF expression already resolves to a function decl,
   strip the OBJ_TYPE_REF wrapper and call that decl directly.  */
4234 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4236 if (dump_file && virtual_method_call_p (callee)
4237 && !possible_polymorphic_call_target_p
4238 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4239 (OBJ_TYPE_REF_EXPR (callee)))))
4241 fprintf (dump_file,
4242 "Type inheritance inconsistent devirtualization of ");
4243 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4244 fprintf (dump_file, " to ");
4245 print_generic_expr (dump_file, callee, TDF_SLIM);
4246 fprintf (dump_file, "\n");
4249 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4250 changed = true;
/* Otherwise try to devirtualize through the set of possible polymorphic
   call targets; only allowed when we may create new statements
   (i.e. !inplace).  */
4252 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4254 bool final;
4255 vec <cgraph_node *>targets
4256 = possible_polymorphic_call_targets (callee, stmt, &final);
4257 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4259 tree lhs = gimple_call_lhs (stmt);
4260 if (dump_enabled_p ())
4262 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4263 "folding virtual function call to %s\n",
4264 targets.length () == 1
4265 ? targets[0]->name ()
4266 : "__builtin_unreachable");
4268 if (targets.length () == 1)
4270 tree fndecl = targets[0]->decl;
4271 gimple_call_set_fndecl (stmt, fndecl);
4272 changed = true;
4273 /* If changing the call to __cxa_pure_virtual
4274 or similar noreturn function, adjust gimple_call_fntype
4275 too. */
4276 if (gimple_call_noreturn_p (stmt)
4277 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4278 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4279 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4280 == void_type_node))
4281 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4282 /* If the call becomes noreturn, remove the lhs. */
4283 if (lhs
4284 && gimple_call_noreturn_p (stmt)
4285 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4286 || should_remove_lhs_p (lhs)))
4288 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep the SSA name live by assigning it a default def of a
   fresh temporary before dropping it from the call.  */
4290 tree var = create_tmp_var (TREE_TYPE (lhs));
4291 tree def = get_or_create_ssa_default_def (cfun, var);
4292 gimple *new_stmt = gimple_build_assign (lhs, def);
4293 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4295 gimple_call_set_lhs (stmt, NULL_TREE);
4297 maybe_remove_unused_call_args (cfun, stmt);
4299 else
/* No possible target at all: the call is unreachable; replace it
   with a call to __builtin_unreachable.  */
4301 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4302 gimple *new_stmt = gimple_build_call (fndecl, 0);
4303 gimple_set_location (new_stmt, gimple_location (stmt));
4304 /* If the call had a SSA name as lhs morph that into
4305 an uninitialized value. */
4306 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4308 tree var = create_tmp_var (TREE_TYPE (lhs));
4309 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4310 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4311 set_ssa_default_def (cfun, var, lhs);
4313 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4314 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4315 gsi_replace (gsi, new_stmt, false);
4316 return true;
4322 /* Check for indirect calls that became direct calls, and then
4323 no longer require a static chain. */
4324 if (gimple_call_chain (stmt))
4326 tree fn = gimple_call_fndecl (stmt);
4327 if (fn && !DECL_STATIC_CHAIN (fn))
4329 gimple_call_set_chain (stmt, NULL);
4330 changed = true;
4332 else
4334 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4335 if (tmp)
4337 gimple_call_set_chain (stmt, tmp);
4338 changed = true;
/* Everything below may replace the statement, which in-place folding
   must not do; stop here in that mode.  */
4343 if (inplace)
4344 return changed;
4346 /* Check for builtins that CCP can handle using information not
4347 available in the generic fold routines. */
4348 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4350 if (gimple_fold_builtin (gsi))
4351 changed = true;
4353 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4355 changed |= targetm.gimple_fold_builtin (gsi);
/* Fold internal functions: UBSAN checks that can be proven to never
   trigger, GOACC dims, and arithmetic-with-overflow variants.  */
4357 else if (gimple_call_internal_p (stmt))
4359 enum tree_code subcode = ERROR_MARK;
4360 tree result = NULL_TREE;
4361 bool cplx_result = false;
4362 tree overflow = NULL_TREE;
4363 switch (gimple_call_internal_fn (stmt))
4365 case IFN_BUILTIN_EXPECT:
4366 result = fold_builtin_expect (gimple_location (stmt),
4367 gimple_call_arg (stmt, 0),
4368 gimple_call_arg (stmt, 1),
4369 gimple_call_arg (stmt, 2),
4370 NULL_TREE);
4371 break;
4372 case IFN_UBSAN_OBJECT_SIZE:
4374 tree offset = gimple_call_arg (stmt, 1);
4375 tree objsize = gimple_call_arg (stmt, 2);
4376 if (integer_all_onesp (objsize)
4377 || (TREE_CODE (offset) == INTEGER_CST
4378 && TREE_CODE (objsize) == INTEGER_CST
4379 && tree_int_cst_le (offset, objsize)))
4381 replace_call_with_value (gsi, NULL_TREE)4382 return true;
4385 break;
4386 case IFN_UBSAN_PTR:
4387 if (integer_zerop (gimple_call_arg (stmt, 1)))
4389 replace_call_with_value (gsi, NULL_TREE);
4390 return true;
4392 break;
4393 case IFN_UBSAN_BOUNDS:
4395 tree index = gimple_call_arg (stmt, 1);
4396 tree bound = gimple_call_arg (stmt, 2);
4397 if (TREE_CODE (index) == INTEGER_CST
4398 && TREE_CODE (bound) == INTEGER_CST)
4400 index = fold_convert (TREE_TYPE (bound), index);
4401 if (TREE_CODE (index) == INTEGER_CST
4402 && tree_int_cst_le (index, bound))
4404 replace_call_with_value (gsi, NULL_TREE);
4405 return true;
4409 break;
4410 case IFN_GOACC_DIM_SIZE:
4411 case IFN_GOACC_DIM_POS:
4412 result = fold_internal_goacc_dim (stmt);
4413 break;
4414 case IFN_UBSAN_CHECK_ADD:
4415 subcode = PLUS_EXPR;
4416 break;
4417 case IFN_UBSAN_CHECK_SUB:
4418 subcode = MINUS_EXPR;
4419 break;
4420 case IFN_UBSAN_CHECK_MUL:
4421 subcode = MULT_EXPR;
4422 break;
4423 case IFN_ADD_OVERFLOW:
4424 subcode = PLUS_EXPR;
4425 cplx_result = true;
4426 break;
4427 case IFN_SUB_OVERFLOW:
4428 subcode = MINUS_EXPR;
4429 cplx_result = true;
4430 break;
4431 case IFN_MUL_OVERFLOW:
4432 subcode = MULT_EXPR;
4433 cplx_result = true;
4434 break;
4435 default:
4436 break;
/* Try to simplify the overflow-checking arithmetic using algebraic
   identities (x+0, 0+y, y-y, y*1, ...) and constant folding; OVERFLOW
   tracks the imaginary part for the complex-returning IFN_*_OVERFLOW.  */
4438 if (subcode != ERROR_MARK)
4440 tree arg0 = gimple_call_arg (stmt, 0);
4441 tree arg1 = gimple_call_arg (stmt, 1);
4442 tree type = TREE_TYPE (arg0);
4443 if (cplx_result)
4445 tree lhs = gimple_call_lhs (stmt);
4446 if (lhs == NULL_TREE)
4447 type = NULL_TREE;
4448 else
4449 type = TREE_TYPE (TREE_TYPE (lhs));
4451 if (type == NULL_TREE)
4453 /* x = y + 0; x = y - 0; x = y * 0; */
4454 else if (integer_zerop (arg1))
4455 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4456 /* x = 0 + y; x = 0 * y; */
4457 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4458 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4459 /* x = y - y; */
4460 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4461 result = integer_zero_node;
4462 /* x = y * 1; x = 1 * y; */
4463 else if (subcode == MULT_EXPR && integer_onep (arg1))
4464 result = arg0;
4465 else if (subcode == MULT_EXPR && integer_onep (arg0))
4466 result = arg1;
4467 else if (TREE_CODE (arg0) == INTEGER_CST
4468 && TREE_CODE (arg1) == INTEGER_CST)
4470 if (cplx_result)
4471 result = int_const_binop (subcode, fold_convert (type, arg0),
4472 fold_convert (type, arg1));
4473 else
4474 result = int_const_binop (subcode, arg0, arg1);
4475 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4477 if (cplx_result)
4478 overflow = build_one_cst (type);
4479 else
4480 result = NULL_TREE;
4483 if (result)
4485 if (result == integer_zero_node)
4486 result = build_zero_cst (type);
4487 else if (cplx_result && TREE_TYPE (result) != type)
4489 if (TREE_CODE (result) == INTEGER_CST)
4491 if (arith_overflowed_p (PLUS_EXPR, type, result,
4492 integer_zero_node))
4493 overflow = build_one_cst (type);
4495 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4496 && TYPE_UNSIGNED (type))
4497 || (TYPE_PRECISION (type)
4498 < (TYPE_PRECISION (TREE_TYPE (result))
4499 + (TYPE_UNSIGNED (TREE_TYPE (result))
4500 && !TYPE_UNSIGNED (type)))))
4501 result = NULL_TREE;
4502 if (result)
4503 result = fold_convert (type, result);
4508 if (result)
4510 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4511 result = drop_tree_overflow (result);
4512 if (cplx_result)
4514 if (overflow == NULL_TREE)
4515 overflow = build_zero_cst (TREE_TYPE (result));
4516 tree ctype = build_complex_type (TREE_TYPE (result));
4517 if (TREE_CODE (result) == INTEGER_CST
4518 && TREE_CODE (overflow) == INTEGER_CST)
4519 result = build_complex (ctype, result, overflow);
4520 else
4521 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4522 ctype, result, overflow);
4524 if (!update_call_from_tree (gsi, result))
4525 gimplify_and_update_call_from_tree (gsi, result);
4526 changed = true;
4530 return changed;
4534 /* Return true whether NAME has a use on STMT. */
4536 static bool
4537 has_use_on_stmt (tree name, gimple *stmt)
4539 imm_use_iterator iter;
4540 use_operand_p use_p;
4541 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4542 if (USE_STMT (use_p) == stmt)
4543 return true;
4544 return false;
4547 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4548 gimple_simplify.
4550 Replaces *GSI with the simplification result in RCODE and OPS
4551 and the associated statements in *SEQ. Does the replacement
4552 according to INPLACE and returns true if the operation succeeded. */
4554 static bool
4555 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4556 gimple_match_op *res_op,
4557 gimple_seq *seq, bool inplace)
4559 gimple *stmt = gsi_stmt (*gsi);
4560 tree *ops = res_op->ops;
4561 unsigned int num_ops = res_op->num_ops;
4563 /* Play safe and do not allow abnormals to be mentioned in
4564 newly created statements. See also maybe_push_res_to_seq.
4565 As an exception allow such uses if there was a use of the
4566 same SSA name on the old stmt. */
4567 for (unsigned int i = 0; i < num_ops; ++i)
4568 if (TREE_CODE (ops[i]) == SSA_NAME
4569 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4570 && !has_use_on_stmt (ops[i], stmt))
4571 return false;
/* The same check for SSA names buried inside a comparison operand.  */
4573 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4574 for (unsigned int i = 0; i < 2; ++i)
4575 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4576 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4577 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4578 return false;
4580 /* Don't insert new statements when INPLACE is true, even if we could
4581 reuse STMT for the final statement. */
4582 if (inplace && !gimple_seq_empty_p (*seq))
4583 return false;
/* Case 1: STMT is a GIMPLE_COND; rewrite its condition from the
   simplification result.  */
4585 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4587 gcc_assert (res_op->code.is_tree_code ());
4588 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4589 /* GIMPLE_CONDs condition may not throw. */
4590 && (!flag_exceptions
4591 || !cfun->can_throw_non_call_exceptions
4592 || !operation_could_trap_p (res_op->code,
4593 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4594 false, NULL_TREE)))
4595 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4596 else if (res_op->code == SSA_NAME)
4597 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4598 build_zero_cst (TREE_TYPE (ops[0])));
4599 else if (res_op->code == INTEGER_CST)
4601 if (integer_zerop (ops[0]))
4602 gimple_cond_make_false (cond_stmt);
4603 else
4604 gimple_cond_make_true (cond_stmt);
4606 else if (!inplace)
4608 tree res = maybe_push_res_to_seq (res_op, seq);
4609 if (!res)
4610 return false;
4611 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4612 build_zero_cst (TREE_TYPE (res)));
4614 else
4615 return false;
4616 if (dump_file && (dump_flags & TDF_DETAILS))
4618 fprintf (dump_file, "gimple_simplified to ");
4619 if (!gimple_seq_empty_p (*seq))
4620 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4621 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4622 0, TDF_SLIM);
4624 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4625 return true;
/* Case 2: an assignment whose RHS can be replaced with the simplified
   operation (in place only if the new RHS needs no more operand slots).  */
4627 else if (is_gimple_assign (stmt)
4628 && res_op->code.is_tree_code ())
4630 if (!inplace
4631 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4633 maybe_build_generic_op (res_op);
4634 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4635 res_op->op_or_null (0),
4636 res_op->op_or_null (1),
4637 res_op->op_or_null (2));
4638 if (dump_file && (dump_flags & TDF_DETAILS))
4640 fprintf (dump_file, "gimple_simplified to ");
4641 if (!gimple_seq_empty_p (*seq))
4642 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4643 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4644 0, TDF_SLIM);
4646 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4647 return true;
/* Case 3: a call simplified to the same combined function; only the
   arguments changed, so update them in place.  */
4650 else if (res_op->code.is_fn_code ()
4651 && gimple_call_combined_fn (stmt) == res_op->code)
4653 gcc_assert (num_ops == gimple_call_num_args (stmt));
4654 for (unsigned int i = 0; i < num_ops; ++i)
4655 gimple_call_set_arg (stmt, i, ops[i]);
4656 if (dump_file && (dump_flags & TDF_DETAILS))
4658 fprintf (dump_file, "gimple_simplified to ");
4659 if (!gimple_seq_empty_p (*seq))
4660 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4661 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4663 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4664 return true;
/* Case 4: otherwise, when not in place, materialize the result into SEQ
   targeting STMT's lhs and replace the whole statement.  */
4666 else if (!inplace)
4668 if (gimple_has_lhs (stmt))
4670 tree lhs = gimple_get_lhs (stmt);
4671 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4672 return false;
4673 if (dump_file && (dump_flags & TDF_DETAILS))
4675 fprintf (dump_file, "gimple_simplified to ");
4676 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4678 gsi_replace_with_seq_vops (gsi, *seq);
4679 return true;
4681 else
4682 gcc_unreachable ();
4685 return false;
4688 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4690 static bool
4691 maybe_canonicalize_mem_ref_addr (tree *t)
4693 bool res = false;
4695 if (TREE_CODE (*t) == ADDR_EXPR)
4696 t = &TREE_OPERAND (*t, 0);
4698 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4699 generic vector extension. The actual vector referenced is
4700 view-converted to an array type for this purpose. If the index
4701 is constant the canonical representation in the middle-end is a
4702 BIT_FIELD_REF so re-write the former to the latter here. */
4703 if (TREE_CODE (*t) == ARRAY_REF
4704 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4705 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4706 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4708 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4709 if (VECTOR_TYPE_P (vtype))
4711 tree low = array_ref_low_bound (*t);
4712 if (TREE_CODE (low) == INTEGER_CST)
4714 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of the accessed element and verify the
   access stays within the vector before rewriting.  */
4716 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4717 wi::to_widest (low));
4718 idx = wi::mul (idx, wi::to_widest
4719 (TYPE_SIZE (TREE_TYPE (*t))));
4720 widest_int ext
4721 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4722 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4724 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4725 TREE_TYPE (*t),
4726 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4727 TYPE_SIZE (TREE_TYPE (*t)),
4728 wide_int_to_tree (bitsizetype, idx));
4729 res = true;
/* Strip handled components so *T points at the base reference.  */
4736 while (handled_component_p (*t))
4737 t = &TREE_OPERAND (*t, 0);
4739 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4740 of invariant addresses into a SSA name MEM_REF address. */
4741 if (TREE_CODE (*t) == MEM_REF
4742 || TREE_CODE (*t) == TARGET_MEM_REF)
4744 tree addr = TREE_OPERAND (*t, 0);
4745 if (TREE_CODE (addr) == ADDR_EXPR
4746 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4747 || handled_component_p (TREE_OPERAND (addr, 0))))
4749 tree base;
4750 poly_int64 coffset;
4751 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4752 &coffset);
4753 if (!base)
4754 gcc_unreachable ();
/* Fold the component's constant offset into the MEM_REF offset
   operand and take the address of the bare base object.  */
4756 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4757 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4758 TREE_OPERAND (*t, 1),
4759 size_int (coffset));
4760 res = true;
4762 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4763 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4766 /* Canonicalize back MEM_REFs to plain reference trees if the object
4767 accessed is a decl that has the same access semantics as the MEM_REF. */
4768 if (TREE_CODE (*t) == MEM_REF
4769 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4770 && integer_zerop (TREE_OPERAND (*t, 1))
4771 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4773 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4774 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4775 if (/* Same volatile qualification. */
4776 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4777 /* Same TBAA behavior with -fstrict-aliasing. */
4778 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4779 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4780 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4781 /* Same alignment. */
4782 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4783 /* We have to look out here to not drop a required conversion
4784 from the rhs to the lhs if *t appears on the lhs or vice-versa
4785 if it appears on the rhs. Thus require strict type
4786 compatibility. */
4787 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4789 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4790 res = true;
4794 /* Canonicalize TARGET_MEM_REF in particular with respect to
4795 the indexes becoming constant. */
4796 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4798 tree tem = maybe_fold_tmr (*t);
4799 if (tem)
4801 *t = tem;
4802 res = true;
4806 return res;
4809 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4810 distinguishes both cases. */
4812 static bool
4813 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4815 bool changed = false;
4816 gimple *stmt = gsi_stmt (*gsi);
4817 bool nowarning = gimple_no_warning_p (stmt);
4818 unsigned i;
/* Queue overflow warnings; they are emitted (or dropped) by the matching
   fold_undefer_overflow_warnings call at the end of this function.  */
4819 fold_defer_overflow_warnings ();
4821 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4822 after propagation.
4823 ??? This shouldn't be done in generic folding but in the
4824 propagation helpers which also know whether an address was
4825 propagated.
4826 Also canonicalize operand order. */
4827 switch (gimple_code (stmt))
4829 case GIMPLE_ASSIGN:
4830 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4832 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4833 if ((REFERENCE_CLASS_P (*rhs)
4834 || TREE_CODE (*rhs) == ADDR_EXPR)
4835 && maybe_canonicalize_mem_ref_addr (rhs))
4836 changed = true;
4837 tree *lhs = gimple_assign_lhs_ptr (stmt);
4838 if (REFERENCE_CLASS_P (*lhs)
4839 && maybe_canonicalize_mem_ref_addr (lhs))
4840 changed = true;
4842 else
4844 /* Canonicalize operand order. */
4845 enum tree_code code = gimple_assign_rhs_code (stmt);
4846 if (TREE_CODE_CLASS (code) == tcc_comparison
4847 || commutative_tree_code (code)
4848 || commutative_ternary_tree_code (code))
4850 tree rhs1 = gimple_assign_rhs1 (stmt);
4851 tree rhs2 = gimple_assign_rhs2 (stmt);
4852 if (tree_swap_operands_p (rhs1, rhs2))
4854 gimple_assign_set_rhs1 (stmt, rhs2);
4855 gimple_assign_set_rhs2 (stmt, rhs1);
4856 if (TREE_CODE_CLASS (code) == tcc_comparison)
4857 gimple_assign_set_rhs_code (stmt,
4858 swap_tree_comparison (code));
4859 changed = true;
4863 break;
4864 case GIMPLE_CALL:
4866 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4868 tree *arg = gimple_call_arg_ptr (stmt, i);
4869 if (REFERENCE_CLASS_P (*arg)
4870 && maybe_canonicalize_mem_ref_addr (arg))
4871 changed = true;
4873 tree *lhs = gimple_call_lhs_ptr (stmt);
4874 if (*lhs
4875 && REFERENCE_CLASS_P (*lhs)
4876 && maybe_canonicalize_mem_ref_addr (lhs))
4877 changed = true;
4878 break;
4880 case GIMPLE_ASM:
4882 gasm *asm_stmt = as_a <gasm *> (stmt);
4883 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4885 tree link = gimple_asm_output_op (asm_stmt, i);
4886 tree op = TREE_VALUE (link);
4887 if (REFERENCE_CLASS_P (op)
4888 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4889 changed = true;
4891 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4893 tree link = gimple_asm_input_op (asm_stmt, i);
4894 tree op = TREE_VALUE (link);
4895 if ((REFERENCE_CLASS_P (op)
4896 || TREE_CODE (op) == ADDR_EXPR)
4897 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4898 changed = true;
4901 break;
4902 case GIMPLE_DEBUG:
4903 if (gimple_debug_bind_p (stmt))
4905 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4906 if (*val
4907 && (REFERENCE_CLASS_P (*val)
4908 || TREE_CODE (*val) == ADDR_EXPR)
4909 && maybe_canonicalize_mem_ref_addr (val))
4910 changed = true;
4912 break;
4913 case GIMPLE_COND:
4915 /* Canonicalize operand order. */
4916 tree lhs = gimple_cond_lhs (stmt);
4917 tree rhs = gimple_cond_rhs (stmt);
4918 if (tree_swap_operands_p (lhs, rhs))
4920 gcond *gc = as_a <gcond *> (stmt);
4921 gimple_cond_set_lhs (gc, rhs);
4922 gimple_cond_set_rhs (gc, lhs);
4923 gimple_cond_set_code (gc,
4924 swap_tree_comparison (gimple_cond_code (gc)));
4925 changed = true;
4928 default:;
4931 /* Dispatch to pattern-based folding. */
/* In-place folding is restricted to assignments and conditions, which
   replace_stmt_with_simplification can rewrite without emitting new
   statements.  */
4932 if (!inplace
4933 || is_gimple_assign (stmt)
4934 || gimple_code (stmt) == GIMPLE_COND)
4936 gimple_seq seq = NULL;
4937 gimple_match_op res_op;
4938 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4939 valueize, valueize))
4941 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4942 changed = true;
4943 else
4944 gimple_seq_discard (seq);
/* The statement may have been replaced above; re-fetch it.  */
4948 stmt = gsi_stmt (*gsi);
4950 /* Fold the main computation performed by the statement. */
4951 switch (gimple_code (stmt))
4953 case GIMPLE_ASSIGN:
4955 /* Try to canonicalize for boolean-typed X the comparisons
4956 X == 0, X == 1, X != 0, and X != 1. */
4957 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4958 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4960 tree lhs = gimple_assign_lhs (stmt);
4961 tree op1 = gimple_assign_rhs1 (stmt);
4962 tree op2 = gimple_assign_rhs2 (stmt);
4963 tree type = TREE_TYPE (op1);
4965 /* Check whether the comparison operands are of the same boolean
4966 type as the result type is.
4967 Check that second operand is an integer-constant with value
4968 one or zero. */
4969 if (TREE_CODE (op2) == INTEGER_CST
4970 && (integer_zerop (op2) || integer_onep (op2))
4971 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4973 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4974 bool is_logical_not = false;
4976 /* X == 0 and X != 1 is a logical-not.of X
4977 X == 1 and X != 0 is X */
4978 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4979 || (cmp_code == NE_EXPR && integer_onep (op2)))
4980 is_logical_not = true;
4982 if (is_logical_not == false)
4983 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4984 /* Only for one-bit precision typed X the transformation
4985 !X -> ~X is valied. */
4986 else if (TYPE_PRECISION (type) == 1)
4987 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4988 /* Otherwise we use !X -> X ^ 1. */
4989 else
4990 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4991 build_int_cst (type, 1));
4992 changed = true;
4993 break;
4997 unsigned old_num_ops = gimple_num_ops (stmt);
4998 tree lhs = gimple_assign_lhs (stmt);
4999 tree new_rhs = fold_gimple_assign (gsi);
5000 if (new_rhs
5001 && !useless_type_conversion_p (TREE_TYPE (lhs),
5002 TREE_TYPE (new_rhs)))
5003 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place, only accept a new RHS that needs fewer operand slots.  */
5004 if (new_rhs
5005 && (!inplace
5006 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5008 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5009 changed = true;
5011 break;
5014 case GIMPLE_CALL:
5015 changed |= gimple_fold_call (gsi, inplace);
5016 break;
5018 case GIMPLE_ASM:
5019 /* Fold *& in asm operands. */
5021 gasm *asm_stmt = as_a <gasm *> (stmt);
5022 size_t noutputs;
5023 const char **oconstraints;
5024 const char *constraint;
5025 bool allows_mem, allows_reg;
5027 noutputs = gimple_asm_noutputs (asm_stmt);
5028 oconstraints = XALLOCAVEC (const char *, noutputs);
5030 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5032 tree link = gimple_asm_output_op (asm_stmt, i);
5033 tree op = TREE_VALUE (link);
5034 oconstraints[i]
5035 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5036 if (REFERENCE_CLASS_P (op)
5037 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5039 TREE_VALUE (link) = op;
5040 changed = true;
5043 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5045 tree link = gimple_asm_input_op (asm_stmt, i);
5046 tree op = TREE_VALUE (link);
5047 constraint
5048 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5049 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5050 oconstraints, &allows_mem, &allows_reg);
5051 if (REFERENCE_CLASS_P (op)
5052 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5053 != NULL_TREE)
5055 TREE_VALUE (link) = op;
5056 changed = true;
5060 break;
5062 case GIMPLE_DEBUG:
5063 if (gimple_debug_bind_p (stmt))
5065 tree val = gimple_debug_bind_get_value (stmt);
5066 if (val
5067 && REFERENCE_CLASS_P (val))
5069 tree tem = maybe_fold_reference (val, false);
5070 if (tem)
5072 gimple_debug_bind_set_value (stmt, tem);
5073 changed = true;
5076 else if (val
5077 && TREE_CODE (val) == ADDR_EXPR)
5079 tree ref = TREE_OPERAND (val, 0);
5080 tree tem = maybe_fold_reference (ref, false);
5081 if (tem)
5083 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5084 gimple_debug_bind_set_value (stmt, tem);
5085 changed = true;
5089 break;
5091 case GIMPLE_RETURN:
5093 greturn *ret_stmt = as_a<greturn *> (stmt);
5094 tree ret = gimple_return_retval(ret_stmt);
/* Propagate a valueized copy into the return value if allowed.  */
5096 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5098 tree val = valueize (ret);
5099 if (val && val != ret
5100 && may_propagate_copy (ret, val))
5102 gimple_return_set_retval (ret_stmt, val);
5103 changed = true;
5107 break;
5109 default:;
5112 stmt = gsi_stmt (*gsi);
5114 /* Fold *& on the lhs. */
5115 if (gimple_has_lhs (stmt))
5117 tree lhs = gimple_get_lhs (stmt);
5118 if (lhs && REFERENCE_CLASS_P (lhs))
5120 tree new_lhs = maybe_fold_reference (lhs, true);
5121 if (new_lhs)
5123 gimple_set_lhs (stmt, new_lhs);
5124 changed = true;
5129 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5130 return changed;
5133 /* Valueziation callback that ends up not following SSA edges. */
5135 tree
5136 no_follow_ssa_edges (tree)
5138 return NULL_TREE;
5141 /* Valueization callback that ends up following single-use SSA edges only. */
5143 tree
5144 follow_single_use_edges (tree val)
5146 if (TREE_CODE (val) == SSA_NAME
5147 && !has_single_use (val))
5148 return NULL_TREE;
5149 return val;
5152 /* Valueization callback that follows all SSA edges. */
5154 tree
5155 follow_all_ssa_edges (tree val)
5157 return val;
5160 /* Fold the statement pointed to by GSI. In some cases, this function may
5161 replace the whole statement with a new one. Returns true iff folding
5162 makes any changes.
5163 The statement pointed to by GSI should be in valid gimple form but may
5164 be in unfolded state as resulting from for example constant propagation
5165 which can produce *&x = 0. */
5167 bool
5168 fold_stmt (gimple_stmt_iterator *gsi)
5170 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5173 bool
5174 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5176 return fold_stmt_1 (gsi, false, valueize);
5179 /* Perform the minimal folding on statement *GSI. Only operations like
5180 *&x created by constant propagation are handled. The statement cannot
5181 be replaced with a new one. Return true if the statement was
5182 changed, false otherwise.
5183 The statement *GSI should be in valid gimple form but may
5184 be in unfolded state as resulting from for example constant propagation
5185 which can produce *&x = 0. */
5187 bool
5188 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5190 gimple *stmt = gsi_stmt (*gsi);
5191 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5192 gcc_assert (gsi_stmt (*gsi) == stmt);
5193 return changed;
5196 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5197 if EXPR is null or we don't know how.
5198 If non-null, the result always has boolean type. */
5200 static tree
5201 canonicalize_bool (tree expr, bool invert)
5203 if (!expr)
5204 return NULL_TREE;
5205 else if (invert)
5207 if (integer_nonzerop (expr))
5208 return boolean_false_node;
5209 else if (integer_zerop (expr))
5210 return boolean_true_node;
5211 else if (TREE_CODE (expr) == SSA_NAME)
5212 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5213 build_int_cst (TREE_TYPE (expr), 0));
5214 else if (COMPARISON_CLASS_P (expr))
5215 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5216 boolean_type_node,
5217 TREE_OPERAND (expr, 0),
5218 TREE_OPERAND (expr, 1));
5219 else
5220 return NULL_TREE;
5222 else
5224 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5225 return expr;
5226 if (integer_nonzerop (expr))
5227 return boolean_true_node;
5228 else if (integer_zerop (expr))
5229 return boolean_false_node;
5230 else if (TREE_CODE (expr) == SSA_NAME)
5231 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5232 build_int_cst (TREE_TYPE (expr), 0));
5233 else if (COMPARISON_CLASS_P (expr))
5234 return fold_build2 (TREE_CODE (expr),
5235 boolean_type_node,
5236 TREE_OPERAND (expr, 0),
5237 TREE_OPERAND (expr, 1));
5238 else
5239 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Returns true only when equivalence can be proven; false
   means "unknown", not "different".  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally the comparison (OP1 CODE OP2).  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to (NAME != 0) and to (NAME == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look through the defining assignment of EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) mean the comparison itself...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ...while (name == 0) and (name != 1) mean its inverse.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
5301 /* Check to see if two boolean expressions OP1 and OP2 are logically
5302 equivalent. */
5304 static bool
5305 same_bool_result_p (const_tree op1, const_tree op2)
5307 /* Simple cases first. */
5308 if (operand_equal_p (op1, op2, 0))
5309 return true;
5311 /* Check the cases where at least one of the operands is a comparison.
5312 These are a bit smarter than operand_equal_p in that they apply some
5313 identifies on SSA_NAMEs. */
5314 if (COMPARISON_CLASS_P (op2)
5315 && same_bool_comparison_p (op1, TREE_CODE (op2),
5316 TREE_OPERAND (op2, 0),
5317 TREE_OPERAND (op2, 1)))
5318 return true;
5319 if (COMPARISON_CLASS_P (op1)
5320 && same_bool_comparison_p (op2, TREE_CODE (op1),
5321 TREE_OPERAND (op1, 0),
5322 TREE_OPERAND (op1, 1)))
5323 return true;
5325 /* Default case. */
5326 return false;
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* helpers call into each other (via DeMorgan's law
   for inverted operands), so all six prototypes are needed up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5350 /* Helper function for and_comparisons_1: try to simplify the AND of the
5351 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5352 If INVERT is true, invert the value of the VAR before doing the AND.
5353 Return NULL_EXPR if we can't simplify this to a single expression. */
5355 static tree
5356 and_var_with_comparison (tree var, bool invert,
5357 enum tree_code code2, tree op2a, tree op2b)
5359 tree t;
5360 gimple *stmt = SSA_NAME_DEF_STMT (var);
5362 /* We can only deal with variables whose definitions are assignments. */
5363 if (!is_gimple_assign (stmt))
5364 return NULL_TREE;
5366 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5367 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5368 Then we only have to consider the simpler non-inverted cases. */
5369 if (invert)
5370 t = or_var_with_comparison_1 (stmt,
5371 invert_tree_comparison (code2, false),
5372 op2a, op2b);
5373 else
5374 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5375 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.
     Record whether (OP2A CODE2 OP2B) is a truth test or a falsehood
     test of OP2A; the reassociation code below reuses these.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: negative when op1b < op2b, zero
	 when equal, positive when op1b > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  If the NE test implies
	 the other comparison, the other comparison alone is the AND.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= C AND x <= C) => (x == C).  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the negation of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5795 /* Try to simplify the AND of two comparisons, specified by
5796 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5797 If this can be simplified to a single expression (without requiring
5798 introducing more SSA variables to hold intermediate values),
5799 return the resulting tree. Otherwise return NULL_TREE.
5800 If the result expression is non-null, it has boolean type. */
5802 tree
5803 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5804 enum tree_code code2, tree op2a, tree op2b)
5806 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5807 if (t)
5808 return t;
5809 else
5810 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5813 /* Helper function for or_comparisons_1: try to simplify the OR of the
5814 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5815 If INVERT is true, invert the value of VAR before doing the OR.
5816 Return NULL_EXPR if we can't simplify this to a single expression. */
5818 static tree
5819 or_var_with_comparison (tree var, bool invert,
5820 enum tree_code code2, tree op2a, tree op2b)
5822 tree t;
5823 gimple *stmt = SSA_NAME_DEF_STMT (var);
5825 /* We can only deal with variables whose definitions are assignments. */
5826 if (!is_gimple_assign (stmt))
5827 return NULL_TREE;
5829 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5830 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5831 Then we only have to consider the simpler non-inverted cases. */
5832 if (invert)
5833 t = and_var_with_comparison_1 (stmt,
5834 invert_tree_comparison (code2, false),
5835 op2a, op2b);
5836 else
5837 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5838 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .
     Record whether (OP2A CODE2 OP2B) is a truth test or a falsehood
     test of OP2A; the reassociation code below reuses these.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: negative when op1b < op2b, zero
	 when equal, positive when op1b > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C OR x > C) => (x != C).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the negation of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6259 /* Try to simplify the OR of two comparisons, specified by
6260 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6261 If this can be simplified to a single expression (without requiring
6262 introducing more SSA variables to hold intermediate values),
6263 return the resulting tree. Otherwise return NULL_TREE.
6264 If the result expression is non-null, it has boolean type. */
6266 tree
6267 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6268 enum tree_code code2, tree op2a, tree op2b)
6270 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6271 if (t)
6272 return t;
6273 else
6274 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6278 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6280 Either NULL_TREE, a simplified but non-constant or a constant
6281 is returned.
6283 ??? This should go into a gimple-fold-inline.h file to be eventually
6284 privatized with the single valueize function used in the various TUs
6285 to avoid the indirect function call overhead. */
6287 tree
6288 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6289 tree (*gvalueize) (tree))
6291 gimple_match_op res_op;
6292 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6293 edges if there are intermediate VARYING defs. For this reason
6294 do not follow SSA edges here even though SCCVN can technically
6295 just deal fine with that. */
6296 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6298 tree res = NULL_TREE;
6299 if (gimple_simplified_result_is_gimple_val (&res_op))
6300 res = res_op.ops[0];
6301 else if (mprts_hook)
6302 res = mprts_hook (&res_op);
6303 if (res)
6305 if (dump_file && dump_flags & TDF_DETAILS)
6307 fprintf (dump_file, "Match-and-simplified ");
6308 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6309 fprintf (dump_file, " to ");
6310 print_generic_expr (dump_file, res);
6311 fprintf (dump_file, "\n");
6313 return res;
6317 location_t loc = gimple_location (stmt);
6318 switch (gimple_code (stmt))
6320 case GIMPLE_ASSIGN:
6322 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6324 switch (get_gimple_rhs_class (subcode))
6326 case GIMPLE_SINGLE_RHS:
6328 tree rhs = gimple_assign_rhs1 (stmt);
6329 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6331 if (TREE_CODE (rhs) == SSA_NAME)
6333 /* If the RHS is an SSA_NAME, return its known constant value,
6334 if any. */
6335 return (*valueize) (rhs);
6337 /* Handle propagating invariant addresses into address
6338 operations. */
6339 else if (TREE_CODE (rhs) == ADDR_EXPR
6340 && !is_gimple_min_invariant (rhs))
6342 poly_int64 offset = 0;
6343 tree base;
6344 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6345 &offset,
6346 valueize);
6347 if (base
6348 && (CONSTANT_CLASS_P (base)
6349 || decl_address_invariant_p (base)))
6350 return build_invariant_address (TREE_TYPE (rhs),
6351 base, offset);
6353 else if (TREE_CODE (rhs) == CONSTRUCTOR
6354 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6355 && known_eq (CONSTRUCTOR_NELTS (rhs),
6356 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6358 unsigned i, nelts;
6359 tree val;
6361 nelts = CONSTRUCTOR_NELTS (rhs);
6362 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6363 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6365 val = (*valueize) (val);
6366 if (TREE_CODE (val) == INTEGER_CST
6367 || TREE_CODE (val) == REAL_CST
6368 || TREE_CODE (val) == FIXED_CST)
6369 vec.quick_push (val);
6370 else
6371 return NULL_TREE;
6374 return vec.build ();
6376 if (subcode == OBJ_TYPE_REF)
6378 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6379 /* If callee is constant, we can fold away the wrapper. */
6380 if (is_gimple_min_invariant (val))
6381 return val;
6384 if (kind == tcc_reference)
6386 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6387 || TREE_CODE (rhs) == REALPART_EXPR
6388 || TREE_CODE (rhs) == IMAGPART_EXPR)
6389 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6391 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6392 return fold_unary_loc (EXPR_LOCATION (rhs),
6393 TREE_CODE (rhs),
6394 TREE_TYPE (rhs), val);
6396 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6397 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6399 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6400 return fold_ternary_loc (EXPR_LOCATION (rhs),
6401 TREE_CODE (rhs),
6402 TREE_TYPE (rhs), val,
6403 TREE_OPERAND (rhs, 1),
6404 TREE_OPERAND (rhs, 2));
6406 else if (TREE_CODE (rhs) == MEM_REF
6407 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6409 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6410 if (TREE_CODE (val) == ADDR_EXPR
6411 && is_gimple_min_invariant (val))
6413 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6414 unshare_expr (val),
6415 TREE_OPERAND (rhs, 1));
6416 if (tem)
6417 rhs = tem;
6420 return fold_const_aggregate_ref_1 (rhs, valueize);
6422 else if (kind == tcc_declaration)
6423 return get_symbol_constant_value (rhs);
6424 return rhs;
6427 case GIMPLE_UNARY_RHS:
6428 return NULL_TREE;
6430 case GIMPLE_BINARY_RHS:
6431 /* Translate &x + CST into an invariant form suitable for
6432 further propagation. */
6433 if (subcode == POINTER_PLUS_EXPR)
6435 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6436 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6437 if (TREE_CODE (op0) == ADDR_EXPR
6438 && TREE_CODE (op1) == INTEGER_CST)
6440 tree off = fold_convert (ptr_type_node, op1);
6441 return build_fold_addr_expr_loc
6442 (loc,
6443 fold_build2 (MEM_REF,
6444 TREE_TYPE (TREE_TYPE (op0)),
6445 unshare_expr (op0), off));
6448 /* Canonicalize bool != 0 and bool == 0 appearing after
6449 valueization. While gimple_simplify handles this
6450 it can get confused by the ~X == 1 -> X == 0 transform
6451 which we cant reduce to a SSA name or a constant
6452 (and we have no way to tell gimple_simplify to not
6453 consider those transforms in the first place). */
6454 else if (subcode == EQ_EXPR
6455 || subcode == NE_EXPR)
6457 tree lhs = gimple_assign_lhs (stmt);
6458 tree op0 = gimple_assign_rhs1 (stmt);
6459 if (useless_type_conversion_p (TREE_TYPE (lhs),
6460 TREE_TYPE (op0)))
6462 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6463 op0 = (*valueize) (op0);
6464 if (TREE_CODE (op0) == INTEGER_CST)
6465 std::swap (op0, op1);
6466 if (TREE_CODE (op1) == INTEGER_CST
6467 && ((subcode == NE_EXPR && integer_zerop (op1))
6468 || (subcode == EQ_EXPR && integer_onep (op1))))
6469 return op0;
6472 return NULL_TREE;
6474 case GIMPLE_TERNARY_RHS:
6476 /* Handle ternary operators that can appear in GIMPLE form. */
6477 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6478 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6479 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6480 return fold_ternary_loc (loc, subcode,
6481 gimple_expr_type (stmt), op0, op1, op2);
6484 default:
6485 gcc_unreachable ();
6489 case GIMPLE_CALL:
6491 tree fn;
6492 gcall *call_stmt = as_a <gcall *> (stmt);
6494 if (gimple_call_internal_p (stmt))
6496 enum tree_code subcode = ERROR_MARK;
6497 switch (gimple_call_internal_fn (stmt))
6499 case IFN_UBSAN_CHECK_ADD:
6500 subcode = PLUS_EXPR;
6501 break;
6502 case IFN_UBSAN_CHECK_SUB:
6503 subcode = MINUS_EXPR;
6504 break;
6505 case IFN_UBSAN_CHECK_MUL:
6506 subcode = MULT_EXPR;
6507 break;
6508 case IFN_BUILTIN_EXPECT:
6510 tree arg0 = gimple_call_arg (stmt, 0);
6511 tree op0 = (*valueize) (arg0);
6512 if (TREE_CODE (op0) == INTEGER_CST)
6513 return op0;
6514 return NULL_TREE;
6516 default:
6517 return NULL_TREE;
6519 tree arg0 = gimple_call_arg (stmt, 0);
6520 tree arg1 = gimple_call_arg (stmt, 1);
6521 tree op0 = (*valueize) (arg0);
6522 tree op1 = (*valueize) (arg1);
6524 if (TREE_CODE (op0) != INTEGER_CST
6525 || TREE_CODE (op1) != INTEGER_CST)
6527 switch (subcode)
6529 case MULT_EXPR:
6530 /* x * 0 = 0 * x = 0 without overflow. */
6531 if (integer_zerop (op0) || integer_zerop (op1))
6532 return build_zero_cst (TREE_TYPE (arg0));
6533 break;
6534 case MINUS_EXPR:
6535 /* y - y = 0 without overflow. */
6536 if (operand_equal_p (op0, op1, 0))
6537 return build_zero_cst (TREE_TYPE (arg0));
6538 break;
6539 default:
6540 break;
6543 tree res
6544 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6545 if (res
6546 && TREE_CODE (res) == INTEGER_CST
6547 && !TREE_OVERFLOW (res))
6548 return res;
6549 return NULL_TREE;
6552 fn = (*valueize) (gimple_call_fn (stmt));
6553 if (TREE_CODE (fn) == ADDR_EXPR
6554 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6555 && gimple_builtin_call_types_compatible_p (stmt,
6556 TREE_OPERAND (fn, 0)))
6558 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6559 tree retval;
6560 unsigned i;
6561 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6562 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6563 retval = fold_builtin_call_array (loc,
6564 gimple_call_return_type (call_stmt),
6565 fn, gimple_call_num_args (stmt), args);
6566 if (retval)
6568 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6569 STRIP_NOPS (retval);
6570 retval = fold_convert (gimple_call_return_type (call_stmt),
6571 retval);
6573 return retval;
6575 return NULL_TREE;
6578 default:
6579 return NULL_TREE;
6583 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6584 Returns NULL_TREE if folding to a constant is not possible, otherwise
6585 returns a constant according to is_gimple_min_invariant. */
6587 tree
6588 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6590 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6591 if (res && is_gimple_min_invariant (res))
6592 return res;
6593 return NULL_TREE;
6597 /* The following set of functions are supposed to fold references using
6598 their constant initializers. */
6600 /* See if we can find constructor defining value of BASE.
6601 When we know the consructor with constant offset (such as
6602 base is array[40] and we do know constructor of array), then
6603 BIT_OFFSET is adjusted accordingly.
6605 As a special case, return error_mark_node when constructor
6606 is not explicitly available, but it is known to be zero
6607 such as 'static const int a;'. */
6608 static tree
6609 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6610 tree (*valueize)(tree))
6612 poly_int64 bit_offset2, size, max_size;
6613 bool reverse;
6615 if (TREE_CODE (base) == MEM_REF)
6617 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6618 if (!boff.to_shwi (bit_offset))
6619 return NULL_TREE;
6621 if (valueize
6622 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6623 base = valueize (TREE_OPERAND (base, 0));
6624 if (!base || TREE_CODE (base) != ADDR_EXPR)
6625 return NULL_TREE;
6626 base = TREE_OPERAND (base, 0);
6628 else if (valueize
6629 && TREE_CODE (base) == SSA_NAME)
6630 base = valueize (base);
6632 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6633 DECL_INITIAL. If BASE is a nested reference into another
6634 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6635 the inner reference. */
6636 switch (TREE_CODE (base))
6638 case VAR_DECL:
6639 case CONST_DECL:
6641 tree init = ctor_for_folding (base);
6643 /* Our semantic is exact opposite of ctor_for_folding;
6644 NULL means unknown, while error_mark_node is 0. */
6645 if (init == error_mark_node)
6646 return NULL_TREE;
6647 if (!init)
6648 return error_mark_node;
6649 return init;
6652 case VIEW_CONVERT_EXPR:
6653 return get_base_constructor (TREE_OPERAND (base, 0),
6654 bit_offset, valueize);
6656 case ARRAY_REF:
6657 case COMPONENT_REF:
6658 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6659 &reverse);
6660 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6661 return NULL_TREE;
6662 *bit_offset += bit_offset2;
6663 return get_base_constructor (base, bit_offset, valueize);
6665 case CONSTRUCTOR:
6666 return base;
6668 default:
6669 if (CONSTANT_CLASS_P (base))
6670 return base;
6672 return NULL_TREE;
6676 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6677 to the memory at bit OFFSET. When non-null, TYPE is the expected
6678 type of the reference; otherwise the type of the referenced element
6679 is used instead. When SIZE is zero, attempt to fold a reference to
6680 the entire element which OFFSET refers to. Increment *SUBOFF by
6681 the bit offset of the accessed element. */
6683 static tree
6684 fold_array_ctor_reference (tree type, tree ctor,
6685 unsigned HOST_WIDE_INT offset,
6686 unsigned HOST_WIDE_INT size,
6687 tree from_decl,
6688 unsigned HOST_WIDE_INT *suboff)
6690 offset_int low_bound;
6691 offset_int elt_size;
6692 offset_int access_index;
6693 tree domain_type = NULL_TREE;
6694 HOST_WIDE_INT inner_offset;
6696 /* Compute low bound and elt size. */
6697 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6698 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6699 if (domain_type && TYPE_MIN_VALUE (domain_type))
6701 /* Static constructors for variably sized objects makes no sense. */
6702 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6703 return NULL_TREE;
6704 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6706 else
6707 low_bound = 0;
6708 /* Static constructors for variably sized objects makes no sense. */
6709 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6710 return NULL_TREE;
6711 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6713 /* When TYPE is non-null, verify that it specifies a constant-sized
6714 accessed not larger than size of array element. */
6715 if (type
6716 && (!TYPE_SIZE_UNIT (type)
6717 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6718 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6719 || elt_size == 0))
6720 return NULL_TREE;
6722 /* Compute the array index we look for. */
6723 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6724 elt_size);
6725 access_index += low_bound;
6727 /* And offset within the access. */
6728 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6730 /* See if the array field is large enough to span whole access. We do not
6731 care to fold accesses spanning multiple array indexes. */
6732 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6733 return NULL_TREE;
6734 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6736 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6738 /* For the final reference to the entire accessed element
6739 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6740 may be null) in favor of the type of the element, and set
6741 SIZE to the size of the accessed element. */
6742 inner_offset = 0;
6743 type = TREE_TYPE (val);
6744 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6747 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6748 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6749 suboff);
6752 /* Memory not explicitly mentioned in constructor is 0 (or
6753 the reference is out of range). */
6754 return type ? build_zero_cst (type) : NULL_TREE;
6757 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6758 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6759 is the expected type of the reference; otherwise the type of
6760 the referenced member is used instead. When SIZE is zero,
6761 attempt to fold a reference to the entire member which OFFSET
6762 refers to; in this case. Increment *SUBOFF by the bit offset
6763 of the accessed member. */
6765 static tree
6766 fold_nonarray_ctor_reference (tree type, tree ctor,
6767 unsigned HOST_WIDE_INT offset,
6768 unsigned HOST_WIDE_INT size,
6769 tree from_decl,
6770 unsigned HOST_WIDE_INT *suboff)
6772 unsigned HOST_WIDE_INT cnt;
6773 tree cfield, cval;
6775 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6776 cval)
6778 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6779 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6780 tree field_size = DECL_SIZE (cfield);
6782 if (!field_size)
6784 /* Determine the size of the flexible array member from
6785 the size of the initializer provided for it. */
6786 field_size = TYPE_SIZE (TREE_TYPE (cval));
6789 /* Variable sized objects in static constructors makes no sense,
6790 but field_size can be NULL for flexible array members. */
6791 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6792 && TREE_CODE (byte_offset) == INTEGER_CST
6793 && (field_size != NULL_TREE
6794 ? TREE_CODE (field_size) == INTEGER_CST
6795 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6797 /* Compute bit offset of the field. */
6798 offset_int bitoffset
6799 = (wi::to_offset (field_offset)
6800 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6801 /* Compute bit offset where the field ends. */
6802 offset_int bitoffset_end;
6803 if (field_size != NULL_TREE)
6804 bitoffset_end = bitoffset + wi::to_offset (field_size);
6805 else
6806 bitoffset_end = 0;
6808 /* Compute the bit offset of the end of the desired access.
6809 As a special case, if the size of the desired access is
6810 zero, assume the access is to the entire field (and let
6811 the caller make any necessary adjustments by storing
6812 the actual bounds of the field in FIELDBOUNDS). */
6813 offset_int access_end = offset_int (offset);
6814 if (size)
6815 access_end += size;
6816 else
6817 access_end = bitoffset_end;
6819 /* Is there any overlap between the desired access at
6820 [OFFSET, OFFSET+SIZE) and the offset of the field within
6821 the object at [BITOFFSET, BITOFFSET_END)? */
6822 if (wi::cmps (access_end, bitoffset) > 0
6823 && (field_size == NULL_TREE
6824 || wi::lts_p (offset, bitoffset_end)))
6826 *suboff += bitoffset.to_uhwi ();
6828 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6830 /* For the final reference to the entire accessed member
6831 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6832 be null) in favor of the type of the member, and set
6833 SIZE to the size of the accessed member. */
6834 offset = bitoffset.to_uhwi ();
6835 type = TREE_TYPE (cval);
6836 size = (bitoffset_end - bitoffset).to_uhwi ();
6839 /* We do have overlap. Now see if the field is large enough
6840 to cover the access. Give up for accesses that extend
6841 beyond the end of the object or that span multiple fields. */
6842 if (wi::cmps (access_end, bitoffset_end) > 0)
6843 return NULL_TREE;
6844 if (offset < bitoffset)
6845 return NULL_TREE;
6847 offset_int inner_offset = offset_int (offset) - bitoffset;
6848 return fold_ctor_reference (type, cval,
6849 inner_offset.to_uhwi (), size,
6850 from_decl, suboff);
6853 /* Memory not explicitly mentioned in constructor is 0. */
6854 return type ? build_zero_cst (type) : NULL_TREE;
6857 /* CTOR is value initializing memory. Fold a reference of TYPE and
6858 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6859 is zero, attempt to fold a reference to the entire subobject
6860 which OFFSET refers to. This is used when folding accesses to
6861 string members of aggregates. When non-null, set *SUBOFF to
6862 the bit offset of the accessed subobject. */
6864 tree
6865 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6866 const poly_uint64 &poly_size, tree from_decl,
6867 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6869 tree ret;
6871 /* We found the field with exact match. */
6872 if (type
6873 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6874 && known_eq (poly_offset, 0U))
6875 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6877 /* The remaining optimizations need a constant size and offset. */
6878 unsigned HOST_WIDE_INT size, offset;
6879 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6880 return NULL_TREE;
6882 /* We are at the end of walk, see if we can view convert the
6883 result. */
6884 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6885 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6886 && !compare_tree_int (TYPE_SIZE (type), size)
6887 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6889 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6890 if (ret)
6892 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6893 if (ret)
6894 STRIP_USELESS_TYPE_CONVERSION (ret);
6896 return ret;
6898 /* For constants and byte-aligned/sized reads try to go through
6899 native_encode/interpret. */
6900 if (CONSTANT_CLASS_P (ctor)
6901 && BITS_PER_UNIT == 8
6902 && offset % BITS_PER_UNIT == 0
6903 && size % BITS_PER_UNIT == 0
6904 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6906 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6907 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6908 offset / BITS_PER_UNIT);
6909 if (len > 0)
6910 return native_interpret_expr (type, buf, len);
6912 if (TREE_CODE (ctor) == CONSTRUCTOR)
6914 unsigned HOST_WIDE_INT dummy = 0;
6915 if (!suboff)
6916 suboff = &dummy;
6918 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6919 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6920 return fold_array_ctor_reference (type, ctor, offset, size,
6921 from_decl, suboff);
6923 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6924 from_decl, suboff);
6927 return NULL_TREE;
6930 /* Return the tree representing the element referenced by T if T is an
6931 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6932 names using VALUEIZE. Return NULL_TREE otherwise. */
6934 tree
6935 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6937 tree ctor, idx, base;
6938 poly_int64 offset, size, max_size;
6939 tree tem;
6940 bool reverse;
6942 if (TREE_THIS_VOLATILE (t))
6943 return NULL_TREE;
6945 if (DECL_P (t))
6946 return get_symbol_constant_value (t);
6948 tem = fold_read_from_constant_string (t);
6949 if (tem)
6950 return tem;
6952 switch (TREE_CODE (t))
6954 case ARRAY_REF:
6955 case ARRAY_RANGE_REF:
6956 /* Constant indexes are handled well by get_base_constructor.
6957 Only special case variable offsets.
6958 FIXME: This code can't handle nested references with variable indexes
6959 (they will be handled only by iteration of ccp). Perhaps we can bring
6960 get_ref_base_and_extent here and make it use a valueize callback. */
6961 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6962 && valueize
6963 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6964 && poly_int_tree_p (idx))
6966 tree low_bound, unit_size;
6968 /* If the resulting bit-offset is constant, track it. */
6969 if ((low_bound = array_ref_low_bound (t),
6970 poly_int_tree_p (low_bound))
6971 && (unit_size = array_ref_element_size (t),
6972 tree_fits_uhwi_p (unit_size)))
6974 poly_offset_int woffset
6975 = wi::sext (wi::to_poly_offset (idx)
6976 - wi::to_poly_offset (low_bound),
6977 TYPE_PRECISION (TREE_TYPE (idx)));
6979 if (woffset.to_shwi (&offset))
6981 /* TODO: This code seems wrong, multiply then check
6982 to see if it fits. */
6983 offset *= tree_to_uhwi (unit_size);
6984 offset *= BITS_PER_UNIT;
6986 base = TREE_OPERAND (t, 0);
6987 ctor = get_base_constructor (base, &offset, valueize);
6988 /* Empty constructor. Always fold to 0. */
6989 if (ctor == error_mark_node)
6990 return build_zero_cst (TREE_TYPE (t));
6991 /* Out of bound array access. Value is undefined,
6992 but don't fold. */
6993 if (maybe_lt (offset, 0))
6994 return NULL_TREE;
6995 /* We can not determine ctor. */
6996 if (!ctor)
6997 return NULL_TREE;
6998 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6999 tree_to_uhwi (unit_size)
7000 * BITS_PER_UNIT,
7001 base);
7005 /* Fallthru. */
7007 case COMPONENT_REF:
7008 case BIT_FIELD_REF:
7009 case TARGET_MEM_REF:
7010 case MEM_REF:
7011 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7012 ctor = get_base_constructor (base, &offset, valueize);
7014 /* Empty constructor. Always fold to 0. */
7015 if (ctor == error_mark_node)
7016 return build_zero_cst (TREE_TYPE (t));
7017 /* We do not know precise address. */
7018 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7019 return NULL_TREE;
7020 /* We can not determine ctor. */
7021 if (!ctor)
7022 return NULL_TREE;
7024 /* Out of bound array access. Value is undefined, but don't fold. */
7025 if (maybe_lt (offset, 0))
7026 return NULL_TREE;
7028 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7029 base);
7031 case REALPART_EXPR:
7032 case IMAGPART_EXPR:
7034 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7035 if (c && TREE_CODE (c) == COMPLEX_CST)
7036 return fold_build1_loc (EXPR_LOCATION (t),
7037 TREE_CODE (t), TREE_TYPE (t), c);
7038 break;
7041 default:
7042 break;
7045 return NULL_TREE;
7048 tree
7049 fold_const_aggregate_ref (tree t)
7051 return fold_const_aggregate_ref_1 (t, NULL);
7054 /* Lookup virtual method with index TOKEN in a virtual table V
7055 at OFFSET.
7056 Set CAN_REFER if non-NULL to false if method
7057 is not referable or if the virtual table is ill-formed (such as rewriten
7058 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7060 tree
7061 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7062 tree v,
7063 unsigned HOST_WIDE_INT offset,
7064 bool *can_refer)
7066 tree vtable = v, init, fn;
7067 unsigned HOST_WIDE_INT size;
7068 unsigned HOST_WIDE_INT elt_size, access_index;
7069 tree domain_type;
7071 if (can_refer)
7072 *can_refer = true;
7074 /* First of all double check we have virtual table. */
7075 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7077 /* Pass down that we lost track of the target. */
7078 if (can_refer)
7079 *can_refer = false;
7080 return NULL_TREE;
7083 init = ctor_for_folding (v);
7085 /* The virtual tables should always be born with constructors
7086 and we always should assume that they are avaialble for
7087 folding. At the moment we do not stream them in all cases,
7088 but it should never happen that ctor seem unreachable. */
7089 gcc_assert (init);
7090 if (init == error_mark_node)
7092 /* Pass down that we lost track of the target. */
7093 if (can_refer)
7094 *can_refer = false;
7095 return NULL_TREE;
7097 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7098 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7099 offset *= BITS_PER_UNIT;
7100 offset += token * size;
7102 /* Lookup the value in the constructor that is assumed to be array.
7103 This is equivalent to
7104 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7105 offset, size, NULL);
7106 but in a constant time. We expect that frontend produced a simple
7107 array without indexed initializers. */
7109 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7110 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7111 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7112 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7114 access_index = offset / BITS_PER_UNIT / elt_size;
7115 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7117 /* The C++ FE can now produce indexed fields, and we check if the indexes
7118 match. */
7119 if (access_index < CONSTRUCTOR_NELTS (init))
7121 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7122 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7123 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7124 STRIP_NOPS (fn);
7126 else
7127 fn = NULL;
7129 /* For type inconsistent program we may end up looking up virtual method
7130 in virtual table that does not contain TOKEN entries. We may overrun
7131 the virtual table and pick up a constant or RTTI info pointer.
7132 In any case the call is undefined. */
7133 if (!fn
7134 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7135 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7136 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7137 else
7139 fn = TREE_OPERAND (fn, 0);
7141 /* When cgraph node is missing and function is not public, we cannot
7142 devirtualize. This can happen in WHOPR when the actual method
7143 ends up in other partition, because we found devirtualization
7144 possibility too late. */
7145 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7147 if (can_refer)
7149 *can_refer = false;
7150 return fn;
7152 return NULL_TREE;
7156 /* Make sure we create a cgraph node for functions we'll reference.
7157 They can be non-existent if the reference comes from an entry
7158 of an external vtable for example. */
7159 cgraph_node::get_create (fn);
7161 return fn;
7164 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7165 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7166 KNOWN_BINFO carries the binfo describing the true type of
7167 OBJ_TYPE_REF_OBJECT(REF).
7168 Set CAN_REFER if non-NULL to false if method
7169 is not referable or if the virtual table is ill-formed (such as rewriten
7170 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7172 tree
7173 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7174 bool *can_refer)
7176 unsigned HOST_WIDE_INT offset;
7177 tree v;
7179 v = BINFO_VTABLE (known_binfo);
7180 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7181 if (!v)
7182 return NULL_TREE;
7184 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7186 if (can_refer)
7187 *can_refer = false;
7188 return NULL_TREE;
7190 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7193 /* Given a pointer value T, return a simplified version of an
7194 indirection through T, or NULL_TREE if no simplification is
7195 possible. Note that the resulting type may be different from
7196 the type pointed to in the sense that it is still compatible
7197 from the langhooks point of view. */
7199 tree
7200 gimple_fold_indirect_ref (tree t)
7202 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7203 tree sub = t;
7204 tree subtype;
7206 STRIP_NOPS (sub);
7207 subtype = TREE_TYPE (sub);
7208 if (!POINTER_TYPE_P (subtype)
7209 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7210 return NULL_TREE;
7212 if (TREE_CODE (sub) == ADDR_EXPR)
7214 tree op = TREE_OPERAND (sub, 0);
7215 tree optype = TREE_TYPE (op);
7216 /* *&p => p */
7217 if (useless_type_conversion_p (type, optype))
7218 return op;
7220 /* *(foo *)&fooarray => fooarray[0] */
7221 if (TREE_CODE (optype) == ARRAY_TYPE
7222 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7223 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7225 tree type_domain = TYPE_DOMAIN (optype);
7226 tree min_val = size_zero_node;
7227 if (type_domain && TYPE_MIN_VALUE (type_domain))
7228 min_val = TYPE_MIN_VALUE (type_domain);
7229 if (TREE_CODE (min_val) == INTEGER_CST)
7230 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7232 /* *(foo *)&complexfoo => __real__ complexfoo */
7233 else if (TREE_CODE (optype) == COMPLEX_TYPE
7234 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7235 return fold_build1 (REALPART_EXPR, type, op);
7236 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7237 else if (TREE_CODE (optype) == VECTOR_TYPE
7238 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7240 tree part_width = TYPE_SIZE (type);
7241 tree index = bitsize_int (0);
7242 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7246 /* *(p + CST) -> ... */
7247 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7248 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7250 tree addr = TREE_OPERAND (sub, 0);
7251 tree off = TREE_OPERAND (sub, 1);
7252 tree addrtype;
7254 STRIP_NOPS (addr);
7255 addrtype = TREE_TYPE (addr);
7257 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7258 if (TREE_CODE (addr) == ADDR_EXPR
7259 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7260 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7261 && tree_fits_uhwi_p (off))
7263 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7264 tree part_width = TYPE_SIZE (type);
7265 unsigned HOST_WIDE_INT part_widthi
7266 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7267 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7268 tree index = bitsize_int (indexi);
7269 if (known_lt (offset / part_widthi,
7270 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7271 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7272 part_width, index);
7275 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7276 if (TREE_CODE (addr) == ADDR_EXPR
7277 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7278 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7280 tree size = TYPE_SIZE_UNIT (type);
7281 if (tree_int_cst_equal (size, off))
7282 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7285 /* *(p + CST) -> MEM_REF <p, CST>. */
7286 if (TREE_CODE (addr) != ADDR_EXPR
7287 || DECL_P (TREE_OPERAND (addr, 0)))
7288 return fold_build2 (MEM_REF, type,
7289 addr,
7290 wide_int_to_tree (ptype, wi::to_wide (off)));
7293 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7294 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7295 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7296 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7298 tree type_domain;
7299 tree min_val = size_zero_node;
7300 tree osub = sub;
7301 sub = gimple_fold_indirect_ref (sub);
7302 if (! sub)
7303 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7304 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7305 if (type_domain && TYPE_MIN_VALUE (type_domain))
7306 min_val = TYPE_MIN_VALUE (type_domain);
7307 if (TREE_CODE (min_val) == INTEGER_CST)
7308 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7311 return NULL_TREE;
7314 /* Return true if CODE is an operation that when operating on signed
7315 integer types involves undefined behavior on overflow and the
7316 operation can be expressed with unsigned arithmetic. */
7318 bool
7319 arith_code_with_undefined_signed_overflow (tree_code code)
7321 switch (code)
7323 case PLUS_EXPR:
7324 case MINUS_EXPR:
7325 case MULT_EXPR:
7326 case NEGATE_EXPR:
7327 case POINTER_PLUS_EXPR:
7328 return true;
7329 default:
7330 return false;
7334 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7335 operation that can be transformed to unsigned arithmetic by converting
7336 its operand, carrying out the operation in the corresponding unsigned
7337 type and converting the result back to the original type.
7339 Returns a sequence of statements that replace STMT and also contain
7340 a modified form of STMT itself. */
7342 gimple_seq
7343 rewrite_to_defined_overflow (gimple *stmt)
7345 if (dump_file && (dump_flags & TDF_DETAILS))
7347 fprintf (dump_file, "rewriting stmt with undefined signed "
7348 "overflow ");
7349 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7352 tree lhs = gimple_assign_lhs (stmt);
7353 tree type = unsigned_type_for (TREE_TYPE (lhs));
7354 gimple_seq stmts = NULL;
7355 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7357 tree op = gimple_op (stmt, i);
7358 op = gimple_convert (&stmts, type, op);
7359 gimple_set_op (stmt, i, op);
7361 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7362 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7363 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7364 gimple_seq_add_stmt (&stmts, stmt);
7365 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7366 gimple_seq_add_stmt (&stmts, cvt);
7368 return stmts;
7372 /* The valueization hook we use for the gimple_build API simplification.
7373 This makes us match fold_buildN behavior by only combining with
7374 statements in the sequence(s) we are currently building. */
7376 static tree
7377 gimple_build_valueize (tree op)
7379 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7380 return op;
7381 return NULL_TREE;
7384 /* Build the expression CODE OP0 of type TYPE with location LOC,
7385 simplifying it first if possible. Returns the built
7386 expression value and appends statements possibly defining it
7387 to SEQ. */
7389 tree
7390 gimple_build (gimple_seq *seq, location_t loc,
7391 enum tree_code code, tree type, tree op0)
7393 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7394 if (!res)
7396 res = create_tmp_reg_or_ssa_name (type);
7397 gimple *stmt;
7398 if (code == REALPART_EXPR
7399 || code == IMAGPART_EXPR
7400 || code == VIEW_CONVERT_EXPR)
7401 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7402 else
7403 stmt = gimple_build_assign (res, code, op0);
7404 gimple_set_location (stmt, loc);
7405 gimple_seq_add_stmt_without_update (seq, stmt);
7407 return res;
7410 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7411 simplifying it first if possible. Returns the built
7412 expression value and appends statements possibly defining it
7413 to SEQ. */
7415 tree
7416 gimple_build (gimple_seq *seq, location_t loc,
7417 enum tree_code code, tree type, tree op0, tree op1)
7419 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7420 if (!res)
7422 res = create_tmp_reg_or_ssa_name (type);
7423 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7424 gimple_set_location (stmt, loc);
7425 gimple_seq_add_stmt_without_update (seq, stmt);
7427 return res;
7430 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7431 simplifying it first if possible. Returns the built
7432 expression value and appends statements possibly defining it
7433 to SEQ. */
7435 tree
7436 gimple_build (gimple_seq *seq, location_t loc,
7437 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7439 tree res = gimple_simplify (code, type, op0, op1, op2,
7440 seq, gimple_build_valueize);
7441 if (!res)
7443 res = create_tmp_reg_or_ssa_name (type);
7444 gimple *stmt;
7445 if (code == BIT_FIELD_REF)
7446 stmt = gimple_build_assign (res, code,
7447 build3 (code, type, op0, op1, op2));
7448 else
7449 stmt = gimple_build_assign (res, code, op0, op1, op2);
7450 gimple_set_location (stmt, loc);
7451 gimple_seq_add_stmt_without_update (seq, stmt);
7453 return res;
7456 /* Build the call FN (ARG0) with a result of type TYPE
7457 (or no result if TYPE is void) with location LOC,
7458 simplifying it first if possible. Returns the built
7459 expression value (or NULL_TREE if TYPE is void) and appends
7460 statements possibly defining it to SEQ. */
7462 tree
7463 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7464 tree type, tree arg0)
7466 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7467 if (!res)
7469 gcall *stmt;
7470 if (internal_fn_p (fn))
7471 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7472 else
7474 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7475 stmt = gimple_build_call (decl, 1, arg0);
7477 if (!VOID_TYPE_P (type))
7479 res = create_tmp_reg_or_ssa_name (type);
7480 gimple_call_set_lhs (stmt, res);
7482 gimple_set_location (stmt, loc);
7483 gimple_seq_add_stmt_without_update (seq, stmt);
7485 return res;
7488 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7489 (or no result if TYPE is void) with location LOC,
7490 simplifying it first if possible. Returns the built
7491 expression value (or NULL_TREE if TYPE is void) and appends
7492 statements possibly defining it to SEQ. */
7494 tree
7495 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7496 tree type, tree arg0, tree arg1)
7498 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7499 if (!res)
7501 gcall *stmt;
7502 if (internal_fn_p (fn))
7503 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7504 else
7506 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7507 stmt = gimple_build_call (decl, 2, arg0, arg1);
7509 if (!VOID_TYPE_P (type))
7511 res = create_tmp_reg_or_ssa_name (type);
7512 gimple_call_set_lhs (stmt, res);
7514 gimple_set_location (stmt, loc);
7515 gimple_seq_add_stmt_without_update (seq, stmt);
7517 return res;
7520 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7521 (or no result if TYPE is void) with location LOC,
7522 simplifying it first if possible. Returns the built
7523 expression value (or NULL_TREE if TYPE is void) and appends
7524 statements possibly defining it to SEQ. */
7526 tree
7527 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7528 tree type, tree arg0, tree arg1, tree arg2)
7530 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7531 seq, gimple_build_valueize);
7532 if (!res)
7534 gcall *stmt;
7535 if (internal_fn_p (fn))
7536 stmt = gimple_build_call_internal (as_internal_fn (fn),
7537 3, arg0, arg1, arg2);
7538 else
7540 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7541 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7543 if (!VOID_TYPE_P (type))
7545 res = create_tmp_reg_or_ssa_name (type);
7546 gimple_call_set_lhs (stmt, res);
7548 gimple_set_location (stmt, loc);
7549 gimple_seq_add_stmt_without_update (seq, stmt);
7551 return res;
7554 /* Build the conversion (TYPE) OP with a result of type TYPE
7555 with location LOC if such conversion is neccesary in GIMPLE,
7556 simplifying it first.
7557 Returns the built expression value and appends
7558 statements possibly defining it to SEQ. */
7560 tree
7561 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7563 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7564 return op;
7565 return gimple_build (seq, loc, NOP_EXPR, type, op);
7568 /* Build the conversion (ptrofftype) OP with a result of a type
7569 compatible with ptrofftype with location LOC if such conversion
7570 is neccesary in GIMPLE, simplifying it first.
7571 Returns the built expression value and appends
7572 statements possibly defining it to SEQ. */
7574 tree
7575 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7577 if (ptrofftype_p (TREE_TYPE (op)))
7578 return op;
7579 return gimple_convert (seq, loc, sizetype, op);
7582 /* Build a vector of type TYPE in which each element has the value OP.
7583 Return a gimple value for the result, appending any new statements
7584 to SEQ. */
7586 tree
7587 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7588 tree op)
7590 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7591 && !CONSTANT_CLASS_P (op))
7592 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7594 tree res, vec = build_vector_from_val (type, op);
7595 if (is_gimple_val (vec))
7596 return vec;
7597 if (gimple_in_ssa_p (cfun))
7598 res = make_ssa_name (type);
7599 else
7600 res = create_tmp_reg (type);
7601 gimple *stmt = gimple_build_assign (res, vec);
7602 gimple_set_location (stmt, loc);
7603 gimple_seq_add_stmt_without_update (seq, stmt);
7604 return res;
7607 /* Build a vector from BUILDER, handling the case in which some elements
7608 are non-constant. Return a gimple value for the result, appending any
7609 new instructions to SEQ.
7611 BUILDER must not have a stepped encoding on entry. This is because
7612 the function is not geared up to handle the arithmetic that would
7613 be needed in the variable case, and any code building a vector that
7614 is known to be constant should use BUILDER->build () directly. */
7616 tree
7617 gimple_build_vector (gimple_seq *seq, location_t loc,
7618 tree_vector_builder *builder)
7620 gcc_assert (builder->nelts_per_pattern () <= 2);
7621 unsigned int encoded_nelts = builder->encoded_nelts ();
7622 for (unsigned int i = 0; i < encoded_nelts; ++i)
7623 if (!TREE_CONSTANT ((*builder)[i]))
7625 tree type = builder->type ();
7626 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7627 vec<constructor_elt, va_gc> *v;
7628 vec_alloc (v, nelts);
7629 for (i = 0; i < nelts; ++i)
7630 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7632 tree res;
7633 if (gimple_in_ssa_p (cfun))
7634 res = make_ssa_name (type);
7635 else
7636 res = create_tmp_reg (type);
7637 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7638 gimple_set_location (stmt, loc);
7639 gimple_seq_add_stmt_without_update (seq, stmt);
7640 return res;
7642 return builder->build ();
7645 /* Return true if the result of assignment STMT is known to be non-negative.
7646 If the return value is based on the assumption that signed overflow is
7647 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7648 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7650 static bool
7651 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7652 int depth)
7654 enum tree_code code = gimple_assign_rhs_code (stmt);
7655 switch (get_gimple_rhs_class (code))
7657 case GIMPLE_UNARY_RHS:
7658 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7659 gimple_expr_type (stmt),
7660 gimple_assign_rhs1 (stmt),
7661 strict_overflow_p, depth);
7662 case GIMPLE_BINARY_RHS:
7663 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7664 gimple_expr_type (stmt),
7665 gimple_assign_rhs1 (stmt),
7666 gimple_assign_rhs2 (stmt),
7667 strict_overflow_p, depth);
7668 case GIMPLE_TERNARY_RHS:
7669 return false;
7670 case GIMPLE_SINGLE_RHS:
7671 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7672 strict_overflow_p, depth);
7673 case GIMPLE_INVALID_RHS:
7674 break;
7676 gcc_unreachable ();
7679 /* Return true if return value of call STMT is known to be non-negative.
7680 If the return value is based on the assumption that signed overflow is
7681 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7682 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7684 static bool
7685 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7686 int depth)
7688 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7689 gimple_call_arg (stmt, 0) : NULL_TREE;
7690 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7691 gimple_call_arg (stmt, 1) : NULL_TREE;
7693 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7694 gimple_call_combined_fn (stmt),
7695 arg0,
7696 arg1,
7697 strict_overflow_p, depth);
7700 /* Return true if return value of call STMT is known to be non-negative.
7701 If the return value is based on the assumption that signed overflow is
7702 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7703 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7705 static bool
7706 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7707 int depth)
7709 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7711 tree arg = gimple_phi_arg_def (stmt, i);
7712 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7713 return false;
7715 return true;
7718 /* Return true if STMT is known to compute a non-negative value.
7719 If the return value is based on the assumption that signed overflow is
7720 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7721 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7723 bool
7724 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7725 int depth)
7727 switch (gimple_code (stmt))
7729 case GIMPLE_ASSIGN:
7730 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7731 depth);
7732 case GIMPLE_CALL:
7733 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7734 depth);
7735 case GIMPLE_PHI:
7736 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7737 depth);
7738 default:
7739 return false;
7743 /* Return true if the floating-point value computed by assignment STMT
7744 is known to have an integer value. We also allow +Inf, -Inf and NaN
7745 to be considered integer values. Return false for signaling NaN.
7747 DEPTH is the current nesting depth of the query. */
7749 static bool
7750 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7752 enum tree_code code = gimple_assign_rhs_code (stmt);
7753 switch (get_gimple_rhs_class (code))
7755 case GIMPLE_UNARY_RHS:
7756 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7757 gimple_assign_rhs1 (stmt), depth);
7758 case GIMPLE_BINARY_RHS:
7759 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7760 gimple_assign_rhs1 (stmt),
7761 gimple_assign_rhs2 (stmt), depth);
7762 case GIMPLE_TERNARY_RHS:
7763 return false;
7764 case GIMPLE_SINGLE_RHS:
7765 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7766 case GIMPLE_INVALID_RHS:
7767 break;
7769 gcc_unreachable ();
7772 /* Return true if the floating-point value computed by call STMT is known
7773 to have an integer value. We also allow +Inf, -Inf and NaN to be
7774 considered integer values. Return false for signaling NaN.
7776 DEPTH is the current nesting depth of the query. */
7778 static bool
7779 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7781 tree arg0 = (gimple_call_num_args (stmt) > 0
7782 ? gimple_call_arg (stmt, 0)
7783 : NULL_TREE);
7784 tree arg1 = (gimple_call_num_args (stmt) > 1
7785 ? gimple_call_arg (stmt, 1)
7786 : NULL_TREE);
7787 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7788 arg0, arg1, depth);
7791 /* Return true if the floating-point result of phi STMT is known to have
7792 an integer value. We also allow +Inf, -Inf and NaN to be considered
7793 integer values. Return false for signaling NaN.
7795 DEPTH is the current nesting depth of the query. */
7797 static bool
7798 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7800 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7802 tree arg = gimple_phi_arg_def (stmt, i);
7803 if (!integer_valued_real_single_p (arg, depth + 1))
7804 return false;
7806 return true;
7809 /* Return true if the floating-point value computed by STMT is known
7810 to have an integer value. We also allow +Inf, -Inf and NaN to be
7811 considered integer values. Return false for signaling NaN.
7813 DEPTH is the current nesting depth of the query. */
7815 bool
7816 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7818 switch (gimple_code (stmt))
7820 case GIMPLE_ASSIGN:
7821 return gimple_assign_integer_valued_real_p (stmt, depth);
7822 case GIMPLE_CALL:
7823 return gimple_call_integer_valued_real_p (stmt, depth);
7824 case GIMPLE_PHI:
7825 return gimple_phi_integer_valued_real_p (stmt, depth);
7826 default:
7827 return false;