/* gcc/gimple-fold.c — Statement simplification on GIMPLE.
   (Extracted from official-gcc.git, blob 118718a59ee7dc6cf8dbda0913abdeacd7bab80d.)  */
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 enum strlen_range_kind {
70 /* Compute the exact constant string length. */
71 SRK_STRLEN,
72 /* Compute the maximum constant string length. */
73 SRK_STRLENMAX,
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
79 SRK_LENRANGE,
80 /* Determine the integer value of the argument (not string length). */
81 SRK_INT_VALUE
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87 /* Return true when DECL can be referenced from current unit.
88 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
89 We can get declarations that are not possible to reference for various
90 reasons:
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
94 when they are keyed to other compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to reference
99 to method that was partitioned elsehwere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
102 declaring the body.
103 3) COMDAT functions referred by external vtables that
104 we devirtualize only during final compilation stage.
105 At this time we already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
109 static bool
110 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
112 varpool_node *vnode;
113 struct cgraph_node *node;
114 symtab_node *snode;
116 if (DECL_ABSTRACT_P (decl))
117 return false;
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
121 || !VAR_OR_FUNCTION_DECL_P (decl))
122 return true;
124 /* Static objects can be referred only if they are defined and not optimized
125 out yet. */
126 if (!TREE_PUBLIC (decl))
128 if (DECL_EXTERNAL (decl))
129 return false;
130 /* Before we start optimizing unreachable code we can be sure all
131 static objects are defined. */
132 if (symtab->function_flags_ready)
133 return true;
134 snode = symtab_node::get (decl);
135 if (!snode || !snode->definition)
136 return false;
137 node = dyn_cast <cgraph_node *> (snode);
138 return !node || !node->global.inlined_to;
141 /* We will later output the initializer, so we can refer to it.
142 So we are concerned only when DECL comes from initializer of
143 external var or var that has been optimized out. */
144 if (!from_decl
145 || !VAR_P (from_decl)
146 || (!DECL_EXTERNAL (from_decl)
147 && (vnode = varpool_node::get (from_decl)) != NULL
148 && vnode->definition)
149 || (flag_ltrans
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->in_other_partition))
152 return true;
153 /* We are folding reference from external vtable. The vtable may reffer
154 to a symbol keyed to other compilation unit. The other compilation
155 unit may be in separate DSO and the symbol may be hidden. */
156 if (DECL_VISIBILITY_SPECIFIED (decl)
157 && DECL_EXTERNAL (decl)
158 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
159 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
160 return false;
161 /* When function is public, we always can introduce new reference.
162 Exception are the COMDAT functions where introducing a direct
163 reference imply need to include function body in the curren tunit. */
164 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
165 return true;
166 /* We have COMDAT. We are going to check if we still have definition
167 or if the definition is going to be output in other partition.
168 Bypass this when gimplifying; all needed functions will be produced.
170 As observed in PR20991 for already optimized out comdat virtual functions
171 it may be tempting to not necessarily give up because the copy will be
172 output elsewhere when corresponding vtable is output.
173 This is however not possible - ABI specify that COMDATs are output in
174 units where they are used and when the other unit was compiled with LTO
175 it is possible that vtable was kept public while the function itself
176 was privatized. */
177 if (!symtab->function_flags_ready)
178 return true;
180 snode = symtab_node::get (decl);
181 if (!snode
182 || ((!snode->definition || DECL_EXTERNAL (decl))
183 && (!snode->in_other_partition
184 || (!snode->forced_by_abi && !snode->force_output))))
185 return false;
186 node = dyn_cast <cgraph_node *> (snode);
187 return !node || !node->global.inlined_to;
190 /* Create a temporary for TYPE for a statement STMT. If the current function
191 is in SSA form, a SSA name is created. Otherwise a temporary register
192 is made. */
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 if (gimple_in_ssa_p (cfun))
198 return make_ssa_name (type, stmt);
199 else
200 return create_tmp_reg (type);
203 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
204 acceptable form for is_gimple_min_invariant.
205 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
207 tree
208 canonicalize_constructor_val (tree cval, tree from_decl)
210 if (CONSTANT_CLASS_P (cval))
211 return cval;
213 tree orig_cval = cval;
214 STRIP_NOPS (cval);
215 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
216 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
218 tree ptr = TREE_OPERAND (cval, 0);
219 if (is_gimple_min_invariant (ptr))
220 cval = build1_loc (EXPR_LOCATION (cval),
221 ADDR_EXPR, TREE_TYPE (ptr),
222 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
223 ptr,
224 fold_convert (ptr_type_node,
225 TREE_OPERAND (cval, 1))));
227 if (TREE_CODE (cval) == ADDR_EXPR)
229 tree base = NULL_TREE;
230 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
232 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
233 if (base)
234 TREE_OPERAND (cval, 0) = base;
236 else
237 base = get_base_address (TREE_OPERAND (cval, 0));
238 if (!base)
239 return NULL_TREE;
241 if (VAR_OR_FUNCTION_DECL_P (base)
242 && !can_refer_decl_in_current_unit_p (base, from_decl))
243 return NULL_TREE;
244 if (TREE_TYPE (base) == error_mark_node)
245 return NULL_TREE;
246 if (VAR_P (base))
247 TREE_ADDRESSABLE (base) = 1;
248 else if (TREE_CODE (base) == FUNCTION_DECL)
250 /* Make sure we create a cgraph node for functions we'll reference.
251 They can be non-existent if the reference comes from an entry
252 of an external vtable for example. */
253 cgraph_node::get_create (base);
255 /* Fixup types in global initializers. */
256 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
257 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
259 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
260 cval = fold_convert (TREE_TYPE (orig_cval), cval);
261 return cval;
263 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
264 if (TREE_CODE (cval) == INTEGER_CST)
266 if (TREE_OVERFLOW_P (cval))
267 cval = drop_tree_overflow (cval);
268 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
269 cval = fold_convert (TREE_TYPE (orig_cval), cval);
270 return cval;
272 return orig_cval;
275 /* If SYM is a constant variable with known value, return the value.
276 NULL_TREE is returned otherwise. */
278 tree
279 get_symbol_constant_value (tree sym)
281 tree val = ctor_for_folding (sym);
282 if (val != error_mark_node)
284 if (val)
286 val = canonicalize_constructor_val (unshare_expr (val), sym);
287 if (val && is_gimple_min_invariant (val))
288 return val;
289 else
290 return NULL_TREE;
292 /* Variables declared 'const' without an initializer
293 have zero as the initializer if they may not be
294 overridden at link or run time. */
295 if (!val
296 && is_gimple_reg_type (TREE_TYPE (sym)))
297 return build_zero_cst (TREE_TYPE (sym));
300 return NULL_TREE;
305 /* Subroutine of fold_stmt. We perform several simplifications of the
306 memory reference tree EXPR and make sure to re-gimplify them properly
307 after propagation of constant addresses. IS_LHS is true if the
308 reference is supposed to be an lvalue. */
310 static tree
311 maybe_fold_reference (tree expr, bool is_lhs)
313 tree result;
315 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
316 || TREE_CODE (expr) == REALPART_EXPR
317 || TREE_CODE (expr) == IMAGPART_EXPR)
318 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 return fold_unary_loc (EXPR_LOCATION (expr),
320 TREE_CODE (expr),
321 TREE_TYPE (expr),
322 TREE_OPERAND (expr, 0));
323 else if (TREE_CODE (expr) == BIT_FIELD_REF
324 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
325 return fold_ternary_loc (EXPR_LOCATION (expr),
326 TREE_CODE (expr),
327 TREE_TYPE (expr),
328 TREE_OPERAND (expr, 0),
329 TREE_OPERAND (expr, 1),
330 TREE_OPERAND (expr, 2));
332 if (!is_lhs
333 && (result = fold_const_aggregate_ref (expr))
334 && is_gimple_min_invariant (result))
335 return result;
337 return NULL_TREE;
341 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
342 replacement rhs for the statement or NULL_TREE if no simplification
343 could be made. It is assumed that the operands have been previously
344 folded. */
346 static tree
347 fold_gimple_assign (gimple_stmt_iterator *si)
349 gimple *stmt = gsi_stmt (*si);
350 enum tree_code subcode = gimple_assign_rhs_code (stmt);
351 location_t loc = gimple_location (stmt);
353 tree result = NULL_TREE;
355 switch (get_gimple_rhs_class (subcode))
357 case GIMPLE_SINGLE_RHS:
359 tree rhs = gimple_assign_rhs1 (stmt);
361 if (TREE_CLOBBER_P (rhs))
362 return NULL_TREE;
364 if (REFERENCE_CLASS_P (rhs))
365 return maybe_fold_reference (rhs, false);
367 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
369 tree val = OBJ_TYPE_REF_EXPR (rhs);
370 if (is_gimple_min_invariant (val))
371 return val;
372 else if (flag_devirtualize && virtual_method_call_p (rhs))
374 bool final;
375 vec <cgraph_node *>targets
376 = possible_polymorphic_call_targets (rhs, stmt, &final);
377 if (final && targets.length () <= 1 && dbg_cnt (devirt))
379 if (dump_enabled_p ())
381 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
382 "resolving virtual function address "
383 "reference to function %s\n",
384 targets.length () == 1
385 ? targets[0]->name ()
386 : "NULL");
388 if (targets.length () == 1)
390 val = fold_convert (TREE_TYPE (val),
391 build_fold_addr_expr_loc
392 (loc, targets[0]->decl));
393 STRIP_USELESS_TYPE_CONVERSION (val);
395 else
396 /* We cannot use __builtin_unreachable here because it
397 cannot have address taken. */
398 val = build_int_cst (TREE_TYPE (val), 0);
399 return val;
404 else if (TREE_CODE (rhs) == ADDR_EXPR)
406 tree ref = TREE_OPERAND (rhs, 0);
407 tree tem = maybe_fold_reference (ref, true);
408 if (tem
409 && TREE_CODE (tem) == MEM_REF
410 && integer_zerop (TREE_OPERAND (tem, 1)))
411 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
412 else if (tem)
413 result = fold_convert (TREE_TYPE (rhs),
414 build_fold_addr_expr_loc (loc, tem));
415 else if (TREE_CODE (ref) == MEM_REF
416 && integer_zerop (TREE_OPERAND (ref, 1)))
417 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
419 if (result)
421 /* Strip away useless type conversions. Both the
422 NON_LVALUE_EXPR that may have been added by fold, and
423 "useless" type conversions that might now be apparent
424 due to propagation. */
425 STRIP_USELESS_TYPE_CONVERSION (result);
427 if (result != rhs && valid_gimple_rhs_p (result))
428 return result;
432 else if (TREE_CODE (rhs) == CONSTRUCTOR
433 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
435 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
436 unsigned i;
437 tree val;
439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
440 if (! CONSTANT_CLASS_P (val))
441 return NULL_TREE;
443 return build_vector_from_ctor (TREE_TYPE (rhs),
444 CONSTRUCTOR_ELTS (rhs));
447 else if (DECL_P (rhs))
448 return get_symbol_constant_value (rhs);
450 break;
452 case GIMPLE_UNARY_RHS:
453 break;
455 case GIMPLE_BINARY_RHS:
456 break;
458 case GIMPLE_TERNARY_RHS:
459 result = fold_ternary_loc (loc, subcode,
460 TREE_TYPE (gimple_assign_lhs (stmt)),
461 gimple_assign_rhs1 (stmt),
462 gimple_assign_rhs2 (stmt),
463 gimple_assign_rhs3 (stmt));
465 if (result)
467 STRIP_USELESS_TYPE_CONVERSION (result);
468 if (valid_gimple_rhs_p (result))
469 return result;
471 break;
473 case GIMPLE_INVALID_RHS:
474 gcc_unreachable ();
477 return NULL_TREE;
481 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
482 adjusting the replacement stmts location and virtual operands.
483 If the statement has a lhs the last stmt in the sequence is expected
484 to assign to that lhs. */
486 static void
487 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
489 gimple *stmt = gsi_stmt (*si_p);
491 if (gimple_has_location (stmt))
492 annotate_all_with_location (stmts, gimple_location (stmt));
494 /* First iterate over the replacement statements backward, assigning
495 virtual operands to their defining statements. */
496 gimple *laststore = NULL;
497 for (gimple_stmt_iterator i = gsi_last (stmts);
498 !gsi_end_p (i); gsi_prev (&i))
500 gimple *new_stmt = gsi_stmt (i);
501 if ((gimple_assign_single_p (new_stmt)
502 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
503 || (is_gimple_call (new_stmt)
504 && (gimple_call_flags (new_stmt)
505 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
507 tree vdef;
508 if (!laststore)
509 vdef = gimple_vdef (stmt);
510 else
511 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
512 gimple_set_vdef (new_stmt, vdef);
513 if (vdef && TREE_CODE (vdef) == SSA_NAME)
514 SSA_NAME_DEF_STMT (vdef) = new_stmt;
515 laststore = new_stmt;
519 /* Second iterate over the statements forward, assigning virtual
520 operands to their uses. */
521 tree reaching_vuse = gimple_vuse (stmt);
522 for (gimple_stmt_iterator i = gsi_start (stmts);
523 !gsi_end_p (i); gsi_next (&i))
525 gimple *new_stmt = gsi_stmt (i);
526 /* If the new statement possibly has a VUSE, update it with exact SSA
527 name we know will reach this one. */
528 if (gimple_has_mem_ops (new_stmt))
529 gimple_set_vuse (new_stmt, reaching_vuse);
530 gimple_set_modified (new_stmt, true);
531 if (gimple_vdef (new_stmt))
532 reaching_vuse = gimple_vdef (new_stmt);
535 /* If the new sequence does not do a store release the virtual
536 definition of the original statement. */
537 if (reaching_vuse
538 && reaching_vuse == gimple_vuse (stmt))
540 tree vdef = gimple_vdef (stmt);
541 if (vdef
542 && TREE_CODE (vdef) == SSA_NAME)
544 unlink_stmt_vdef (stmt);
545 release_ssa_name (vdef);
549 /* Finally replace the original statement with the sequence. */
550 gsi_replace_with_seq (si_p, stmts, false);
553 /* Convert EXPR into a GIMPLE value suitable for substitution on the
554 RHS of an assignment. Insert the necessary statements before
555 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
556 is replaced. If the call is expected to produces a result, then it
557 is replaced by an assignment of the new RHS to the result variable.
558 If the result is to be ignored, then the call is replaced by a
559 GIMPLE_NOP. A proper VDEF chain is retained by making the first
560 VUSE and the last VDEF of the whole sequence be the same as the replaced
561 statement and using new SSA names for stores in between. */
563 void
564 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
566 tree lhs;
567 gimple *stmt, *new_stmt;
568 gimple_stmt_iterator i;
569 gimple_seq stmts = NULL;
571 stmt = gsi_stmt (*si_p);
573 gcc_assert (is_gimple_call (stmt));
575 push_gimplify_context (gimple_in_ssa_p (cfun));
577 lhs = gimple_call_lhs (stmt);
578 if (lhs == NULL_TREE)
580 gimplify_and_add (expr, &stmts);
581 /* We can end up with folding a memcpy of an empty class assignment
582 which gets optimized away by C++ gimplification. */
583 if (gimple_seq_empty_p (stmts))
585 pop_gimplify_context (NULL);
586 if (gimple_in_ssa_p (cfun))
588 unlink_stmt_vdef (stmt);
589 release_defs (stmt);
591 gsi_replace (si_p, gimple_build_nop (), false);
592 return;
595 else
597 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
598 new_stmt = gimple_build_assign (lhs, tmp);
599 i = gsi_last (stmts);
600 gsi_insert_after_without_update (&i, new_stmt,
601 GSI_CONTINUE_LINKING);
604 pop_gimplify_context (NULL);
606 gsi_replace_with_seq_vops (si_p, stmts);
610 /* Replace the call at *GSI with the gimple value VAL. */
612 void
613 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
615 gimple *stmt = gsi_stmt (*gsi);
616 tree lhs = gimple_call_lhs (stmt);
617 gimple *repl;
618 if (lhs)
620 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
621 val = fold_convert (TREE_TYPE (lhs), val);
622 repl = gimple_build_assign (lhs, val);
624 else
625 repl = gimple_build_nop ();
626 tree vdef = gimple_vdef (stmt);
627 if (vdef && TREE_CODE (vdef) == SSA_NAME)
629 unlink_stmt_vdef (stmt);
630 release_ssa_name (vdef);
632 gsi_replace (gsi, repl, false);
635 /* Replace the call at *GSI with the new call REPL and fold that
636 again. */
638 static void
639 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
641 gimple *stmt = gsi_stmt (*gsi);
642 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
643 gimple_set_location (repl, gimple_location (stmt));
644 if (gimple_vdef (stmt)
645 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
647 gimple_set_vdef (repl, gimple_vdef (stmt));
648 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
650 if (gimple_vuse (stmt))
651 gimple_set_vuse (repl, gimple_vuse (stmt));
652 gsi_replace (gsi, repl, false);
653 fold_stmt (gsi);
656 /* Return true if VAR is a VAR_DECL or a component thereof. */
658 static bool
659 var_decl_component_p (tree var)
661 tree inner = var;
662 while (handled_component_p (inner))
663 inner = TREE_OPERAND (inner, 0);
664 return (DECL_P (inner)
665 || (TREE_CODE (inner) == MEM_REF
666 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
669 /* Return TRUE if the SIZE argument, representing the size of an
670 object, is in a range of values of which exactly zero is valid. */
672 static bool
673 size_must_be_zero_p (tree size)
675 if (integer_zerop (size))
676 return true;
678 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
679 return false;
681 tree type = TREE_TYPE (size);
682 int prec = TYPE_PRECISION (type);
684 /* Compute the value of SSIZE_MAX, the largest positive value that
685 can be stored in ssize_t, the signed counterpart of size_t. */
686 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
687 value_range_base valid_range (VR_RANGE,
688 build_int_cst (type, 0),
689 wide_int_to_tree (type, ssize_max));
690 value_range_base vr;
691 get_range_info (size, vr);
692 vr.intersect (&valid_range);
693 return vr.zero_p ();
696 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
697 diagnose (otherwise undefined) overlapping copies without preventing
698 folding. When folded, GCC guarantees that overlapping memcpy has
699 the same semantics as memmove. Call to the library memcpy need not
700 provide the same guarantee. Return false if no simplification can
701 be made. */
703 static bool
704 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
705 tree dest, tree src, enum built_in_function code)
707 gimple *stmt = gsi_stmt (*gsi);
708 tree lhs = gimple_call_lhs (stmt);
709 tree len = gimple_call_arg (stmt, 2);
710 tree destvar, srcvar;
711 location_t loc = gimple_location (stmt);
713 /* If the LEN parameter is a constant zero or in range where
714 the only valid value is zero, return DEST. */
715 if (size_must_be_zero_p (len))
717 gimple *repl;
718 if (gimple_call_lhs (stmt))
719 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
720 else
721 repl = gimple_build_nop ();
722 tree vdef = gimple_vdef (stmt);
723 if (vdef && TREE_CODE (vdef) == SSA_NAME)
725 unlink_stmt_vdef (stmt);
726 release_ssa_name (vdef);
728 gsi_replace (gsi, repl, false);
729 return true;
732 /* If SRC and DEST are the same (and not volatile), return
733 DEST{,+LEN,+LEN-1}. */
734 if (operand_equal_p (src, dest, 0))
736 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
737 It's safe and may even be emitted by GCC itself (see bug
738 32667). */
739 unlink_stmt_vdef (stmt);
740 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
741 release_ssa_name (gimple_vdef (stmt));
742 if (!lhs)
744 gsi_replace (gsi, gimple_build_nop (), false);
745 return true;
747 goto done;
749 else
751 tree srctype, desttype;
752 unsigned int src_align, dest_align;
753 tree off0;
754 const char *tmp_str;
755 unsigned HOST_WIDE_INT tmp_len;
757 /* Build accesses at offset zero with a ref-all character type. */
758 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
759 ptr_mode, true), 0);
761 /* If we can perform the copy efficiently with first doing all loads
762 and then all stores inline it that way. Currently efficiently
763 means that we can load all the memory into a single integer
764 register which is what MOVE_MAX gives us. */
765 src_align = get_pointer_alignment (src);
766 dest_align = get_pointer_alignment (dest);
767 if (tree_fits_uhwi_p (len)
768 && compare_tree_int (len, MOVE_MAX) <= 0
769 /* ??? Don't transform copies from strings with known length this
770 confuses the tree-ssa-strlen.c. This doesn't handle
771 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
772 reason. */
773 && !c_strlen (src, 2)
774 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
775 && memchr (tmp_str, 0, tmp_len) == NULL))
777 unsigned ilen = tree_to_uhwi (len);
778 if (pow2p_hwi (ilen))
780 /* Detect out-of-bounds accesses without issuing warnings.
781 Avoid folding out-of-bounds copies but to avoid false
782 positives for unreachable code defer warning until after
783 DCE has worked its magic.
784 -Wrestrict is still diagnosed. */
785 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
786 dest, src, len, len,
787 false, false))
788 if (warning != OPT_Wrestrict)
789 return false;
791 scalar_int_mode mode;
792 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
793 if (type
794 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
795 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
796 /* If the destination pointer is not aligned we must be able
797 to emit an unaligned store. */
798 && (dest_align >= GET_MODE_ALIGNMENT (mode)
799 || !targetm.slow_unaligned_access (mode, dest_align)
800 || (optab_handler (movmisalign_optab, mode)
801 != CODE_FOR_nothing)))
803 tree srctype = type;
804 tree desttype = type;
805 if (src_align < GET_MODE_ALIGNMENT (mode))
806 srctype = build_aligned_type (type, src_align);
807 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
808 tree tem = fold_const_aggregate_ref (srcmem);
809 if (tem)
810 srcmem = tem;
811 else if (src_align < GET_MODE_ALIGNMENT (mode)
812 && targetm.slow_unaligned_access (mode, src_align)
813 && (optab_handler (movmisalign_optab, mode)
814 == CODE_FOR_nothing))
815 srcmem = NULL_TREE;
816 if (srcmem)
818 gimple *new_stmt;
819 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
821 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
822 srcmem
823 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
824 new_stmt);
825 gimple_assign_set_lhs (new_stmt, srcmem);
826 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
827 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
829 if (dest_align < GET_MODE_ALIGNMENT (mode))
830 desttype = build_aligned_type (type, dest_align);
831 new_stmt
832 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
833 dest, off0),
834 srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
837 if (gimple_vdef (new_stmt)
838 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
839 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
840 if (!lhs)
842 gsi_replace (gsi, new_stmt, false);
843 return true;
845 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
846 goto done;
852 if (code == BUILT_IN_MEMMOVE)
854 /* Both DEST and SRC must be pointer types.
855 ??? This is what old code did. Is the testing for pointer types
856 really mandatory?
858 If either SRC is readonly or length is 1, we can use memcpy. */
859 if (!dest_align || !src_align)
860 return false;
861 if (readonly_data_expr (src)
862 || (tree_fits_uhwi_p (len)
863 && (MIN (src_align, dest_align) / BITS_PER_UNIT
864 >= tree_to_uhwi (len))))
866 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
867 if (!fn)
868 return false;
869 gimple_call_set_fndecl (stmt, fn);
870 gimple_call_set_arg (stmt, 0, dest);
871 gimple_call_set_arg (stmt, 1, src);
872 fold_stmt (gsi);
873 return true;
876 /* If *src and *dest can't overlap, optimize into memcpy as well. */
877 if (TREE_CODE (src) == ADDR_EXPR
878 && TREE_CODE (dest) == ADDR_EXPR)
880 tree src_base, dest_base, fn;
881 poly_int64 src_offset = 0, dest_offset = 0;
882 poly_uint64 maxsize;
884 srcvar = TREE_OPERAND (src, 0);
885 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
886 if (src_base == NULL)
887 src_base = srcvar;
888 destvar = TREE_OPERAND (dest, 0);
889 dest_base = get_addr_base_and_unit_offset (destvar,
890 &dest_offset);
891 if (dest_base == NULL)
892 dest_base = destvar;
893 if (!poly_int_tree_p (len, &maxsize))
894 maxsize = -1;
895 if (SSA_VAR_P (src_base)
896 && SSA_VAR_P (dest_base))
898 if (operand_equal_p (src_base, dest_base, 0)
899 && ranges_maybe_overlap_p (src_offset, maxsize,
900 dest_offset, maxsize))
901 return false;
903 else if (TREE_CODE (src_base) == MEM_REF
904 && TREE_CODE (dest_base) == MEM_REF)
906 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
907 TREE_OPERAND (dest_base, 0), 0))
908 return false;
909 poly_offset_int full_src_offset
910 = mem_ref_offset (src_base) + src_offset;
911 poly_offset_int full_dest_offset
912 = mem_ref_offset (dest_base) + dest_offset;
913 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
914 full_dest_offset, maxsize))
915 return false;
917 else
918 return false;
920 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
921 if (!fn)
922 return false;
923 gimple_call_set_fndecl (stmt, fn);
924 gimple_call_set_arg (stmt, 0, dest);
925 gimple_call_set_arg (stmt, 1, src);
926 fold_stmt (gsi);
927 return true;
930 /* If the destination and source do not alias optimize into
931 memcpy as well. */
932 if ((is_gimple_min_invariant (dest)
933 || TREE_CODE (dest) == SSA_NAME)
934 && (is_gimple_min_invariant (src)
935 || TREE_CODE (src) == SSA_NAME))
937 ao_ref destr, srcr;
938 ao_ref_init_from_ptr_and_size (&destr, dest, len);
939 ao_ref_init_from_ptr_and_size (&srcr, src, len);
940 if (!refs_may_alias_p_1 (&destr, &srcr, false))
942 tree fn;
943 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
944 if (!fn)
945 return false;
946 gimple_call_set_fndecl (stmt, fn);
947 gimple_call_set_arg (stmt, 0, dest);
948 gimple_call_set_arg (stmt, 1, src);
949 fold_stmt (gsi);
950 return true;
954 return false;
957 if (!tree_fits_shwi_p (len))
958 return false;
959 if (!POINTER_TYPE_P (TREE_TYPE (src))
960 || !POINTER_TYPE_P (TREE_TYPE (dest)))
961 return false;
962 /* In the following try to find a type that is most natural to be
963 used for the memcpy source and destination and that allows
964 the most optimization when memcpy is turned into a plain assignment
965 using that type. In theory we could always use a char[len] type
966 but that only gains us that the destination and source possibly
967 no longer will have their address taken. */
968 srctype = TREE_TYPE (TREE_TYPE (src));
969 if (TREE_CODE (srctype) == ARRAY_TYPE
970 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
971 srctype = TREE_TYPE (srctype);
972 desttype = TREE_TYPE (TREE_TYPE (dest));
973 if (TREE_CODE (desttype) == ARRAY_TYPE
974 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
975 desttype = TREE_TYPE (desttype);
976 if (TREE_ADDRESSABLE (srctype)
977 || TREE_ADDRESSABLE (desttype))
978 return false;
980 /* Make sure we are not copying using a floating-point mode or
981 a type whose size possibly does not match its precision. */
982 if (FLOAT_MODE_P (TYPE_MODE (desttype))
983 || TREE_CODE (desttype) == BOOLEAN_TYPE
984 || TREE_CODE (desttype) == ENUMERAL_TYPE)
985 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
986 if (FLOAT_MODE_P (TYPE_MODE (srctype))
987 || TREE_CODE (srctype) == BOOLEAN_TYPE
988 || TREE_CODE (srctype) == ENUMERAL_TYPE)
989 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
990 if (!srctype)
991 srctype = desttype;
992 if (!desttype)
993 desttype = srctype;
994 if (!srctype)
995 return false;
997 src_align = get_pointer_alignment (src);
998 dest_align = get_pointer_alignment (dest);
999 if (dest_align < TYPE_ALIGN (desttype)
1000 || src_align < TYPE_ALIGN (srctype))
1001 return false;
1003 destvar = NULL_TREE;
1004 if (TREE_CODE (dest) == ADDR_EXPR
1005 && var_decl_component_p (TREE_OPERAND (dest, 0))
1006 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1007 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1009 srcvar = NULL_TREE;
1010 if (TREE_CODE (src) == ADDR_EXPR
1011 && var_decl_component_p (TREE_OPERAND (src, 0))
1012 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1014 if (!destvar
1015 || src_align >= TYPE_ALIGN (desttype))
1016 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1017 src, off0);
1018 else if (!STRICT_ALIGNMENT)
1020 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1021 src_align);
1022 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1026 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1027 return false;
1029 if (srcvar == NULL_TREE)
1031 if (src_align >= TYPE_ALIGN (desttype))
1032 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1038 src_align);
1039 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1042 else if (destvar == NULL_TREE)
1044 if (dest_align >= TYPE_ALIGN (srctype))
1045 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1046 else
1048 if (STRICT_ALIGNMENT)
1049 return false;
1050 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1051 dest_align);
1052 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1056 /* Same as above, detect out-of-bounds accesses without issuing
1057 warnings. Avoid folding out-of-bounds copies but to avoid
1058 false positives for unreachable code defer warning until
1059 after DCE has worked its magic.
1060 -Wrestrict is still diagnosed. */
1061 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1062 dest, src, len, len,
1063 false, false))
1064 if (warning != OPT_Wrestrict)
1065 return false;
1067 gimple *new_stmt;
1068 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1070 tree tem = fold_const_aggregate_ref (srcvar);
1071 if (tem)
1072 srcvar = tem;
1073 if (! is_gimple_min_invariant (srcvar))
1075 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1076 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1077 new_stmt);
1078 gimple_assign_set_lhs (new_stmt, srcvar);
1079 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1080 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1082 new_stmt = gimple_build_assign (destvar, srcvar);
1083 goto set_vop_and_replace;
1086 /* We get an aggregate copy. Use an unsigned char[] type to
1087 perform the copying to preserve padding and to avoid any issues
1088 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1089 desttype = build_array_type_nelts (unsigned_char_type_node,
1090 tree_to_uhwi (len));
1091 srctype = desttype;
1092 if (src_align > TYPE_ALIGN (srctype))
1093 srctype = build_aligned_type (srctype, src_align);
1094 if (dest_align > TYPE_ALIGN (desttype))
1095 desttype = build_aligned_type (desttype, dest_align);
1096 new_stmt
1097 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1098 fold_build2 (MEM_REF, srctype, src, off0));
1099 set_vop_and_replace:
1100 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1101 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1102 if (gimple_vdef (new_stmt)
1103 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1104 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1105 if (!lhs)
1107 gsi_replace (gsi, new_stmt, false);
1108 return true;
1110 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1113 done:
1114 gimple_seq stmts = NULL;
1115 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1116 len = NULL_TREE;
1117 else if (code == BUILT_IN_MEMPCPY)
1119 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1120 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1121 TREE_TYPE (dest), dest, len);
1123 else
1124 gcc_unreachable ();
1126 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1127 gimple *repl = gimple_build_assign (lhs, dest);
1128 gsi_replace (gsi, repl, false);
1129 return true;
1132 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1133 to built-in memcmp (a, b, len). */
1135 static bool
1136 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1138 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1140 if (!fn)
1141 return false;
1143 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1145 gimple *stmt = gsi_stmt (*gsi);
1146 tree a = gimple_call_arg (stmt, 0);
1147 tree b = gimple_call_arg (stmt, 1);
1148 tree len = gimple_call_arg (stmt, 2);
1150 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1151 replace_call_with_call_and_fold (gsi, repl);
1153 return true;
1156 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1157 to built-in memmove (dest, src, len). */
1159 static bool
1160 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1162 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1164 if (!fn)
1165 return false;
1167 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1168 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1169 len) into memmove (dest, src, len). */
1171 gimple *stmt = gsi_stmt (*gsi);
1172 tree src = gimple_call_arg (stmt, 0);
1173 tree dest = gimple_call_arg (stmt, 1);
1174 tree len = gimple_call_arg (stmt, 2);
1176 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1177 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1178 replace_call_with_call_and_fold (gsi, repl);
1180 return true;
1183 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1184 to built-in memset (dest, 0, len). */
1186 static bool
1187 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1189 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1191 if (!fn)
1192 return false;
1194 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1196 gimple *stmt = gsi_stmt (*gsi);
1197 tree dest = gimple_call_arg (stmt, 0);
1198 tree len = gimple_call_arg (stmt, 1);
1200 gimple_seq seq = NULL;
1201 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1202 gimple_seq_add_stmt_without_update (&seq, repl);
1203 gsi_replace_with_seq_vops (gsi, seq);
1204 fold_stmt (gsi);
1206 return true;
1209 /* Fold function call to builtin memset or bzero at *GSI setting the
1210 memory of size LEN to VAL. Return whether a simplification was made. */
1212 static bool
1213 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1215 gimple *stmt = gsi_stmt (*gsi);
1216 tree etype;
1217 unsigned HOST_WIDE_INT length, cval;
1219 /* If the LEN parameter is zero, return DEST. */
1220 if (integer_zerop (len))
1222 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1223 return true;
1226 if (! tree_fits_uhwi_p (len))
1227 return false;
1229 if (TREE_CODE (c) != INTEGER_CST)
1230 return false;
1232 tree dest = gimple_call_arg (stmt, 0);
1233 tree var = dest;
1234 if (TREE_CODE (var) != ADDR_EXPR)
1235 return false;
1237 var = TREE_OPERAND (var, 0);
1238 if (TREE_THIS_VOLATILE (var))
1239 return false;
1241 etype = TREE_TYPE (var);
1242 if (TREE_CODE (etype) == ARRAY_TYPE)
1243 etype = TREE_TYPE (etype);
1245 if (!INTEGRAL_TYPE_P (etype)
1246 && !POINTER_TYPE_P (etype))
1247 return NULL_TREE;
1249 if (! var_decl_component_p (var))
1250 return NULL_TREE;
1252 length = tree_to_uhwi (len);
1253 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1254 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1255 return NULL_TREE;
1257 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1258 return NULL_TREE;
1260 if (integer_zerop (c))
1261 cval = 0;
1262 else
1264 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1265 return NULL_TREE;
1267 cval = TREE_INT_CST_LOW (c);
1268 cval &= 0xff;
1269 cval |= cval << 8;
1270 cval |= cval << 16;
1271 cval |= (cval << 31) << 1;
1274 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1275 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1276 gimple_set_vuse (store, gimple_vuse (stmt));
1277 tree vdef = gimple_vdef (stmt);
1278 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1280 gimple_set_vdef (store, gimple_vdef (stmt));
1281 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1283 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1284 if (gimple_call_lhs (stmt))
1286 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1287 gsi_replace (gsi, asgn, false);
1289 else
1291 gimple_stmt_iterator gsi2 = *gsi;
1292 gsi_prev (gsi);
1293 gsi_remove (&gsi2, true);
1296 return true;
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.

   ARG is the expression whose string length (or, for RKIND ==
   SRK_INT_VALUE, maximum integer value) is being determined.
   VISITED is the bitmap of SSA names already examined, shared with
   the SSA-name overload that this function recurses back into.
   RKIND selects the kind of result (see strlen_range_kind) and
   ELTSIZE is the character size in bytes (1, or 2/4 for wide chars).
   On success the computed bounds are merged into *PDATA and true is
   returned; false means no length could be determined.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: strip the dereference and recurse on the
	     underlying SSA pointer.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);
      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* No exact length was found; for SRK_LENRANGE fall back to bounds
     derived from the size of the referenced array object.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (pdata->maxbound, val))
		pdata->maxbound = val;
	    }
	  else
	    pdata->maxbound = build_all_ones_cst (size_type_node);
	}
      else
	pdata->maxbound = val;
    }
  else
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  /* Non-SSA expressions are handled by the tree-walking helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  The VISITED
     bitmap is allocated lazily on the first SSA name seen.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* For a conditional, merge the ranges of both arms.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
1675 /* Try to obtain the range of the lengths of the string(s) referenced
1676 by ARG, or the size of the largest array ARG refers to if the range
1677 of lengths cannot be determined, and store all in *PDATA. ELTSIZE
1678 is the expected size of the string element in bytes: 1 for char and
1679 some power of 2 for wide characters.
1680 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1681 for optimization. Returning false means that a nonzero PDATA->MINLEN
1682 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1683 is -1 (in that case, the actual range is indeterminate, i.e.,
1684 [0, PTRDIFF_MAX - 2]. */
1686 bool
1687 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1689 bitmap visited = NULL;
1691 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1693 /* On failure extend the length range to an impossible maximum
1694 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1695 members can stay unchanged regardless. */
1696 pdata->minlen = ssize_int (0);
1697 pdata->maxlen = build_all_ones_cst (size_type_node);
1699 else if (!pdata->minlen)
1700 pdata->minlen = ssize_int (0);
1702 /* Unless its null, leave the more conservative MAXBOUND unchanged. */
1703 if (!pdata->maxbound)
1704 pdata->maxbound = pdata->maxlen;
1706 if (visited)
1707 BITMAP_FREE (visited);
1709 return !integer_all_onesp (pdata->maxlen);
1712 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1713 For ARG of pointer types, NONSTR indicates if the caller is prepared
1714 to handle unterminated strings. For integer ARG and when RKIND ==
1715 SRK_INT_VALUE, NONSTR must be null.
1717 If an unterminated array is discovered and our caller handles
1718 unterminated arrays, then bubble up the offending DECL and
1719 return the maximum size. Otherwise return NULL. */
1721 static tree
1722 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1724 /* A non-null NONSTR is meaningless when determining the maximum
1725 value of an integer ARG. */
1726 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1727 /* ARG must have an integral type when RKIND says so. */
1728 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1730 bitmap visited = NULL;
1732 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1733 is unbounded. */
1734 c_strlen_data lendata = { };
1735 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1736 lendata.maxlen = NULL_TREE;
1737 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1738 lendata.maxlen = NULL_TREE;
1740 if (visited)
1741 BITMAP_FREE (visited);
1743 if (nonstr)
1745 /* For callers prepared to handle unterminated arrays set
1746 *NONSTR to point to the declaration of the array and return
1747 the maximum length/size. */
1748 *nonstr = lendata.decl;
1749 return lendata.maxlen;
1752 /* Fail if the constant array isn't nul-terminated. */
1753 return lendata.decl ? NULL_TREE : lendata.maxlen;
/* Fold function call to builtin strcpy with arguments DEST and SRC
   at *GSI.  Either replaces the call with its value DEST (when SRC
   and DEST are the same) or with a call to memcpy when the length of
   SRC is a known constant.  Return true if a simplification was made,
   false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy transformation below adds a strlen-derived length
     argument; not worthwhile when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  Warn once and
	 suppress further diagnostics on this statement.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Copy strlen (SRC) + 1 bytes (including the terminating nul).  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN at *GSI.  Either replaces the call with its value DEST (when LEN
   is zero) or with a call to memcpy when the whole of SRC including
   its terminating nul fits in LEN bytes.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   IS_STRRCHR selects between the two built-ins.  Return true if a
   simplification was made, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a result the call is dead; nothing to simplify here.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate at compile time using the
     host's strchr/strrchr on the constant string.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Replace the call with STR + offset of the found character.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen transformation below is only valid when searching
     for the terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1980 /* Fold function call to builtin strstr.
1981 If both arguments are constant, evaluate and fold the result,
1982 additionally fold strstr (x, "") into x and strstr (x, "c")
1983 into strchr (x, 'c'). */
1984 static bool
1985 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1987 gimple *stmt = gsi_stmt (*gsi);
1988 tree haystack = gimple_call_arg (stmt, 0);
1989 tree needle = gimple_call_arg (stmt, 1);
1990 const char *p, *q;
1992 if (!gimple_call_lhs (stmt))
1993 return false;
1995 q = c_getstr (needle);
1996 if (q == NULL)
1997 return false;
1999 if ((p = c_getstr (haystack)))
2001 const char *r = strstr (p, q);
2003 if (r == NULL)
2005 replace_call_with_value (gsi, integer_zero_node);
2006 return true;
2009 tree len = build_int_cst (size_type_node, r - p);
2010 gimple_seq stmts = NULL;
2011 gimple *new_stmt
2012 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2013 haystack, len);
2014 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2015 gsi_replace_with_seq_vops (gsi, stmts);
2016 return true;
2019 /* For strstr (x, "") return x. */
2020 if (q[0] == '\0')
2022 replace_call_with_value (gsi, haystack);
2023 return true;
2026 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2027 if (q[1] == '\0')
2029 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2030 if (strchr_fn)
2032 tree c = build_int_cst (integer_type_node, q[0]);
2033 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2034 replace_call_with_call_and_fold (gsi, repl);
2035 return true;
2039 return false;
2042 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2043 to the call.
   Folds strcat (dst, "") into plain DST, and otherwise — when the block
   is optimized for speed and the length of SRC is a known constant —
   expands the call into strlen (dst) followed by memcpy of SRC
   (including its terminating NUL) to dst + strlen (dst).
   Returns true if the statement was changed, false otherwise.  */
2060 static bool
2061 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2063 gimple *stmt = gsi_stmt (*gsi);
2064 location_t loc = gimple_location (stmt);
2066 const char *p = c_getstr (src);
2068 /* If the string length is zero, return the dst parameter. */
2069 if (p && *p == '\0')
2071 replace_call_with_value (gsi, dst);
2072 return true;
/* The strlen+memcpy expansion trades size for speed; skip it when
   optimizing this block for size.  */
2075 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2076 return false;
2078 /* See if we can store by pieces into (dst + strlen(dst)). */
2079 tree newdst;
2080 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2081 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2083 if (!strlen_fn || !memcpy_fn)
2084 return false;
2086 /* If the length of the source string isn't computable don't
2087 split strcat into strlen and memcpy. */
2088 tree len = get_maxval_strlen (src, SRK_STRLEN);
2089 if (! len)
2090 return false;
2092 /* Create strlen (dst). */
2093 gimple_seq stmts = NULL, stmts2;
2094 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2095 gimple_set_location (repl, loc);
2096 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2097 gimple_call_set_lhs (repl, newdst);
2098 gimple_seq_add_stmt_without_update (&stmts, repl);
2100 /* Create (dst p+ strlen (dst)). */
2101 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2102 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2103 gimple_seq_add_seq_without_update (&stmts, stmts2);
/* Copy LEN + 1 bytes so the terminating NUL is copied as well.  */
2105 len = fold_convert_loc (loc, size_type_node, len);
2106 len = size_binop_loc (loc, PLUS_EXPR, len,
2107 build_int_cst (size_type_node, 1));
2108 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2109 gimple_seq_add_seq_without_update (&stmts, stmts2);
2111 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2112 gimple_seq_add_stmt_without_update (&stmts, repl);
/* strcat returns DST; if the result is used, append an assignment of
   DST to the original LHS.  */
2113 if (gimple_call_lhs (stmt))
2115 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2116 gimple_seq_add_stmt_without_update (&stmts, repl);
2117 gsi_replace_with_seq_vops (gsi, stmts);
2118 /* gsi now points at the assignment to the lhs, get a
2119 stmt iterator to the memcpy call.
2120 ??? We can't use gsi_for_stmt as that doesn't work when the
2121 CFG isn't built yet. */
2122 gimple_stmt_iterator gsi2 = *gsi;
2123 gsi_prev (&gsi2);
2124 fold_stmt (&gsi2);
2126 else
2128 gsi_replace_with_seq_vops (gsi, stmts);
2129 fold_stmt (gsi);
2131 return true;
2134 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2135 are the arguments to the call.
   Returns true if the call was replaced, false otherwise.  */
2137 static bool
2138 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2140 gimple *stmt = gsi_stmt (*gsi);
2141 tree dest = gimple_call_arg (stmt, 0);
2142 tree src = gimple_call_arg (stmt, 1);
2143 tree size = gimple_call_arg (stmt, 2);
2144 tree fn;
2145 const char *p;
2148 p = c_getstr (src);
2149 /* If the SRC parameter is "", return DEST. */
2150 if (p && *p == '\0')
2152 replace_call_with_value (gsi, dest);
2153 return true;
/* Only drop the check when SIZE is the all-ones "unknown object size"
   sentinel; any other size would still need the runtime check.  */
2156 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2157 return false;
2159 /* If __builtin_strcat_chk is used, assume strcat is available. */
2160 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2161 if (!fn)
2162 return false;
2164 gimple *repl = gimple_build_call (fn, 2, dest, src);
2165 replace_call_with_call_and_fold (gsi, repl);
2166 return true;
2169 /* Simplify a call to the strncat builtin.
   Folds calls with a zero bound or an empty source into plain DST, and
   calls whose bound is at least the (constant) source length into
   strcat.  Also issues -Wstringop-overflow diagnostics for bounds that
   equal or exceed the destination size or equal the source length.
   Returns true if the statement was changed.  */
2171 static bool
2172 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2174 gimple *stmt = gsi_stmt (*gsi);
2175 tree dst = gimple_call_arg (stmt, 0);
2176 tree src = gimple_call_arg (stmt, 1);
2177 tree len = gimple_call_arg (stmt, 2);
2179 const char *p = c_getstr (src);
2181 /* If the requested length is zero, or the src parameter string
2182 length is zero, return the dst parameter. */
2183 if (integer_zerop (len) || (p && *p == '\0'))
2185 replace_call_with_value (gsi, dst);
2186 return true;
/* Everything below needs both a constant bound and a constant source.  */
2189 if (TREE_CODE (len) != INTEGER_CST || !p)
2190 return false;
2192 unsigned srclen = strlen (p);
2194 int cmpsrc = compare_tree_int (len, srclen);
2196 /* Return early if the requested len is less than the string length.
2197 Warnings will be issued elsewhere later. */
2198 if (cmpsrc < 0)
2199 return false;
2201 unsigned HOST_WIDE_INT dstsize;
/* Respect a previously set no-warning bit on the statement.  */
2203 bool nowarn = gimple_no_warning_p (stmt);
2205 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2207 int cmpdst = compare_tree_int (len, dstsize);
2209 if (cmpdst >= 0)
2211 tree fndecl = gimple_call_fndecl (stmt);
2213 /* Strncat copies (at most) LEN bytes and always appends
2214 the terminating NUL so the specified bound should never
2215 be equal to (or greater than) the size of the destination.
2216 If it is, the copy could overflow. */
2217 location_t loc = gimple_location (stmt);
2218 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2219 cmpdst == 0
2220 ? G_("%G%qD specified bound %E equals "
2221 "destination size")
2222 : G_("%G%qD specified bound %E exceeds "
2223 "destination size %wu"),
2224 stmt, fndecl, len, dstsize);
/* Suppress duplicate diagnostics for this statement later on.  */
2225 if (nowarn)
2226 gimple_set_no_warning (stmt, true);
2230 if (!nowarn && cmpsrc == 0)
2232 tree fndecl = gimple_call_fndecl (stmt);
2233 location_t loc = gimple_location (stmt);
2235 /* To avoid possible overflow the specified bound should also
2236 not be equal to the length of the source, even when the size
2237 of the destination is unknown (it's not an uncommon mistake
2238 to specify as the bound to strncpy the length of the source). */
2239 if (warning_at (loc, OPT_Wstringop_overflow_,
2240 "%G%qD specified bound %E equals source length",
2241 stmt, fndecl, len))
2242 gimple_set_no_warning (stmt, true);
2245 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2247 /* If the replacement _DECL isn't initialized, don't do the
2248 transformation. */
2249 if (!fn)
2250 return false;
2252 /* Otherwise, emit a call to strcat. */
2253 gcall *repl = gimple_build_call (fn, 2, dst, src);
2254 replace_call_with_call_and_fold (gsi, repl);
2255 return true;
2258 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2259 LEN, and SIZE.
   Returns true if the call was simplified into DEST, __strcat_chk, or
   plain strncat; false otherwise.  */
2261 static bool
2262 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2264 gimple *stmt = gsi_stmt (*gsi);
2265 tree dest = gimple_call_arg (stmt, 0);
2266 tree src = gimple_call_arg (stmt, 1);
2267 tree len = gimple_call_arg (stmt, 2);
2268 tree size = gimple_call_arg (stmt, 3);
2269 tree fn;
2270 const char *p;
2272 p = c_getstr (src);
2273 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2274 if ((p && *p == '\0')
2275 || integer_zerop (len))
2277 replace_call_with_value (gsi, dest);
2278 return true;
2281 if (! tree_fits_uhwi_p (size))
2282 return false;
/* SIZE other than the all-ones "unknown" sentinel: the object size is
   known, so the check can only be relaxed, not dropped.  */
2284 if (! integer_all_onesp (size))
2286 tree src_len = c_strlen (src, 1);
2287 if (src_len
2288 && tree_fits_uhwi_p (src_len)
2289 && tree_fits_uhwi_p (len)
2290 && ! tree_int_cst_lt (len, src_len))
2292 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2293 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK)
2294 if (!fn)
2295 return false;
2297 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2298 replace_call_with_call_and_fold (gsi, repl);
2299 return true;
2301 return false;
2304 /* If __builtin_strncat_chk is used, assume strncat is available. */
2305 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2306 if (!fn)
2307 return false;
2309 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2310 replace_call_with_call_and_fold (gsi, repl);
2311 return true;
2314 /* Build and append gimple statements to STMTS that would load a first
2315 character of a memory location identified by STR. LOC is location
2316 of the statement.
   The load is performed as *(const unsigned char *) STR and the result
   is returned in a fresh unsigned-char temporary/SSA name.  */
2318 static tree
2319 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2321 tree var;
/* const-qualified unsigned char, and a pointer to it in ptr_mode.  */
2323 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2324 tree cst_uchar_ptr_node
2325 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2326 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
/* MEM_REF with zero offset, i.e. the byte STR points at.  */
2328 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2329 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2330 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2332 gimple_assign_set_lhs (stmt, var);
2333 gimple_seq_add_stmt_without_update (stmts, stmt);
2335 return var;
2338 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2339 FCODE is the name of the builtin.
   Handles strcmp, strncmp, strcasecmp, strncasecmp and the _EQ
   variants.  Returns true if the call was folded.  */
2341 static bool
2342 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2344 gimple *stmt = gsi_stmt (*gsi);
2345 tree callee = gimple_call_fndecl (stmt);
2346 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2348 tree type = integer_type_node;
2349 tree str1 = gimple_call_arg (stmt, 0);
2350 tree str2 = gimple_call_arg (stmt, 1);
2351 tree lhs = gimple_call_lhs (stmt);
/* -1 means the bound is unknown or the builtin has no bound.  */
2352 HOST_WIDE_INT length = -1;
2354 /* Handle strncmp and strncasecmp functions. */
2355 if (gimple_call_num_args (stmt) == 3)
2357 tree len = gimple_call_arg (stmt, 2);
2358 if (tree_fits_uhwi_p (len))
2359 length = tree_to_uhwi (len);
2362 /* If the LEN parameter is zero, return zero. */
2363 if (length == 0)
2365 replace_call_with_value (gsi, integer_zero_node);
2366 return true;
2369 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2370 if (operand_equal_p (str1, str2, 0))
2372 replace_call_with_value (gsi, integer_zero_node);
2373 return true;
2376 const char *p1 = c_getstr (str1);
2377 const char *p2 = c_getstr (str2);
2379 /* For known strings, return an immediate value. */
2380 if (p1 && p2)
2382 int r = 0;
2383 bool known_result = false;
2385 switch (fcode)
2387 case BUILT_IN_STRCMP:
2388 case BUILT_IN_STRCMP_EQ:
2390 r = strcmp (p1, p2);
2391 known_result = true;
2392 break;
2394 case BUILT_IN_STRNCMP:
2395 case BUILT_IN_STRNCMP_EQ:
2397 if (length == -1)
2398 break;
2399 r = strncmp (p1, p2, length);
2400 known_result = true;
2401 break;
2403 /* Only handleable situation is where the string are equal (result 0),
2404 which is already handled by operand_equal_p case. */
2405 case BUILT_IN_STRCASECMP:
2406 break;
2407 case BUILT_IN_STRNCASECMP:
2409 if (length == -1)
2410 break;
/* Case-insensitive compare can only be decided here when the
   case-sensitive compare already yields equality.  */
2411 r = strncmp (p1, p2, length);
2412 if (r == 0)
2413 known_result = true;
2414 break;
2416 default:
2417 gcc_unreachable ();
2420 if (known_result)
2422 replace_call_with_value (gsi, build_cmp_result (type, r));
2423 return true;
/* True when at least one character is certainly compared, so the
   first-character folds below are valid.  */
2427 bool nonzero_length = length >= 1
2428 || fcode == BUILT_IN_STRCMP
2429 || fcode == BUILT_IN_STRCMP_EQ
2430 || fcode == BUILT_IN_STRCASECMP;
2432 location_t loc = gimple_location (stmt);
2434 /* If the second arg is "", return *(const unsigned char*)arg1. */
2435 if (p2 && *p2 == '\0' && nonzero_length)
2437 gimple_seq stmts = NULL;
2438 tree var = gimple_load_first_char (loc, str1, &stmts);
2439 if (lhs)
2441 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2442 gimple_seq_add_stmt_without_update (&stmts, stmt);
2445 gsi_replace_with_seq_vops (gsi, stmts);
2446 return true;
2449 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2450 if (p1 && *p1 == '\0' && nonzero_length)
2452 gimple_seq stmts = NULL;
2453 tree var = gimple_load_first_char (loc, str2, &stmts);
2455 if (lhs)
2457 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2458 stmt = gimple_build_assign (c, NOP_EXPR, var);
2459 gimple_seq_add_stmt_without_update (&stmts, stmt);
2461 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2462 gimple_seq_add_stmt_without_update (&stmts, stmt);
2465 gsi_replace_with_seq_vops (gsi, stmts);
2466 return true;
2469 /* If len parameter is one, return an expression corresponding to
2470 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2471 if (fcode == BUILT_IN_STRNCMP && length == 1)
2473 gimple_seq stmts = NULL;
2474 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2475 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2477 if (lhs)
2479 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2480 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2481 gimple_seq_add_stmt_without_update (&stmts, convert1);
2483 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2484 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2485 gimple_seq_add_stmt_without_update (&stmts, convert2);
2487 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2488 gimple_seq_add_stmt_without_update (&stmts, stmt);
2491 gsi_replace_with_seq_vops (gsi, stmts);
2492 return true;
2495 /* If length is larger than the length of one constant string,
2496 replace strncmp with corresponding strcmp */
2497 if (fcode == BUILT_IN_STRNCMP
2498 && length > 0
2499 && ((p2 && (size_t) length > strlen (p2))
2500 || (p1 && (size_t) length > strlen (p1))))
2502 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2503 if (!fn)
2504 return false;
2505 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2506 replace_call_with_call_and_fold (gsi, repl);
2507 return true;
2510 return false;
2513 /* Fold a call to the memchr pointed by GSI iterator.
   Folds memchr (s, c, 0) to null, and calls with a constant string,
   constant character and constant length to either null or
   s p+ offset-of-match.  Returns true if the call was folded.  */
2515 static bool
2516 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2518 gimple *stmt = gsi_stmt (*gsi);
2519 tree lhs = gimple_call_lhs (stmt);
2520 tree arg1 = gimple_call_arg (stmt, 0);
2521 tree arg2 = gimple_call_arg (stmt, 1);
2522 tree len = gimple_call_arg (stmt, 2);
2524 /* If the LEN parameter is zero, return zero. */
2525 if (integer_zerop (len))
2527 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2528 return true;
/* Need a constant searched-for character representable as a target
   char, and a constant length.  */
2531 char c;
2532 if (TREE_CODE (arg2) != INTEGER_CST
2533 || !tree_fits_uhwi_p (len)
2534 || !target_char_cst_p (arg2, &c))
2535 return false;
2537 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2538 unsigned HOST_WIDE_INT string_length;
2539 const char *p1 = c_getstr (arg1, &string_length);
2541 if (p1)
/* Only the bytes of the string literal are known; search at most
   STRING_LENGTH of them.  */
2543 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2544 if (r == NULL)
2546 tree mem_size, offset_node;
2547 string_constant (arg1, &offset_node, &mem_size, NULL);
2548 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2549 ? 0 : tree_to_uhwi (offset_node);
2550 /* MEM_SIZE is the size of the array the string literal
2551 is stored in. */
2552 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2553 gcc_checking_assert (string_length <= string_size);
/* A fold to null is only valid when the search cannot run past the
   underlying array into unknown bytes.  */
2554 if (length <= string_size)
2556 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2557 return true;
2560 else
2562 unsigned HOST_WIDE_INT offset = r - p1;
2563 gimple_seq stmts = NULL;
2564 if (lhs != NULL_TREE)
2566 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2567 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2568 arg1, offset_cst);
2569 gimple_seq_add_stmt_without_update (&stmts, stmt);
2571 else
/* Result unused: replace the call with a no-op.  */
2572 gimple_seq_add_stmt_without_update (&stmts,
2573 gimple_build_nop ());
2575 gsi_replace_with_seq_vops (gsi, stmts);
2576 return true;
2580 return false;
2583 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2584 to the call (the string and the stream).  UNLOCKED is true if this is
   actually a call to fputs_unlocked.
   Depending on the known length of ARG0, the call is deleted (length
   0), turned into fputc (length 1) or turned into fwrite (longer).
   Returns true if the statement was changed.  */
2590 static bool
2591 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2592 tree arg0, tree arg1,
2593 bool unlocked)
2595 gimple *stmt = gsi_stmt (*gsi);
2597 /* If we're using an unlocked function, assume the other unlocked
2598 functions exist explicitly. */
2599 tree const fn_fputc = (unlocked
2600 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2601 : builtin_decl_implicit (BUILT_IN_FPUTC))
2602 tree const fn_fwrite = (unlocked
2603 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2604 : builtin_decl_implicit (BUILT_IN_FWRITE));
2606 /* If the return value is used, don't do the transformation. */
2607 if (gimple_call_lhs (stmt))
2608 return false;
2610 /* Get the length of the string passed to fputs. If the length
2611 can't be determined, punt. */
2612 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2613 if (!len
2614 || TREE_CODE (len) != INTEGER_CST)
2615 return false;
2617 switch (compare_tree_int (len, 1))
2619 case -1: /* length is 0, delete the call entirely . */
2620 replace_call_with_value (gsi, integer_zero_node);
2621 return true;
2623 case 0: /* length is 1, call fputc. */
2625 const char *p = c_getstr (arg0);
2626 if (p != NULL)
2628 if (!fn_fputc)
2629 return false;
2631 gimple *repl = gimple_build_call (fn_fputc, 2,
2632 build_int_cst
2633 (integer_type_node, p[0]), arg1);
2634 replace_call_with_call_and_fold (gsi, repl);
2635 return true;
/* Length 1 but the character itself is unknown: fall through and
   treat it like a longer string.  */
2638 /* FALLTHROUGH */
2639 case 1: /* length is greater than 1, call fwrite. */
2641 /* If optimizing for size keep fputs. */
2642 if (optimize_function_for_size_p (cfun))
2643 return false;
2644 /* New argument list transforming fputs(string, stream) to
2645 fwrite(string, 1, len, stream). */
2646 if (!fn_fwrite)
2647 return false;
2649 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2650 size_one_node, len, arg1);
2651 replace_call_with_call_and_fold (gsi, repl);
2652 return true;
2654 default:
2655 gcc_unreachable ();
2657 return false;
2660 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2661 DEST, SRC, LEN, and SIZE are the arguments to the call.
2662 FCODE is the BUILT_IN_* code of the builtin.
   Folds the checked call into the corresponding unchecked builtin when
   the object-size check is provably satisfied.  Returns true if the
   statement was changed.  */
2666 static bool
2667 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2668 tree dest, tree src, tree len, tree size,
2669 enum built_in_function fcode)
2671 gimple *stmt = gsi_stmt (*gsi);
2672 location_t loc = gimple_location (stmt);
2673 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2674 tree fn;
2676 /* If SRC and DEST are the same (and not volatile), return DEST
2677 (resp. DEST+LEN for __mempcpy_chk). */
2678 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2680 if (fcode != BUILT_IN_MEMPCPY_CHK)
2682 replace_call_with_value (gsi, dest);
2683 return true;
2685 else
2687 gimple_seq stmts = NULL;
2688 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2689 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2690 TREE_TYPE (dest), dest, len);
2691 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2692 replace_call_with_value (gsi, temp);
2693 return true;
2697 if (! tree_fits_uhwi_p (size))
2698 return false;
2700 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
/* All-ones SIZE means "object size unknown": no check is needed.
   Otherwise prove LEN (or its upper bound MAXLEN) <= SIZE.  */
2701 if (! integer_all_onesp (size))
2703 if (! tree_fits_uhwi_p (len))
2705 /* If LEN is not constant, try MAXLEN too.
2706 For MAXLEN only allow optimizing into non-_ocs function
2707 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2708 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2710 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2712 /* (void) __mempcpy_chk () can be optimized into
2713 (void) __memcpy_chk (). */
2714 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2715 if (!fn)
2716 return false;
2718 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2719 replace_call_with_call_and_fold (gsi, repl);
2720 return true;
2722 return false;
2725 else
2726 maxlen = len;
2728 if (tree_int_cst_lt (size, maxlen))
2729 return false;
2732 fn = NULL_TREE;
2733 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2734 mem{cpy,pcpy,move,set} is available. */
2735 switch (fcode)
2737 case BUILT_IN_MEMCPY_CHK:
2738 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2739 break;
2740 case BUILT_IN_MEMPCPY_CHK:
2741 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2742 break;
2743 case BUILT_IN_MEMMOVE_CHK:
2744 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2745 break;
2746 case BUILT_IN_MEMSET_CHK:
2747 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2748 break;
2749 default:
2750 break;
2753 if (!fn)
2754 return false;
2756 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2757 replace_call_with_call_and_fold (gsi, repl);
2758 return true;
2761 /* Fold a call to the __st[rp]cpy_chk builtin.
2762 DEST, SRC, and SIZE are the arguments to the call.
2763 FCODE is the BUILT_IN_* code of the builtin.
   Folds the checked call into st[rp]cpy (or __memcpy_chk when only a
   non-constant source length is known) when the object-size check is
   provably satisfied.  Returns true if the statement was changed.  */
2767 static bool
2768 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2769 tree dest,
2770 tree src, tree size,
2771 enum built_in_function fcode)
2773 gimple *stmt = gsi_stmt (*gsi);
2774 location_t loc = gimple_location (stmt);
2775 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2776 tree len, fn;
2778 /* If SRC and DEST are the same (and not volatile), return DEST. */
2779 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2781 /* Issue -Wrestrict unless the pointers are null (those do
2782 not point to objects and so do not indicate an overlap;
2783 such calls could be the result of sanitization and jump
2784 threading). */
2785 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2787 tree func = gimple_call_fndecl (stmt);
2789 warning_at (loc, OPT_Wrestrict,
2790 "%qD source argument is the same as destination",
2791 func);
2794 replace_call_with_value (gsi, dest);
2795 return true;
2798 if (! tree_fits_uhwi_p (size))
2799 return false;
2801 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
/* All-ones SIZE means "unknown object size"; otherwise the source
   length (plus NUL) must be shown to fit in SIZE.  */
2802 if (! integer_all_onesp (size))
2804 len = c_strlen (src, 1);
2805 if (! len || ! tree_fits_uhwi_p (len))
2807 /* If LEN is not constant, try MAXLEN too.
2808 For MAXLEN only allow optimizing into non-_ocs function
2809 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2810 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2812 if (fcode == BUILT_IN_STPCPY_CHK)
2814 if (! ignore)
2815 return false;
2817 /* If return value of __stpcpy_chk is ignored,
2818 optimize into __strcpy_chk. */
2819 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2820 if (!fn)
2821 return false;
2823 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2824 replace_call_with_call_and_fold (gsi, repl);
2825 return true;
2828 if (! len || TREE_SIDE_EFFECTS (len))
2829 return false;
2831 /* If c_strlen returned something, but not a constant,
2832 transform __strcpy_chk into __memcpy_chk. */
2833 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2834 if (!fn)
2835 return false;
/* Copy strlen (src) + 1 bytes to include the terminating NUL.  */
2837 gimple_seq stmts = NULL;
2838 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2839 len = gimple_convert (&stmts, loc, size_type_node, len);
2840 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2841 build_int_cst (size_type_node, 1));
2842 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2843 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2844 replace_call_with_call_and_fold (gsi, repl);
2845 return true;
2848 else
2849 maxlen = len;
/* Note the asymmetry with the length check elsewhere: here MAXLEN is
   the string length excluding the NUL, so MAXLEN < SIZE is required.  */
2851 if (! tree_int_cst_lt (maxlen, size))
2852 return false;
2855 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2856 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2857 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2858 if (!fn)
2859 return false;
2861 gimple *repl = gimple_build_call (fn, 2, dest, src);
2862 replace_call_with_call_and_fold (gsi, repl);
2863 return true;
2866 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2867 are the arguments to the call. FCODE is the BUILT_IN_* code of the
   builtin.
   Folds the checked call into st{r,p}ncpy when the object-size check
   is provably satisfied; a __stpncpy_chk whose result is unused is
   first downgraded to __strncpy_chk.  Returns true if changed.  */
2871 static bool
2872 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2873 tree dest, tree src,
2874 tree len, tree size,
2875 enum built_in_function fcode)
2877 gimple *stmt = gsi_stmt (*gsi);
2878 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2879 tree fn;
2881 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2883 /* If return value of __stpncpy_chk is ignored,
2884 optimize into __strncpy_chk. */
2885 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2886 if (fn)
2888 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2889 replace_call_with_call_and_fold (gsi, repl);
2890 return true;
2894 if (! tree_fits_uhwi_p (size))
2895 return false;
2897 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
/* All-ones SIZE means "unknown object size"; otherwise prove LEN
   (or its upper bound MAXLEN) <= SIZE.  */
2898 if (! integer_all_onesp (size))
2900 if (! tree_fits_uhwi_p (len))
2902 /* If LEN is not constant, try MAXLEN too.
2903 For MAXLEN only allow optimizing into non-_ocs function
2904 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2905 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2906 return false;
2908 else
2909 maxlen = len;
2911 if (tree_int_cst_lt (size, maxlen))
2912 return false;
2915 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2916 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2917 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2918 if (!fn)
2919 return false;
2921 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2922 replace_call_with_call_and_fold (gsi, repl);
2923 return true;
2926 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused the call becomes strcpy; if the source
   length is a known constant the call becomes memcpy of len + 1 bytes
   followed by DEST p+ len.  Returns true if the statement was changed,
   false if no simplification can be made.  */
2929 static bool
2930 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2932 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2933 location_t loc = gimple_location (stmt);
2934 tree dest = gimple_call_arg (stmt, 0);
2935 tree src = gimple_call_arg (stmt, 1);
2936 tree fn, lenp1;
2938 /* If the result is unused, replace stpcpy with strcpy. */
2939 if (gimple_call_lhs (stmt) == NULL_TREE)
2941 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2942 if (!fn)
2943 return false;
2944 gimple_call_set_fndecl (stmt, fn);
2945 fold_stmt (gsi);
2946 return true;
2949 /* Set to non-null if ARG refers to an unterminated array. */
2950 c_strlen_data data = { };
2951 tree len = c_strlen (src, 1, &data, 1);
2952 if (!len
2953 || TREE_CODE (len) != INTEGER_CST)
2955 data.decl = unterminated_array (src);
2956 if (!data.decl)
2957 return false;
2960 if (data.decl)
2962 /* Avoid folding calls with unterminated arrays. */
2963 if (!gimple_no_warning_p (stmt))
2964 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2965 gimple_set_no_warning (stmt, true);
2966 return false;
2969 if (optimize_function_for_size_p (cfun)
2970 /* If length is zero it's small enough. */
2971 && !integer_zerop (len))
2972 return false;
2974 /* If the source has a known length replace stpcpy with memcpy. */
2975 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2976 if (!fn)
2977 return false;
/* Build len + 1 (to also copy the terminating NUL) and emit the
   memcpy call, carrying over the virtual operands by hand.  */
2979 gimple_seq stmts = NULL;
2980 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2981 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2982 tem, build_int_cst (size_type_node, 1));
2983 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2984 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2985 gimple_set_vuse (repl, gimple_vuse (stmt));
2986 gimple_set_vdef (repl, gimple_vdef (stmt));
2987 if (gimple_vdef (repl)
2988 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2989 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2990 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2991 /* Replace the result with dest + len. */
2992 stmts = NULL;
2993 tem = gimple_convert (&stmts, loc, sizetype, len);
2994 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2995 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2996 POINTER_PLUS_EXPR, dest, tem);
2997 gsi_replace (gsi, ret, false);
2998 /* Finally fold the memcpy call. */
2999 gimple_stmt_iterator gsi2 = *gsi;
3000 gsi_prev (&gsi2);
3001 fold_stmt (&gsi2);
3002 return true;
3005 /* Fold a call to {,v}snprintf_chk.  FCODE is either BUILT_IN_SNPRINTF_CHK
3006 or BUILT_IN_VSNPRINTF_CHK.
   Converts the checked call into plain {,v}snprintf when the
   object-size check is provably satisfied and the format permits it.
   Returns true if the statement was changed.  */
3011 static bool
3012 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3013 enum built_in_function fcode)
3015 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3016 tree dest, size, len, fn, fmt, flag;
3017 const char *fmt_str;
3019 /* Verify the required arguments in the original call. */
3020 if (gimple_call_num_args (stmt) < 5)
3021 return false;
3023 dest = gimple_call_arg (stmt, 0);
3024 len = gimple_call_arg (stmt, 1);
3025 flag = gimple_call_arg (stmt, 2);
3026 size = gimple_call_arg (stmt, 3);
3027 fmt = gimple_call_arg (stmt, 4);
3029 if (! tree_fits_uhwi_p (size))
3030 return false;
/* All-ones SIZE means "unknown object size"; otherwise prove the
   bound LEN (or its maximum MAXLEN) <= SIZE.  */
3032 if (! integer_all_onesp (size))
3034 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3035 if (! tree_fits_uhwi_p (len))
3037 /* If LEN is not constant, try MAXLEN too.
3038 For MAXLEN only allow optimizing into non-_ocs function
3039 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3040 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3041 return false;
3043 else
3044 maxlen = len;
3046 if (tree_int_cst_lt (size, maxlen))
3047 return false;
3050 if (!init_target_chars ())
3051 return false;
3053 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3054 or if format doesn't contain % chars or is "%s". */
3055 if (! integer_zerop (flag))
3057 fmt_str = c_getstr (fmt);
3058 if (fmt_str == NULL)
3059 return false;
3060 if (strchr (fmt_str, target_percent) != NULL
3061 && strcmp (fmt_str, target_percent_s))
3062 return false;
3065 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3066 available. */
3067 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3068 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3069 if (!fn)
3070 return false;
3072 /* Replace the called function and the first 5 argument by 3 retaining
3073 trailing varargs. */
3074 gimple_call_set_fndecl (stmt, fn);
3075 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3076 gimple_call_set_arg (stmt, 0, dest);
3077 gimple_call_set_arg (stmt, 1, len);
3078 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the remaining varargs down over the dropped FLAG and SIZE.  */
3079 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3080 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3081 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3082 fold_stmt (gsi);
3083 return true;
3086 /* Fold a call to __{,v}sprintf_chk.  FCODE is either BUILT_IN_SPRINTF_CHK
3088 or BUILT_IN_VSPRINTF_CHK.
   Converts the checked call into plain {,v}sprintf when the output
   length can be bounded below SIZE and the format permits it.
   Returns true if the statement was changed.  */
3091 static bool
3092 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3093 enum built_in_function fcode)
3095 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3096 tree dest, size, len, fn, fmt, flag;
3097 const char *fmt_str;
3098 unsigned nargs = gimple_call_num_args (stmt);
3100 /* Verify the required arguments in the original call. */
3101 if (nargs < 4)
3102 return false;
3103 dest = gimple_call_arg (stmt, 0);
3104 flag = gimple_call_arg (stmt, 1);
3105 size = gimple_call_arg (stmt, 2);
3106 fmt = gimple_call_arg (stmt, 3);
3108 if (! tree_fits_uhwi_p (size))
3109 return false;
3111 len = NULL_TREE;
3113 if (!init_target_chars ())
3114 return false;
3116 /* Check whether the format is a literal string constant. */
3117 fmt_str = c_getstr (fmt);
3118 if (fmt_str != NULL)
3120 /* If the format doesn't contain % args or %%, we know the size. */
3121 if (strchr (fmt_str, target_percent) == 0)
3123 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3124 len = build_int_cstu (size_type_node, strlen (fmt_str));
3126 /* If the format is "%s" and first ... argument is a string literal,
3127 we know the size too. */
3128 else if (fcode == BUILT_IN_SPRINTF_CHK
3129 && strcmp (fmt_str, target_percent_s) == 0)
3131 tree arg;
3133 if (nargs == 5)
3135 arg = gimple_call_arg (stmt, 4);
3136 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3138 len = c_strlen (arg, 1);
3139 if (! len || ! tree_fits_uhwi_p (len))
3140 len = NULL_TREE;
/* All-ones SIZE means "unknown object size"; otherwise the computed
   output length (excluding NUL) must be strictly below SIZE.  */
3146 if (! integer_all_onesp (size))
3148 if (! len || ! tree_int_cst_lt (len, size))
3149 return false;
3152 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3153 or if format doesn't contain % chars or is "%s". */
3154 if (! integer_zerop (flag))
3156 if (fmt_str == NULL)
3157 return false;
3158 if (strchr (fmt_str, target_percent) != NULL
3159 && strcmp (fmt_str, target_percent_s))
3160 return false;
3163 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3164 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3165 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3166 if (!fn)
3167 return false;
3169 /* Replace the called function and the first 4 argument by 2 retaining
3170 trailing varargs. */
3171 gimple_call_set_fndecl (stmt, fn);
3172 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3173 gimple_call_set_arg (stmt, 0, dest);
3174 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the remaining varargs down over the dropped FLAG and SIZE.  */
3175 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3176 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3177 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3178 fold_stmt (gsi);
3179 return true;
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which is
	     the constant length of the format here.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* If the result is used we need the length of ORIG to materialize
	 the return value; punt when it is unknown.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can
     prove the output (including the terminating NUL) fits.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, here the constant format length.  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The return value is strlen (str2); convert it to the lhs
	     type if necessary.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was replaced by a cheaper fputc/fputs call (or
   removed entirely for an empty format), otherwise false.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation
     (fputc/fputs return different values than fprintf).  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* For the non-va_list variants any extra argument means the
	 format was wrong; leave the call alone.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was replaced by a cheaper putchar/puts call (or
   removed entirely for an empty format), otherwise false.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation
     (putchar/puts return different values than printf).  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", arg) and formats containing no '%' at all;
     in both cases STR below is the constant string to be printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to __builtin_strlen at *GSI.  If the argument's length can
   be determined exactly, replace the call with that constant and return
   true.  Otherwise record the [0, MAXLEN] range on the call's lhs (for
   later value-range based optimizations) and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range; fall back to [0, max object size - 2]
	 (one byte for the NUL, lengths are at most size - 1).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, maxlen);

  return false;
}
/* Fold a call to __builtin_acc_on_device (ARG0).  The call is lowered to
   the boolean expression ARG0 == VAL_HOST || ARG0 == VAL_DEV, where the
   two constants depend on whether this is the host or the accelerator
   (offload) compiler.  Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
/* Fold realloc (0, n) -> malloc (n).  Returns true if the call was
   replaced (requires the malloc builtin to be available implicitly).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders above; if none applies,
   falls back to the generic tree-level builtin folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf-family folders only handle specific arities; check the
       argument count before extracting fixed-position arguments.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  Returns the constant
   tree, or NULL_TREE if the dimension is dynamic.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer: position 0.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic (non-zero), we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Require a plain 6-argument builtin call with virtual operands, with
     atomics inlining enabled and no TSan/ASan instrumentation (the
     sanitizers intercept the library calls).  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of a local variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The "weak" argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* The target must support compare-and-swap in the operand mode.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   i.e. pass the "expected" value by value instead of by address, so the
   local E need not be addressable.  On return *GSI points at the load of
   the original expected value (the first inserted statement).  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the uintN_t type of the third parameter (the desired value).  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* The caller (optimize_atomic_compare_exchange_p) guaranteed arg 1 is
     an ADDR_EXPR of a local variable.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current expected value into an SSA name.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak flag and operand byte size into one argument:
     w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the original call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* If the call can throw, the follow-up statements must go on the
	 fallthru edge, after the EH edge split.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>; (stored back below).  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
4186 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4187 doesn't fit into TYPE. The test for overflow should be regardless of
4188 -fwrapv, and even for unsigned types. */
4190 bool
4191 arith_overflowed_p (enum tree_code code, const_tree type,
4192 const_tree arg0, const_tree arg1)
4194 widest2_int warg0 = widest2_int_cst (arg0);
4195 widest2_int warg1 = widest2_int_cst (arg1);
4196 widest2_int wres;
4197 switch (code)
4199 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4200 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4201 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4202 default: gcc_unreachable ();
4204 signop sign = TYPE_SIGN (type);
4205 if (sign == UNSIGNED && wi::neg_p (wres))
4206 return true;
4207 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4210 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4211 The statement may be replaced by another statement, e.g., if the call
4212 simplifies to a constant value. Return true if any changes were made.
4213 It is assumed that the operands have been previously folded. */
4215 static bool
4216 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4218 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4219 tree callee;
4220 bool changed = false;
4221 unsigned i;
4223 /* Fold *& in call arguments. */
4224 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4225 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4227 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4228 if (tmp)
4230 gimple_call_set_arg (stmt, i, tmp);
4231 changed = true;
4235 /* Check for virtual calls that became direct calls. */
4236 callee = gimple_call_fn (stmt);
4237 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4239 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4241 if (dump_file && virtual_method_call_p (callee)
4242 && !possible_polymorphic_call_target_p
4243 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4244 (OBJ_TYPE_REF_EXPR (callee)))))
4246 fprintf (dump_file,
4247 "Type inheritance inconsistent devirtualization of ");
4248 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4249 fprintf (dump_file, " to ");
4250 print_generic_expr (dump_file, callee, TDF_SLIM);
4251 fprintf (dump_file, "\n");
/* The OBJ_TYPE_REF already points at a known fndecl - strip the
   OBJ_TYPE_REF wrapper so the call becomes a plain direct call.  */
4254 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4255 changed = true;
/* Try to resolve the polymorphic call from the (complete) list of
   possible targets; with zero targets the call is provably
   unreachable and is replaced by __builtin_unreachable.  */
4257 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4259 bool final;
4260 vec <cgraph_node *>targets
4261 = possible_polymorphic_call_targets (callee, stmt, &final);
4262 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4264 tree lhs = gimple_call_lhs (stmt);
4265 if (dump_enabled_p ())
4267 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4268 "folding virtual function call to %s\n",
4269 targets.length () == 1
4270 ? targets[0]->name ()
4271 : "__builtin_unreachable");
4273 if (targets.length () == 1)
4275 tree fndecl = targets[0]->decl;
4276 gimple_call_set_fndecl (stmt, fndecl);
4277 changed = true;
4278 /* If changing the call to __cxa_pure_virtual
4279 or similar noreturn function, adjust gimple_call_fntype
4280 too. */
4281 if (gimple_call_noreturn_p (stmt)
4282 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4283 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4284 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4285 == void_type_node))
4286 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4287 /* If the call becomes noreturn, remove the lhs. */
4288 if (lhs
4289 && gimple_call_noreturn_p (stmt)
4290 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4291 || should_remove_lhs_p (lhs)))
4293 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep the SSA web valid: give the orphaned lhs a default
   definition of a fresh variable.  */
4295 tree var = create_tmp_var (TREE_TYPE (lhs));
4296 tree def = get_or_create_ssa_default_def (cfun, var);
4297 gimple *new_stmt = gimple_build_assign (lhs, def);
4298 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4300 gimple_call_set_lhs (stmt, NULL_TREE);
4302 maybe_remove_unused_call_args (cfun, stmt);
4304 else
4306 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4307 gimple *new_stmt = gimple_build_call (fndecl, 0);
4308 gimple_set_location (new_stmt, gimple_location (stmt));
4309 /* If the call had a SSA name as lhs morph that into
4310 an uninitialized value. */
4311 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4313 tree var = create_tmp_var (TREE_TYPE (lhs));
4314 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4315 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4316 set_ssa_default_def (cfun, var, lhs);
4318 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4319 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4320 gsi_replace (gsi, new_stmt, false);
4321 return true;
4327 /* Check for indirect calls that became direct calls, and then
4328 no longer require a static chain. */
4329 if (gimple_call_chain (stmt))
4331 tree fn = gimple_call_fndecl (stmt);
4332 if (fn && !DECL_STATIC_CHAIN (fn))
4334 gimple_call_set_chain (stmt, NULL);
4335 changed = true;
4337 else
4339 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4340 if (tmp)
4342 gimple_call_set_chain (stmt, tmp);
4343 changed = true;
/* Everything below may replace the statement, which in-place folding
   must not do.  */
4348 if (inplace)
4349 return changed;
4351 /* Check for builtins that CCP can handle using information not
4352 available in the generic fold routines. */
4353 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4355 if (gimple_fold_builtin (gsi))
4356 changed = true;
/* Machine-dependent builtins are folded by the target hook.  */
4358 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4360 changed |= targetm.gimple_fold_builtin (gsi);
/* Fold internal function calls: __builtin_expect, the UBSAN checks
   and the overflow-checking arithmetic internal functions.  */
4362 else if (gimple_call_internal_p (stmt))
4364 enum tree_code subcode = ERROR_MARK;
4365 tree result = NULL_TREE;
4366 bool cplx_result = false;
4367 tree overflow = NULL_TREE;
4368 switch (gimple_call_internal_fn (stmt))
4370 case IFN_BUILTIN_EXPECT:
4371 result = fold_builtin_expect (gimple_location (stmt),
4372 gimple_call_arg (stmt, 0),
4373 gimple_call_arg (stmt, 1),
4374 gimple_call_arg (stmt, 2),
4375 NULL_TREE);
4376 break;
4377 case IFN_UBSAN_OBJECT_SIZE:
4379 tree offset = gimple_call_arg (stmt, 1);
4380 tree objsize = gimple_call_arg (stmt, 2);
4381 if (integer_all_onesp (objsize)
4382 || (TREE_CODE (offset) == INTEGER_CST
4383 && TREE_CODE (objsize) == INTEGER_CST
4384 && tree_int_cst_le (offset, objsize)))
4386 replace_call_with_value (gsi, NULL_TREE);
4387 return true;
4390 break;
4391 case IFN_UBSAN_PTR:
4392 if (integer_zerop (gimple_call_arg (stmt, 1)))
4394 replace_call_with_value (gsi, NULL_TREE);
4395 return true;
4397 break;
4398 case IFN_UBSAN_BOUNDS:
4400 tree index = gimple_call_arg (stmt, 1);
4401 tree bound = gimple_call_arg (stmt, 2);
4402 if (TREE_CODE (index) == INTEGER_CST
4403 && TREE_CODE (bound) == INTEGER_CST)
4405 index = fold_convert (TREE_TYPE (bound), index);
4406 if (TREE_CODE (index) == INTEGER_CST
4407 && tree_int_cst_le (index, bound))
4409 replace_call_with_value (gsi, NULL_TREE);
4410 return true;
4414 break;
4415 case IFN_GOACC_DIM_SIZE:
4416 case IFN_GOACC_DIM_POS:
4417 result = fold_internal_goacc_dim (stmt);
4418 break;
4419 case IFN_UBSAN_CHECK_ADD:
4420 subcode = PLUS_EXPR;
4421 break;
4422 case IFN_UBSAN_CHECK_SUB:
4423 subcode = MINUS_EXPR;
4424 break;
4425 case IFN_UBSAN_CHECK_MUL:
4426 subcode = MULT_EXPR;
4427 break;
4428 case IFN_ADD_OVERFLOW:
4429 subcode = PLUS_EXPR;
4430 cplx_result = true;
4431 break;
4432 case IFN_SUB_OVERFLOW:
4433 subcode = MINUS_EXPR;
4434 cplx_result = true;
4435 break;
4436 case IFN_MUL_OVERFLOW:
4437 subcode = MULT_EXPR;
4438 cplx_result = true;
4439 break;
4440 default:
4441 break;
/* Try to simplify ARG0 <subcode> ARG1 via algebraic identities and
   constant folding; for the *_OVERFLOW variants (cplx_result) also
   track the overflow flag in OVERFLOW.  */
4443 if (subcode != ERROR_MARK)
4445 tree arg0 = gimple_call_arg (stmt, 0);
4446 tree arg1 = gimple_call_arg (stmt, 1);
4447 tree type = TREE_TYPE (arg0);
4448 if (cplx_result)
4450 tree lhs = gimple_call_lhs (stmt);
4451 if (lhs == NULL_TREE)
4452 type = NULL_TREE;
4453 else
4454 type = TREE_TYPE (TREE_TYPE (lhs));
4456 if (type == NULL_TREE)
4458 /* x = y + 0; x = y - 0; x = y * 0; */
4459 else if (integer_zerop (arg1))
4460 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4461 /* x = 0 + y; x = 0 * y; */
4462 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4463 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4464 /* x = y - y; */
4465 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4466 result = integer_zero_node;
4467 /* x = y * 1; x = 1 * y; */
4468 else if (subcode == MULT_EXPR && integer_onep (arg1))
4469 result = arg0;
4470 else if (subcode == MULT_EXPR && integer_onep (arg0))
4471 result = arg1;
4472 else if (TREE_CODE (arg0) == INTEGER_CST
4473 && TREE_CODE (arg1) == INTEGER_CST)
4475 if (cplx_result)
4476 result = int_const_binop (subcode, fold_convert (type, arg0),
4477 fold_convert (type, arg1));
4478 else
4479 result = int_const_binop (subcode, arg0, arg1);
4480 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4482 if (cplx_result)
4483 overflow = build_one_cst (type);
4484 else
4485 result = NULL_TREE;
4488 if (result)
4490 if (result == integer_zero_node)
4491 result = build_zero_cst (type);
4492 else if (cplx_result && TREE_TYPE (result) != type)
4494 if (TREE_CODE (result) == INTEGER_CST)
4496 if (arith_overflowed_p (PLUS_EXPR, type, result,
4497 integer_zero_node))
4498 overflow = build_one_cst (type);
4500 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4501 && TYPE_UNSIGNED (type))
4502 || (TYPE_PRECISION (type)
4503 < (TYPE_PRECISION (TREE_TYPE (result))
4504 + (TYPE_UNSIGNED (TREE_TYPE (result))
4505 && !TYPE_UNSIGNED (type)))))
4506 result = NULL_TREE;
4507 if (result)
4508 result = fold_convert (type, result);
/* Commit the simplification, rebuilding the complex {value,
   overflow-flag} pair for the *_OVERFLOW internal functions.  */
4513 if (result)
4515 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4516 result = drop_tree_overflow (result);
4517 if (cplx_result)
4519 if (overflow == NULL_TREE)
4520 overflow = build_zero_cst (TREE_TYPE (result));
4521 tree ctype = build_complex_type (TREE_TYPE (result));
4522 if (TREE_CODE (result) == INTEGER_CST
4523 && TREE_CODE (overflow) == INTEGER_CST)
4524 result = build_complex (ctype, result, overflow);
4525 else
4526 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4527 ctype, result, overflow);
4529 if (!update_call_from_tree (gsi, result))
4530 gimplify_and_update_call_from_tree (gsi, result);
4531 changed = true;
4535 return changed;
4539 /* Return true whether NAME has a use on STMT. */
4541 static bool
4542 has_use_on_stmt (tree name, gimple *stmt)
4544 imm_use_iterator iter;
4545 use_operand_p use_p;
4546 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4547 if (USE_STMT (use_p) == stmt)
4548 return true;
4549 return false;
4552 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4553 gimple_simplify.
4555 Replaces *GSI with the simplification result in RCODE and OPS
4556 and the associated statements in *SEQ. Does the replacement
4557 according to INPLACE and returns true if the operation succeeded. */
4559 static bool
4560 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4561 gimple_match_op *res_op,
4562 gimple_seq *seq, bool inplace)
4564 gimple *stmt = gsi_stmt (*gsi);
4565 tree *ops = res_op->ops;
4566 unsigned int num_ops = res_op->num_ops;
4568 /* Play safe and do not allow abnormals to be mentioned in
4569 newly created statements. See also maybe_push_res_to_seq.
4570 As an exception allow such uses if there was a use of the
4571 same SSA name on the old stmt. */
4572 for (unsigned int i = 0; i < num_ops; ++i)
4573 if (TREE_CODE (ops[i]) == SSA_NAME
4574 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4575 && !has_use_on_stmt (ops[i], stmt))
4576 return false;
/* Also check SSA operands buried inside a comparison in ops[0].  */
4578 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4579 for (unsigned int i = 0; i < 2; ++i)
4580 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4581 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4582 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4583 return false;
4585 /* Don't insert new statements when INPLACE is true, even if we could
4586 reuse STMT for the final statement. */
4587 if (inplace && !gimple_seq_empty_p (*seq))
4588 return false;
/* A GIMPLE_COND is replaced by rewriting its condition in place.  */
4590 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4592 gcc_assert (res_op->code.is_tree_code ());
4593 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4594 /* GIMPLE_CONDs condition may not throw. */
4595 && (!flag_exceptions
4596 || !cfun->can_throw_non_call_exceptions
4597 || !operation_could_trap_p (res_op->code,
4598 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4599 false, NULL_TREE)))
4600 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4601 else if (res_op->code == SSA_NAME)
4602 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4603 build_zero_cst (TREE_TYPE (ops[0])));
4604 else if (res_op->code == INTEGER_CST)
4606 if (integer_zerop (ops[0]))
4607 gimple_cond_make_false (cond_stmt);
4608 else
4609 gimple_cond_make_true (cond_stmt);
4611 else if (!inplace)
4613 tree res = maybe_push_res_to_seq (res_op, seq);
4614 if (!res)
4615 return false;
4616 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4617 build_zero_cst (TREE_TYPE (res)));
4619 else
4620 return false;
4621 if (dump_file && (dump_flags & TDF_DETAILS))
4623 fprintf (dump_file, "gimple_simplified to ");
4624 if (!gimple_seq_empty_p (*seq))
4625 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4626 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4627 0, TDF_SLIM);
4629 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4630 return true;
/* For assignments, rewrite the RHS with the simplified operation.  */
4632 else if (is_gimple_assign (stmt)
4633 && res_op->code.is_tree_code ())
4635 if (!inplace
4636 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4638 maybe_build_generic_op (res_op);
4639 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4640 res_op->op_or_null (0),
4641 res_op->op_or_null (1),
4642 res_op->op_or_null (2));
4643 if (dump_file && (dump_flags & TDF_DETAILS))
4645 fprintf (dump_file, "gimple_simplified to ");
4646 if (!gimple_seq_empty_p (*seq))
4647 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4648 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4649 0, TDF_SLIM);
4651 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4652 return true;
/* If the simplification kept the same combined function, just
   update the call's arguments in place.  */
4655 else if (res_op->code.is_fn_code ()
4656 && gimple_call_combined_fn (stmt) == res_op->code)
4658 gcc_assert (num_ops == gimple_call_num_args (stmt));
4659 for (unsigned int i = 0; i < num_ops; ++i)
4660 gimple_call_set_arg (stmt, i, ops[i]);
4661 if (dump_file && (dump_flags & TDF_DETAILS))
4663 fprintf (dump_file, "gimple_simplified to ");
4664 if (!gimple_seq_empty_p (*seq))
4665 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4666 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4668 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4669 return true;
/* Otherwise materialize the result into SEQ and replace the whole
   statement (only allowed when not folding in place).  */
4671 else if (!inplace)
4673 if (gimple_has_lhs (stmt))
4675 tree lhs = gimple_get_lhs (stmt);
4676 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4677 return false;
4678 if (dump_file && (dump_flags & TDF_DETAILS))
4680 fprintf (dump_file, "gimple_simplified to ");
4681 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4683 gsi_replace_with_seq_vops (gsi, *seq);
4684 return true;
4686 else
4687 gcc_unreachable ();
4690 return false;
4693 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* Returns true iff *T was rewritten.  */
4695 static bool
4696 maybe_canonicalize_mem_ref_addr (tree *t)
4698 bool res = false;
/* Look through a wrapping ADDR_EXPR and canonicalize its operand.  */
4700 if (TREE_CODE (*t) == ADDR_EXPR)
4701 t = &TREE_OPERAND (*t, 0);
4703 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4704 generic vector extension. The actual vector referenced is
4705 view-converted to an array type for this purpose. If the index
4706 is constant the canonical representation in the middle-end is a
4707 BIT_FIELD_REF so re-write the former to the latter here. */
4708 if (TREE_CODE (*t) == ARRAY_REF
4709 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4710 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4711 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4713 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4714 if (VECTOR_TYPE_P (vtype))
4716 tree low = array_ref_low_bound (*t);
4717 if (TREE_CODE (low) == INTEGER_CST)
4719 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of the accessed element and make
   sure the access stays within the vector.  */
4721 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4722 wi::to_widest (low));
4723 idx = wi::mul (idx, wi::to_widest
4724 (TYPE_SIZE (TREE_TYPE (*t))));
4725 widest_int ext
4726 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4727 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4729 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4730 TREE_TYPE (*t),
4731 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4732 TYPE_SIZE (TREE_TYPE (*t)),
4733 wide_int_to_tree (bitsizetype, idx));
4734 res = true;
/* Strip handled components to operate on the innermost reference.  */
4741 while (handled_component_p (*t))
4742 t = &TREE_OPERAND (*t, 0);
4744 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4745 of invariant addresses into a SSA name MEM_REF address. */
4746 if (TREE_CODE (*t) == MEM_REF
4747 || TREE_CODE (*t) == TARGET_MEM_REF)
4749 tree addr = TREE_OPERAND (*t, 0)
4750 if (TREE_CODE (addr) == ADDR_EXPR
4751 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4752 || handled_component_p (TREE_OPERAND (addr, 0))))
4754 tree base;
4755 poly_int64 coffset;
4756 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4757 &coffset);
4758 if (!base)
4759 gcc_unreachable ();
/* Fold the component offset into the MEM_REF's constant offset.  */
4761 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4762 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4763 TREE_OPERAND (*t, 1),
4764 size_int (coffset));
4765 res = true;
4767 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4768 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4771 /* Canonicalize back MEM_REFs to plain reference trees if the object
4772 accessed is a decl that has the same access semantics as the MEM_REF. */
4773 if (TREE_CODE (*t) == MEM_REF
4774 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4775 && integer_zerop (TREE_OPERAND (*t, 1))
4776 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4778 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4779 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4780 if (/* Same volatile qualification. */
4781 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4782 /* Same TBAA behavior with -fstrict-aliasing. */
4783 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4784 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4785 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4786 /* Same alignment. */
4787 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4788 /* We have to look out here to not drop a required conversion
4789 from the rhs to the lhs if *t appears on the lhs or vice-versa
4790 if it appears on the rhs. Thus require strict type
4791 compatibility. */
4792 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4794 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4795 res = true;
4799 /* Canonicalize TARGET_MEM_REF in particular with respect to
4800 the indexes becoming constant. */
4801 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4803 tree tem = maybe_fold_tmr (*t);
4804 if (tem)
4806 *t = tem;
4807 res = true;
4811 return res;
4814 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4815 distinguishes both cases. */
4817 static bool
4818 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4820 bool changed = false;
4821 gimple *stmt = gsi_stmt (*gsi);
4822 bool nowarning = gimple_no_warning_p (stmt);
4823 unsigned i;
4824 fold_defer_overflow_warnings ();
4826 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4827 after propagation.
4828 ??? This shouldn't be done in generic folding but in the
4829 propagation helpers which also know whether an address was
4830 propagated.
4831 Also canonicalize operand order. */
4832 switch (gimple_code (stmt))
4834 case GIMPLE_ASSIGN:
4835 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4837 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4838 if ((REFERENCE_CLASS_P (*rhs)
4839 || TREE_CODE (*rhs) == ADDR_EXPR)
4840 && maybe_canonicalize_mem_ref_addr (rhs))
4841 changed = true;
4842 tree *lhs = gimple_assign_lhs_ptr (stmt);
4843 if (REFERENCE_CLASS_P (*lhs)
4844 && maybe_canonicalize_mem_ref_addr (lhs))
4845 changed = true;
4847 else
4849 /* Canonicalize operand order. */
4850 enum tree_code code = gimple_assign_rhs_code (stmt);
4851 if (TREE_CODE_CLASS (code) == tcc_comparison
4852 || commutative_tree_code (code)
4853 || commutative_ternary_tree_code (code))
4855 tree rhs1 = gimple_assign_rhs1 (stmt);
4856 tree rhs2 = gimple_assign_rhs2 (stmt);
4857 if (tree_swap_operands_p (rhs1, rhs2))
4859 gimple_assign_set_rhs1 (stmt, rhs2);
4860 gimple_assign_set_rhs2 (stmt, rhs1);
4861 if (TREE_CODE_CLASS (code) == tcc_comparison)
4862 gimple_assign_set_rhs_code (stmt,
4863 swap_tree_comparison (code));
4864 changed = true;
4868 break;
4869 case GIMPLE_CALL:
4871 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4873 tree *arg = gimple_call_arg_ptr (stmt, i);
4874 if (REFERENCE_CLASS_P (*arg)
4875 && maybe_canonicalize_mem_ref_addr (arg))
4876 changed = true;
4878 tree *lhs = gimple_call_lhs_ptr (stmt);
4879 if (*lhs
4880 && REFERENCE_CLASS_P (*lhs)
4881 && maybe_canonicalize_mem_ref_addr (lhs))
4882 changed = true;
4883 break;
4885 case GIMPLE_ASM:
4887 gasm *asm_stmt = as_a <gasm *> (stmt);
4888 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4890 tree link = gimple_asm_output_op (asm_stmt, i);
4891 tree op = TREE_VALUE (link);
4892 if (REFERENCE_CLASS_P (op)
4893 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4894 changed = true;
4896 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4898 tree link = gimple_asm_input_op (asm_stmt, i);
4899 tree op = TREE_VALUE (link);
4900 if ((REFERENCE_CLASS_P (op)
4901 || TREE_CODE (op) == ADDR_EXPR)
4902 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4903 changed = true;
4906 break;
4907 case GIMPLE_DEBUG:
4908 if (gimple_debug_bind_p (stmt))
4910 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4911 if (*val
4912 && (REFERENCE_CLASS_P (*val)
4913 || TREE_CODE (*val) == ADDR_EXPR)
4914 && maybe_canonicalize_mem_ref_addr (val))
4915 changed = true;
4917 break;
4918 case GIMPLE_COND:
4920 /* Canonicalize operand order. */
4921 tree lhs = gimple_cond_lhs (stmt);
4922 tree rhs = gimple_cond_rhs (stmt);
4923 if (tree_swap_operands_p (lhs, rhs))
4925 gcond *gc = as_a <gcond *> (stmt);
4926 gimple_cond_set_lhs (gc, rhs);
4927 gimple_cond_set_rhs (gc, lhs);
4928 gimple_cond_set_code (gc,
4929 swap_tree_comparison (gimple_cond_code (gc)));
4930 changed = true;
4933 default:;
4936 /* Dispatch to pattern-based folding. */
4937 if (!inplace
4938 || is_gimple_assign (stmt)
4939 || gimple_code (stmt) == GIMPLE_COND)
4941 gimple_seq seq = NULL;
4942 gimple_match_op res_op;
4943 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4944 valueize, valueize))
4946 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4947 changed = true;
4948 else
4949 gimple_seq_discard (seq);
/* The simplification may have replaced the statement; re-fetch it.  */
4953 stmt = gsi_stmt (*gsi);
4955 /* Fold the main computation performed by the statement. */
4956 switch (gimple_code (stmt))
4958 case GIMPLE_ASSIGN:
4960 /* Try to canonicalize for boolean-typed X the comparisons
4961 X == 0, X == 1, X != 0, and X != 1. */
4962 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4963 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4965 tree lhs = gimple_assign_lhs (stmt);
4966 tree op1 = gimple_assign_rhs1 (stmt);
4967 tree op2 = gimple_assign_rhs2 (stmt);
4968 tree type = TREE_TYPE (op1);
4970 /* Check whether the comparison operands are of the same boolean
4971 type as the result type is.
4972 Check that second operand is an integer-constant with value
4973 one or zero. */
4974 if (TREE_CODE (op2) == INTEGER_CST
4975 && (integer_zerop (op2) || integer_onep (op2))
4976 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4978 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4979 bool is_logical_not = false;
4981 /* X == 0 and X != 1 is a logical-not of X
4982 X == 1 and X != 0 is X */
4983 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4984 || (cmp_code == NE_EXPR && integer_onep (op2)))
4985 is_logical_not = true;
4987 if (is_logical_not == false)
4988 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4989 /* Only for one-bit precision typed X the transformation
4990 !X -> ~X is valid. */
4991 else if (TYPE_PRECISION (type) == 1)
4992 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4993 /* Otherwise we use !X -> X ^ 1. */
4994 else
4995 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4996 build_int_cst (type, 1));
4997 changed = true;
4998 break;
5002 unsigned old_num_ops = gimple_num_ops (stmt);
5003 tree lhs = gimple_assign_lhs (stmt);
5004 tree new_rhs = fold_gimple_assign (gsi);
5005 if (new_rhs
5006 && !useless_type_conversion_p (TREE_TYPE (lhs),
5007 TREE_TYPE (new_rhs)))
5008 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding may only shrink the statement, never grow it.  */
5009 if (new_rhs
5010 && (!inplace
5011 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5013 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5014 changed = true;
5016 break;
5019 case GIMPLE_CALL:
5020 changed |= gimple_fold_call (gsi, inplace);
5021 break;
5023 case GIMPLE_ASM:
5024 /* Fold *& in asm operands. */
5026 gasm *asm_stmt = as_a <gasm *> (stmt);
5027 size_t noutputs;
5028 const char **oconstraints;
5029 const char *constraint;
5030 bool allows_mem, allows_reg;
5032 noutputs = gimple_asm_noutputs (asm_stmt);
5033 oconstraints = XALLOCAVEC (const char *, noutputs);
5035 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5037 tree link = gimple_asm_output_op (asm_stmt, i);
5038 tree op = TREE_VALUE (link);
5039 oconstraints[i]
5040 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5041 if (REFERENCE_CLASS_P (op)
5042 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5044 TREE_VALUE (link) = op;
5045 changed = true;
5048 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5050 tree link = gimple_asm_input_op (asm_stmt, i);
5051 tree op = TREE_VALUE (link);
5052 constraint
5053 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5054 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5055 oconstraints, &allows_mem, &allows_reg);
5056 if (REFERENCE_CLASS_P (op)
5057 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5058 != NULL_TREE)
5060 TREE_VALUE (link) = op;
5061 changed = true;
5065 break;
5067 case GIMPLE_DEBUG:
5068 if (gimple_debug_bind_p (stmt))
5070 tree val = gimple_debug_bind_get_value (stmt);
5071 if (val
5072 && REFERENCE_CLASS_P (val))
5074 tree tem = maybe_fold_reference (val, false);
5075 if (tem)
5077 gimple_debug_bind_set_value (stmt, tem);
5078 changed = true;
5081 else if (val
5082 && TREE_CODE (val) == ADDR_EXPR)
5084 tree ref = TREE_OPERAND (val, 0);
5085 tree tem = maybe_fold_reference (ref, false);
5086 if (tem)
5088 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5089 gimple_debug_bind_set_value (stmt, tem);
5090 changed = true;
5094 break;
5096 case GIMPLE_RETURN:
5098 greturn *ret_stmt = as_a<greturn *> (stmt);
5099 tree ret = gimple_return_retval(ret_stmt);
5101 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5103 tree val = valueize (ret);
5104 if (val && val != ret
5105 && may_propagate_copy (ret, val))
5107 gimple_return_set_retval (ret_stmt, val);
5108 changed = true;
5112 break;
5114 default:;
/* The folding above may have replaced the statement; re-fetch it.  */
5117 stmt = gsi_stmt (*gsi);
5119 /* Fold *& on the lhs. */
5120 if (gimple_has_lhs (stmt))
5122 tree lhs = gimple_get_lhs (stmt);
5123 if (lhs && REFERENCE_CLASS_P (lhs))
5125 tree new_lhs = maybe_fold_reference (lhs, true);
5126 if (new_lhs)
5128 gimple_set_lhs (stmt, new_lhs);
5129 changed = true;
5134 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5135 return changed;
5138 /* Valueziation callback that ends up not following SSA edges. */
5140 tree
5141 no_follow_ssa_edges (tree)
5143 return NULL_TREE;
5146 /* Valueization callback that ends up following single-use SSA edges only. */
5148 tree
5149 follow_single_use_edges (tree val)
5151 if (TREE_CODE (val) == SSA_NAME
5152 && !has_single_use (val))
5153 return NULL_TREE;
5154 return val;
5157 /* Valueization callback that follows all SSA edges. */
5159 tree
5160 follow_all_ssa_edges (tree val)
5162 return val;
5165 /* Fold the statement pointed to by GSI. In some cases, this function may
5166 replace the whole statement with a new one. Returns true iff folding
5167 makes any changes.
5168 The statement pointed to by GSI should be in valid gimple form but may
5169 be in unfolded state as resulting from for example constant propagation
5170 which can produce *&x = 0. */
5172 bool
5173 fold_stmt (gimple_stmt_iterator *gsi)
5175 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5178 bool
5179 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5181 return fold_stmt_1 (gsi, false, valueize);
5184 /* Perform the minimal folding on statement *GSI. Only operations like
5185 *&x created by constant propagation are handled. The statement cannot
5186 be replaced with a new one. Return true if the statement was
5187 changed, false otherwise.
5188 The statement *GSI should be in valid gimple form but may
5189 be in unfolded state as resulting from for example constant propagation
5190 which can produce *&x = 0. */
5192 bool
5193 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5195 gimple *stmt = gsi_stmt (*gsi);
5196 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5197 gcc_assert (gsi_stmt (*gsi) == stmt);
5198 return changed;
5201 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5202 if EXPR is null or we don't know how.
5203 If non-null, the result always has boolean type. */
5205 static tree
5206 canonicalize_bool (tree expr, bool invert)
5208 if (!expr)
5209 return NULL_TREE;
5210 else if (invert)
5212 if (integer_nonzerop (expr))
5213 return boolean_false_node;
5214 else if (integer_zerop (expr))
5215 return boolean_true_node;
5216 else if (TREE_CODE (expr) == SSA_NAME)
5217 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5218 build_int_cst (TREE_TYPE (expr), 0));
5219 else if (COMPARISON_CLASS_P (expr))
5220 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5221 boolean_type_node,
5222 TREE_OPERAND (expr, 0),
5223 TREE_OPERAND (expr, 1));
5224 else
5225 return NULL_TREE;
5227 else
5229 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5230 return expr;
5231 if (integer_nonzerop (expr))
5232 return boolean_true_node;
5233 else if (integer_zerop (expr))
5234 return boolean_false_node;
5235 else if (TREE_CODE (expr) == SSA_NAME)
5236 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5237 build_int_cst (TREE_TYPE (expr), 0));
5238 else if (COMPARISON_CLASS_P (expr))
5239 return fold_build2 (TREE_CODE (expr),
5240 boolean_type_node,
5241 TREE_OPERAND (expr, 0),
5242 TREE_OPERAND (expr, 1));
5243 else
5244 return NULL_TREE;
5248 /* Check to see if a boolean expression EXPR is logically equivalent to the
5249 comparison (OP1 CODE OP2). Check for various identities involving
5250 SSA_NAMEs. */
5252 static bool
5253 same_bool_comparison_p (const_tree expr, enum tree_code code,
5254 const_tree op1, const_tree op2)
5256 gimple *s;
5258 /* The obvious case. */
5259 if (TREE_CODE (expr) == code
5260 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5261 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5262 return true;
5264 /* Check for comparing (name, name != 0) and the case where expr
5265 is an SSA_NAME with a definition matching the comparison. */
5266 if (TREE_CODE (expr) == SSA_NAME
5267 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5269 if (operand_equal_p (expr, op1, 0))
5270 return ((code == NE_EXPR && integer_zerop (op2))
5271 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Match EXPR's defining statement against the comparison.  */
5272 s = SSA_NAME_DEF_STMT (expr);
5273 if (is_gimple_assign (s)
5274 && gimple_assign_rhs_code (s) == code
5275 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5276 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5277 return true;
5280 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5281 of name is a comparison, recurse. */
5282 if (TREE_CODE (op1) == SSA_NAME
5283 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5285 s = SSA_NAME_DEF_STMT (op1);
5286 if (is_gimple_assign (s)
5287 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5289 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) or (name == non-zero): compare EXPR against the
   definition of NAME directly.  */
5290 if ((c == NE_EXPR && integer_zerop (op2))
5291 || (c == EQ_EXPR && integer_nonzerop (op2)))
5292 return same_bool_comparison_p (expr, c,
5293 gimple_assign_rhs1 (s),
5294 gimple_assign_rhs2 (s));
/* (name == 0) or (name != non-zero): compare EXPR against the
   inverted definition of NAME.  */
5295 if ((c == EQ_EXPR && integer_zerop (op2))
5296 || (c == NE_EXPR && integer_nonzerop (op2)))
5297 return same_bool_comparison_p (expr,
5298 invert_tree_comparison (c, false),
5299 gimple_assign_rhs1 (s),
5300 gimple_assign_rhs2 (s));
5303 return false;
5306 /* Check to see if two boolean expressions OP1 and OP2 are logically
5307 equivalent. */
5309 static bool
5310 same_bool_result_p (const_tree op1, const_tree op2)
5312 /* Simple cases first. */
5313 if (operand_equal_p (op1, op2, 0))
5314 return true;
5316 /* Check the cases where at least one of the operands is a comparison.
5317 These are a bit smarter than operand_equal_p in that they apply some
5318 identifies on SSA_NAMEs. */
5319 if (COMPARISON_CLASS_P (op2)
5320 && same_bool_comparison_p (op1, TREE_CODE (op2),
5321 TREE_OPERAND (op2, 0),
5322 TREE_OPERAND (op2, 1)))
5323 return true;
5324 if (COMPARISON_CLASS_P (op1)
5325 && same_bool_comparison_p (op2, TREE_CODE (op1),
5326 TREE_OPERAND (op1, 0),
5327 TREE_OPERAND (op1, 1)))
5328 return true;
5330 /* Default case. */
5331 return false;
5334 /* Forward declarations for some mutually recursive functions. */
5336 static tree
5337 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5338 enum tree_code code2, tree op2a, tree op2b);
5339 static tree
5340 and_var_with_comparison (tree var, bool invert,
5341 enum tree_code code2, tree op2a, tree op2b);
5342 static tree
5343 and_var_with_comparison_1 (gimple *stmt,
5344 enum tree_code code2, tree op2a, tree op2b);
5345 static tree
5346 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5347 enum tree_code code2, tree op2a, tree op2b);
5348 static tree
5349 or_var_with_comparison (tree var, bool invert,
5350 enum tree_code code2, tree op2a, tree op2b);
5351 static tree
5352 or_var_with_comparison_1 (gimple *stmt,
5353 enum tree_code code2, tree op2a, tree op2b);
5355 /* Helper function for and_comparisons_1: try to simplify the AND of the
5356 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5357 If INVERT is true, invert the value of the VAR before doing the AND.
5358 Return NULL_EXPR if we can't simplify this to a single expression. */
5360 static tree
5361 and_var_with_comparison (tree var, bool invert,
5362 enum tree_code code2, tree op2a, tree op2b)
5364 tree t;
5365 gimple *stmt = SSA_NAME_DEF_STMT (var);
5367 /* We can only deal with variables whose definitions are assignments. */
5368 if (!is_gimple_assign (stmt))
5369 return NULL_TREE;
5371 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5372 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5373 Then we only have to consider the simpler non-inverted cases. */
5374 if (invert)
5375 t = or_var_with_comparison_1 (stmt,
5376 invert_tree_comparison (code2, false),
5377 op2a, op2b);
5378 else
5379 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5380 return canonicalize_bool (t, invert);
5383 /* Try to simplify the AND of the ssa variable defined by the assignment
5384 STMT with the comparison specified by (OP2A CODE2 OP2B).
5385 Return NULL_EXPR if we can't simplify this to a single expression. */
5387 static tree
5388 and_var_with_comparison_1 (gimple *stmt,
5389 enum tree_code code2, tree op2a, tree op2b)
5391 tree var = gimple_assign_lhs (stmt);
5392 tree true_test_var = NULL_TREE;
5393 tree false_test_var = NULL_TREE;
5394 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5396 /* Check for identities like (var AND (var == 0)) => false. */
5397 if (TREE_CODE (op2a) == SSA_NAME
5398 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5400 if ((code2 == NE_EXPR && integer_zerop (op2b))
5401 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5403 true_test_var = op2a;
5404 if (var == true_test_var)
5405 return var;
5407 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5408 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5410 false_test_var = op2a;
5411 if (var == false_test_var)
5412 return boolean_false_node;
5416 /* If the definition is a comparison, recurse on it. */
5417 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5419 tree t = and_comparisons_1 (innercode,
5420 gimple_assign_rhs1 (stmt),
5421 gimple_assign_rhs2 (stmt),
5422 code2,
5423 op2a,
5424 op2b);
5425 if (t)
5426 return t;
5429 /* If the definition is an AND or OR expression, we may be able to
5430 simplify by reassociating. */
5431 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5432 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5434 tree inner1 = gimple_assign_rhs1 (stmt);
5435 tree inner2 = gimple_assign_rhs2 (stmt);
5436 gimple *s;
5437 tree t;
5438 tree partial = NULL_TREE;
5439 bool is_and = (innercode == BIT_AND_EXPR);
5441 /* Check for boolean identities that don't require recursive examination
5442 of inner1/inner2:
5443 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5444 inner1 AND (inner1 OR inner2) => inner1
5445 !inner1 AND (inner1 AND inner2) => false
5446 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5447 Likewise for similar cases involving inner2. */
5448 if (inner1 == true_test_var)
5449 return (is_and ? var : inner1);
5450 else if (inner2 == true_test_var)
5451 return (is_and ? var : inner2);
5452 else if (inner1 == false_test_var)
5453 return (is_and
5454 ? boolean_false_node
5455 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5456 else if (inner2 == false_test_var)
5457 return (is_and
5458 ? boolean_false_node
5459 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5461 /* Next, redistribute/reassociate the AND across the inner tests.
5462 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5463 if (TREE_CODE (inner1) == SSA_NAME
5464 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5465 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5466 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5467 gimple_assign_rhs1 (s),
5468 gimple_assign_rhs2 (s),
5469 code2, op2a, op2b)))
5471 /* Handle the AND case, where we are reassociating:
5472 (inner1 AND inner2) AND (op2a code2 op2b)
5473 => (t AND inner2)
5474 If the partial result t is a constant, we win. Otherwise
5475 continue on to try reassociating with the other inner test. */
5476 if (is_and)
5478 if (integer_onep (t))
5479 return inner2;
5480 else if (integer_zerop (t))
5481 return boolean_false_node;
5484 /* Handle the OR case, where we are redistributing:
5485 (inner1 OR inner2) AND (op2a code2 op2b)
5486 => (t OR (inner2 AND (op2a code2 op2b))) */
5487 else if (integer_onep (t))
5488 return boolean_true_node;
5490 /* Save partial result for later. */
5491 partial = t;
5494 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5495 if (TREE_CODE (inner2) == SSA_NAME
5496 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5497 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5498 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5499 gimple_assign_rhs1 (s),
5500 gimple_assign_rhs2 (s),
5501 code2, op2a, op2b)))
5503 /* Handle the AND case, where we are reassociating:
5504 (inner1 AND inner2) AND (op2a code2 op2b)
5505 => (inner1 AND t) */
5506 if (is_and)
5508 if (integer_onep (t))
5509 return inner1;
5510 else if (integer_zerop (t))
5511 return boolean_false_node;
5512 /* If both are the same, we can apply the identity
5513 (x AND x) == x. */
5514 else if (partial && same_bool_result_p (t, partial))
5515 return t;
5518 /* Handle the OR case. where we are redistributing:
5519 (inner1 OR inner2) AND (op2a code2 op2b)
5520 => (t OR (inner1 AND (op2a code2 op2b)))
5521 => (t OR partial) */
5522 else
5524 if (integer_onep (t))
5525 return boolean_true_node;
5526 else if (partial)
5528 /* We already got a simplification for the other
5529 operand to the redistributed OR expression. The
5530 interesting case is when at least one is false.
5531 Or, if both are the same, we can apply the identity
5532 (x OR x) == x. */
5533 if (integer_zerop (partial))
5534 return t;
5535 else if (integer_zerop (t))
5536 return partial;
5537 else if (same_bool_result_p (t, partial))
5538 return t;
5543 return NULL_TREE;
5546 /* Try to simplify the AND of two comparisons defined by
5547 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5548 If this can be done without constructing an intermediate value,
5549 return the resulting tree; otherwise NULL_TREE is returned.
5550 This function is deliberately asymmetric as it recurses on SSA_DEFs
5551 in the first comparison but not the second. */
5553 static tree
5554 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5555 enum tree_code code2, tree op2a, tree op2b)
5557 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5559 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5560 if (operand_equal_p (op1a, op2a, 0)
5561 && operand_equal_p (op1b, op2b, 0))
5563 /* Result will be either NULL_TREE, or a combined comparison. */
5564 tree t = combine_comparisons (UNKNOWN_LOCATION,
5565 TRUTH_ANDIF_EXPR, code1, code2,
5566 truth_type, op1a, op1b);
5567 if (t)
5568 return t;
5571 /* Likewise the swapped case of the above. */
5572 if (operand_equal_p (op1a, op2b, 0)
5573 && operand_equal_p (op1b, op2a, 0))
5575 /* Result will be either NULL_TREE, or a combined comparison. */
5576 tree t = combine_comparisons (UNKNOWN_LOCATION,
5577 TRUTH_ANDIF_EXPR, code1,
5578 swap_tree_comparison (code2),
5579 truth_type, op1a, op1b);
5580 if (t)
5581 return t;
5584 /* If both comparisons are of the same value against constants, we might
5585 be able to merge them. */
5586 if (operand_equal_p (op1a, op2a, 0)
5587 && TREE_CODE (op1b) == INTEGER_CST
5588 && TREE_CODE (op2b) == INTEGER_CST)
5590 int cmp = tree_int_cst_compare (op1b, op2b);
5592 /* If we have (op1a == op1b), we should either be able to
5593 return that or FALSE, depending on whether the constant op1b
5594 also satisfies the other comparison against op2b. */
5595 if (code1 == EQ_EXPR)
5597 bool done = true;
5598 bool val;
5599 switch (code2)
5601 case EQ_EXPR: val = (cmp == 0); break;
5602 case NE_EXPR: val = (cmp != 0); break;
5603 case LT_EXPR: val = (cmp < 0); break;
5604 case GT_EXPR: val = (cmp > 0); break;
5605 case LE_EXPR: val = (cmp <= 0); break;
5606 case GE_EXPR: val = (cmp >= 0); break;
5607 default: done = false;
5609 if (done)
5611 if (val)
5612 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5613 else
5614 return boolean_false_node;
5617 /* Likewise if the second comparison is an == comparison. */
5618 else if (code2 == EQ_EXPR)
5620 bool done = true;
5621 bool val;
5622 switch (code1)
5624 case EQ_EXPR: val = (cmp == 0); break;
5625 case NE_EXPR: val = (cmp != 0); break;
5626 case LT_EXPR: val = (cmp > 0); break;
5627 case GT_EXPR: val = (cmp < 0); break;
5628 case LE_EXPR: val = (cmp >= 0); break;
5629 case GE_EXPR: val = (cmp <= 0); break;
5630 default: done = false;
5632 if (done)
5634 if (val)
5635 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5636 else
5637 return boolean_false_node;
5641 /* Same business with inequality tests. */
5642 else if (code1 == NE_EXPR)
5644 bool val;
5645 switch (code2)
5647 case EQ_EXPR: val = (cmp != 0); break;
5648 case NE_EXPR: val = (cmp == 0); break;
5649 case LT_EXPR: val = (cmp >= 0); break;
5650 case GT_EXPR: val = (cmp <= 0); break;
5651 case LE_EXPR: val = (cmp > 0); break;
5652 case GE_EXPR: val = (cmp < 0); break;
5653 default:
5654 val = false;
5656 if (val)
5657 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5659 else if (code2 == NE_EXPR)
5661 bool val;
5662 switch (code1)
5664 case EQ_EXPR: val = (cmp == 0); break;
5665 case NE_EXPR: val = (cmp != 0); break;
5666 case LT_EXPR: val = (cmp <= 0); break;
5667 case GT_EXPR: val = (cmp >= 0); break;
5668 case LE_EXPR: val = (cmp < 0); break;
5669 case GE_EXPR: val = (cmp > 0); break;
5670 default:
5671 val = false;
5673 if (val)
5674 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5677 /* Chose the more restrictive of two < or <= comparisons. */
5678 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5679 && (code2 == LT_EXPR || code2 == LE_EXPR))
5681 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5682 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5683 else
5684 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5687 /* Likewise chose the more restrictive of two > or >= comparisons. */
5688 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5689 && (code2 == GT_EXPR || code2 == GE_EXPR))
5691 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5692 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5693 else
5694 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5697 /* Check for singleton ranges. */
5698 else if (cmp == 0
5699 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5700 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5701 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5703 /* Check for disjoint ranges. */
5704 else if (cmp <= 0
5705 && (code1 == LT_EXPR || code1 == LE_EXPR)
5706 && (code2 == GT_EXPR || code2 == GE_EXPR))
5707 return boolean_false_node;
5708 else if (cmp >= 0
5709 && (code1 == GT_EXPR || code1 == GE_EXPR)
5710 && (code2 == LT_EXPR || code2 == LE_EXPR))
5711 return boolean_false_node;
5714 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5715 NAME's definition is a truth value. See if there are any simplifications
5716 that can be done against the NAME's definition. */
5717 if (TREE_CODE (op1a) == SSA_NAME
5718 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5719 && (integer_zerop (op1b) || integer_onep (op1b)))
5721 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5722 || (code1 == NE_EXPR && integer_onep (op1b)));
5723 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5724 switch (gimple_code (stmt))
5726 case GIMPLE_ASSIGN:
5727 /* Try to simplify by copy-propagating the definition. */
5728 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5730 case GIMPLE_PHI:
5731 /* If every argument to the PHI produces the same result when
5732 ANDed with the second comparison, we win.
5733 Do not do this unless the type is bool since we need a bool
5734 result here anyway. */
5735 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5737 tree result = NULL_TREE;
5738 unsigned i;
5739 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5741 tree arg = gimple_phi_arg_def (stmt, i);
5743 /* If this PHI has itself as an argument, ignore it.
5744 If all the other args produce the same result,
5745 we're still OK. */
5746 if (arg == gimple_phi_result (stmt))
5747 continue;
5748 else if (TREE_CODE (arg) == INTEGER_CST)
5750 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5752 if (!result)
5753 result = boolean_false_node;
5754 else if (!integer_zerop (result))
5755 return NULL_TREE;
5757 else if (!result)
5758 result = fold_build2 (code2, boolean_type_node,
5759 op2a, op2b);
5760 else if (!same_bool_comparison_p (result,
5761 code2, op2a, op2b))
5762 return NULL_TREE;
5764 else if (TREE_CODE (arg) == SSA_NAME
5765 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5767 tree temp;
5768 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5769 /* In simple cases we can look through PHI nodes,
5770 but we have to be careful with loops.
5771 See PR49073. */
5772 if (! dom_info_available_p (CDI_DOMINATORS)
5773 || gimple_bb (def_stmt) == gimple_bb (stmt)
5774 || dominated_by_p (CDI_DOMINATORS,
5775 gimple_bb (def_stmt),
5776 gimple_bb (stmt)))
5777 return NULL_TREE;
5778 temp = and_var_with_comparison (arg, invert, code2,
5779 op2a, op2b);
5780 if (!temp)
5781 return NULL_TREE;
5782 else if (!result)
5783 result = temp;
5784 else if (!same_bool_result_p (result, temp))
5785 return NULL_TREE;
5787 else
5788 return NULL_TREE;
5790 return result;
5793 default:
5794 break;
5797 return NULL_TREE;
5800 /* Try to simplify the AND of two comparisons, specified by
5801 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5802 If this can be simplified to a single expression (without requiring
5803 introducing more SSA variables to hold intermediate values),
5804 return the resulting tree. Otherwise return NULL_TREE.
5805 If the result expression is non-null, it has boolean type. */
5807 tree
5808 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5809 enum tree_code code2, tree op2a, tree op2b)
5811 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5812 if (t)
5813 return t;
5814 else
5815 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5818 /* Helper function for or_comparisons_1: try to simplify the OR of the
5819 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5820 If INVERT is true, invert the value of VAR before doing the OR.
5821 Return NULL_EXPR if we can't simplify this to a single expression. */
5823 static tree
5824 or_var_with_comparison (tree var, bool invert,
5825 enum tree_code code2, tree op2a, tree op2b)
5827 tree t;
5828 gimple *stmt = SSA_NAME_DEF_STMT (var);
5830 /* We can only deal with variables whose definitions are assignments. */
5831 if (!is_gimple_assign (stmt))
5832 return NULL_TREE;
5834 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5835 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5836 Then we only have to consider the simpler non-inverted cases. */
5837 if (invert)
5838 t = and_var_with_comparison_1 (stmt,
5839 invert_tree_comparison (code2, false),
5840 op2a, op2b);
5841 else
5842 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5843 return canonicalize_bool (t, invert);
5846 /* Try to simplify the OR of the ssa variable defined by the assignment
5847 STMT with the comparison specified by (OP2A CODE2 OP2B).
5848 Return NULL_EXPR if we can't simplify this to a single expression. */
5850 static tree
5851 or_var_with_comparison_1 (gimple *stmt,
5852 enum tree_code code2, tree op2a, tree op2b)
5854 tree var = gimple_assign_lhs (stmt);
5855 tree true_test_var = NULL_TREE;
5856 tree false_test_var = NULL_TREE;
5857 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5859 /* Check for identities like (var OR (var != 0)) => true . */
5860 if (TREE_CODE (op2a) == SSA_NAME
5861 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5863 if ((code2 == NE_EXPR && integer_zerop (op2b))
5864 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5866 true_test_var = op2a;
5867 if (var == true_test_var)
5868 return var;
5870 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5871 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5873 false_test_var = op2a;
5874 if (var == false_test_var)
5875 return boolean_true_node;
5879 /* If the definition is a comparison, recurse on it. */
5880 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5882 tree t = or_comparisons_1 (innercode,
5883 gimple_assign_rhs1 (stmt),
5884 gimple_assign_rhs2 (stmt),
5885 code2,
5886 op2a,
5887 op2b);
5888 if (t)
5889 return t;
5892 /* If the definition is an AND or OR expression, we may be able to
5893 simplify by reassociating. */
5894 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5895 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5897 tree inner1 = gimple_assign_rhs1 (stmt);
5898 tree inner2 = gimple_assign_rhs2 (stmt);
5899 gimple *s;
5900 tree t;
5901 tree partial = NULL_TREE;
5902 bool is_or = (innercode == BIT_IOR_EXPR);
5904 /* Check for boolean identities that don't require recursive examination
5905 of inner1/inner2:
5906 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5907 inner1 OR (inner1 AND inner2) => inner1
5908 !inner1 OR (inner1 OR inner2) => true
5909 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5911 if (inner1 == true_test_var)
5912 return (is_or ? var : inner1);
5913 else if (inner2 == true_test_var)
5914 return (is_or ? var : inner2);
5915 else if (inner1 == false_test_var)
5916 return (is_or
5917 ? boolean_true_node
5918 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5919 else if (inner2 == false_test_var)
5920 return (is_or
5921 ? boolean_true_node
5922 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5924 /* Next, redistribute/reassociate the OR across the inner tests.
5925 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5926 if (TREE_CODE (inner1) == SSA_NAME
5927 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5928 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5929 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5930 gimple_assign_rhs1 (s),
5931 gimple_assign_rhs2 (s),
5932 code2, op2a, op2b)))
5934 /* Handle the OR case, where we are reassociating:
5935 (inner1 OR inner2) OR (op2a code2 op2b)
5936 => (t OR inner2)
5937 If the partial result t is a constant, we win. Otherwise
5938 continue on to try reassociating with the other inner test. */
5939 if (is_or)
5941 if (integer_onep (t))
5942 return boolean_true_node;
5943 else if (integer_zerop (t))
5944 return inner2;
5947 /* Handle the AND case, where we are redistributing:
5948 (inner1 AND inner2) OR (op2a code2 op2b)
5949 => (t AND (inner2 OR (op2a code op2b))) */
5950 else if (integer_zerop (t))
5951 return boolean_false_node;
5953 /* Save partial result for later. */
5954 partial = t;
5957 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5958 if (TREE_CODE (inner2) == SSA_NAME
5959 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5960 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5961 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5962 gimple_assign_rhs1 (s),
5963 gimple_assign_rhs2 (s),
5964 code2, op2a, op2b)))
5966 /* Handle the OR case, where we are reassociating:
5967 (inner1 OR inner2) OR (op2a code2 op2b)
5968 => (inner1 OR t)
5969 => (t OR partial) */
5970 if (is_or)
5972 if (integer_zerop (t))
5973 return inner1;
5974 else if (integer_onep (t))
5975 return boolean_true_node;
5976 /* If both are the same, we can apply the identity
5977 (x OR x) == x. */
5978 else if (partial && same_bool_result_p (t, partial))
5979 return t;
5982 /* Handle the AND case, where we are redistributing:
5983 (inner1 AND inner2) OR (op2a code2 op2b)
5984 => (t AND (inner1 OR (op2a code2 op2b)))
5985 => (t AND partial) */
5986 else
5988 if (integer_zerop (t))
5989 return boolean_false_node;
5990 else if (partial)
5992 /* We already got a simplification for the other
5993 operand to the redistributed AND expression. The
5994 interesting case is when at least one is true.
5995 Or, if both are the same, we can apply the identity
5996 (x AND x) == x. */
5997 if (integer_onep (partial))
5998 return t;
5999 else if (integer_onep (t))
6000 return partial;
6001 else if (same_bool_result_p (t, partial))
6002 return t;
6007 return NULL_TREE;
6010 /* Try to simplify the OR of two comparisons defined by
6011 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6012 If this can be done without constructing an intermediate value,
6013 return the resulting tree; otherwise NULL_TREE is returned.
6014 This function is deliberately asymmetric as it recurses on SSA_DEFs
6015 in the first comparison but not the second. */
6017 static tree
6018 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
6019 enum tree_code code2, tree op2a, tree op2b)
6021 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6023 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6024 if (operand_equal_p (op1a, op2a, 0)
6025 && operand_equal_p (op1b, op2b, 0))
6027 /* Result will be either NULL_TREE, or a combined comparison. */
6028 tree t = combine_comparisons (UNKNOWN_LOCATION,
6029 TRUTH_ORIF_EXPR, code1, code2,
6030 truth_type, op1a, op1b);
6031 if (t)
6032 return t;
6035 /* Likewise the swapped case of the above. */
6036 if (operand_equal_p (op1a, op2b, 0)
6037 && operand_equal_p (op1b, op2a, 0))
6039 /* Result will be either NULL_TREE, or a combined comparison. */
6040 tree t = combine_comparisons (UNKNOWN_LOCATION,
6041 TRUTH_ORIF_EXPR, code1,
6042 swap_tree_comparison (code2),
6043 truth_type, op1a, op1b);
6044 if (t)
6045 return t;
6048 /* If both comparisons are of the same value against constants, we might
6049 be able to merge them. */
6050 if (operand_equal_p (op1a, op2a, 0)
6051 && TREE_CODE (op1b) == INTEGER_CST
6052 && TREE_CODE (op2b) == INTEGER_CST)
6054 int cmp = tree_int_cst_compare (op1b, op2b);
6056 /* If we have (op1a != op1b), we should either be able to
6057 return that or TRUE, depending on whether the constant op1b
6058 also satisfies the other comparison against op2b. */
6059 if (code1 == NE_EXPR)
6061 bool done = true;
6062 bool val;
6063 switch (code2)
6065 case EQ_EXPR: val = (cmp == 0); break;
6066 case NE_EXPR: val = (cmp != 0); break;
6067 case LT_EXPR: val = (cmp < 0); break;
6068 case GT_EXPR: val = (cmp > 0); break;
6069 case LE_EXPR: val = (cmp <= 0); break;
6070 case GE_EXPR: val = (cmp >= 0); break;
6071 default: done = false;
6073 if (done)
6075 if (val)
6076 return boolean_true_node;
6077 else
6078 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6081 /* Likewise if the second comparison is a != comparison. */
6082 else if (code2 == NE_EXPR)
6084 bool done = true;
6085 bool val;
6086 switch (code1)
6088 case EQ_EXPR: val = (cmp == 0); break;
6089 case NE_EXPR: val = (cmp != 0); break;
6090 case LT_EXPR: val = (cmp > 0); break;
6091 case GT_EXPR: val = (cmp < 0); break;
6092 case LE_EXPR: val = (cmp >= 0); break;
6093 case GE_EXPR: val = (cmp <= 0); break;
6094 default: done = false;
6096 if (done)
6098 if (val)
6099 return boolean_true_node;
6100 else
6101 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6105 /* See if an equality test is redundant with the other comparison. */
6106 else if (code1 == EQ_EXPR)
6108 bool val;
6109 switch (code2)
6111 case EQ_EXPR: val = (cmp == 0); break;
6112 case NE_EXPR: val = (cmp != 0); break;
6113 case LT_EXPR: val = (cmp < 0); break;
6114 case GT_EXPR: val = (cmp > 0); break;
6115 case LE_EXPR: val = (cmp <= 0); break;
6116 case GE_EXPR: val = (cmp >= 0); break;
6117 default:
6118 val = false;
6120 if (val)
6121 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6123 else if (code2 == EQ_EXPR)
6125 bool val;
6126 switch (code1)
6128 case EQ_EXPR: val = (cmp == 0); break;
6129 case NE_EXPR: val = (cmp != 0); break;
6130 case LT_EXPR: val = (cmp > 0); break;
6131 case GT_EXPR: val = (cmp < 0); break;
6132 case LE_EXPR: val = (cmp >= 0); break;
6133 case GE_EXPR: val = (cmp <= 0); break;
6134 default:
6135 val = false;
6137 if (val)
6138 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6141 /* Chose the less restrictive of two < or <= comparisons. */
6142 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6143 && (code2 == LT_EXPR || code2 == LE_EXPR))
6145 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6146 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6147 else
6148 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6151 /* Likewise chose the less restrictive of two > or >= comparisons. */
6152 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6153 && (code2 == GT_EXPR || code2 == GE_EXPR))
6155 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6156 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6157 else
6158 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6161 /* Check for singleton ranges. */
6162 else if (cmp == 0
6163 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6164 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6165 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6167 /* Check for less/greater pairs that don't restrict the range at all. */
6168 else if (cmp >= 0
6169 && (code1 == LT_EXPR || code1 == LE_EXPR)
6170 && (code2 == GT_EXPR || code2 == GE_EXPR))
6171 return boolean_true_node;
6172 else if (cmp <= 0
6173 && (code1 == GT_EXPR || code1 == GE_EXPR)
6174 && (code2 == LT_EXPR || code2 == LE_EXPR))
6175 return boolean_true_node;
6178 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6179 NAME's definition is a truth value. See if there are any simplifications
6180 that can be done against the NAME's definition. */
6181 if (TREE_CODE (op1a) == SSA_NAME
6182 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6183 && (integer_zerop (op1b) || integer_onep (op1b)))
6185 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6186 || (code1 == NE_EXPR && integer_onep (op1b)));
6187 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6188 switch (gimple_code (stmt))
6190 case GIMPLE_ASSIGN:
6191 /* Try to simplify by copy-propagating the definition. */
6192 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6194 case GIMPLE_PHI:
6195 /* If every argument to the PHI produces the same result when
6196 ORed with the second comparison, we win.
6197 Do not do this unless the type is bool since we need a bool
6198 result here anyway. */
6199 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6201 tree result = NULL_TREE;
6202 unsigned i;
6203 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6205 tree arg = gimple_phi_arg_def (stmt, i);
6207 /* If this PHI has itself as an argument, ignore it.
6208 If all the other args produce the same result,
6209 we're still OK. */
6210 if (arg == gimple_phi_result (stmt))
6211 continue;
6212 else if (TREE_CODE (arg) == INTEGER_CST)
6214 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6216 if (!result)
6217 result = boolean_true_node;
6218 else if (!integer_onep (result))
6219 return NULL_TREE;
6221 else if (!result)
6222 result = fold_build2 (code2, boolean_type_node,
6223 op2a, op2b);
6224 else if (!same_bool_comparison_p (result,
6225 code2, op2a, op2b))
6226 return NULL_TREE;
6228 else if (TREE_CODE (arg) == SSA_NAME
6229 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6231 tree temp;
6232 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6233 /* In simple cases we can look through PHI nodes,
6234 but we have to be careful with loops.
6235 See PR49073. */
6236 if (! dom_info_available_p (CDI_DOMINATORS)
6237 || gimple_bb (def_stmt) == gimple_bb (stmt)
6238 || dominated_by_p (CDI_DOMINATORS,
6239 gimple_bb (def_stmt),
6240 gimple_bb (stmt)))
6241 return NULL_TREE;
6242 temp = or_var_with_comparison (arg, invert, code2,
6243 op2a, op2b);
6244 if (!temp)
6245 return NULL_TREE;
6246 else if (!result)
6247 result = temp;
6248 else if (!same_bool_result_p (result, temp))
6249 return NULL_TREE;
6251 else
6252 return NULL_TREE;
6254 return result;
6257 default:
6258 break;
6261 return NULL_TREE;
6264 /* Try to simplify the OR of two comparisons, specified by
6265 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6266 If this can be simplified to a single expression (without requiring
6267 introducing more SSA variables to hold intermediate values),
6268 return the resulting tree. Otherwise return NULL_TREE.
6269 If the result expression is non-null, it has boolean type. */
6271 tree
6272 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6273 enum tree_code code2, tree op2a, tree op2b)
6275 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6276 if (t)
6277 return t;
6278 else
6279 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6283 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6285 Either NULL_TREE, a simplified but non-constant or a constant
6286 is returned.
6288 ??? This should go into a gimple-fold-inline.h file to be eventually
6289 privatized with the single valueize function used in the various TUs
6290 to avoid the indirect function call overhead. */
6292 tree
6293 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6294 tree (*gvalueize) (tree))
6296 gimple_match_op res_op;
6297 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6298 edges if there are intermediate VARYING defs. For this reason
6299 do not follow SSA edges here even though SCCVN can technically
6300 just deal fine with that. */
/* First give match-and-simplify a chance; accept the result only when
   it is a single GIMPLE value or when the mprts hook can materialize it.  */
6301 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6303 tree res = NULL_TREE;
6304 if (gimple_simplified_result_is_gimple_val (&res_op))
6305 res = res_op.ops[0];
6306 else if (mprts_hook)
6307 res = mprts_hook (&res_op);
6308 if (res)
6310 if (dump_file && dump_flags & TDF_DETAILS)
6312 fprintf (dump_file, "Match-and-simplified ");
6313 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6314 fprintf (dump_file, " to ");
6315 print_generic_expr (dump_file, res);
6316 fprintf (dump_file, "\n");
6318 return res;
/* Fall back to folding specific to the kind of statement.  */
6322 location_t loc = gimple_location (stmt);
6323 switch (gimple_code (stmt))
6325 case GIMPLE_ASSIGN:
6327 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6329 switch (get_gimple_rhs_class (subcode))
6331 case GIMPLE_SINGLE_RHS:
6333 tree rhs = gimple_assign_rhs1 (stmt);
6334 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6336 if (TREE_CODE (rhs) == SSA_NAME)
6338 /* If the RHS is an SSA_NAME, return its known constant value,
6339 if any. */
6340 return (*valueize) (rhs);
6342 /* Handle propagating invariant addresses into address
6343 operations. */
6344 else if (TREE_CODE (rhs) == ADDR_EXPR
6345 && !is_gimple_min_invariant (rhs))
6347 poly_int64 offset = 0;
6348 tree base;
6349 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6350 &offset,
6351 valueize);
6352 if (base
6353 && (CONSTANT_CLASS_P (base)
6354 || decl_address_invariant_p (base)))
6355 return build_invariant_address (TREE_TYPE (rhs),
6356 base, offset);
/* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST when
   every valueized element is a constant.  */
6358 else if (TREE_CODE (rhs) == CONSTRUCTOR
6359 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6360 && known_eq (CONSTRUCTOR_NELTS (rhs),
6361 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6363 unsigned i, nelts;
6364 tree val;
6366 nelts = CONSTRUCTOR_NELTS (rhs);
6367 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6368 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6370 val = (*valueize) (val);
6371 if (TREE_CODE (val) == INTEGER_CST
6372 || TREE_CODE (val) == REAL_CST
6373 || TREE_CODE (val) == FIXED_CST)
6374 vec.quick_push (val);
6375 else
6376 return NULL_TREE;
6379 return vec.build ();
6381 if (subcode == OBJ_TYPE_REF)
6383 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6384 /* If callee is constant, we can fold away the wrapper. */
6385 if (is_gimple_min_invariant (val))
6386 return val;
6389 if (kind == tcc_reference)
6391 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6392 || TREE_CODE (rhs) == REALPART_EXPR
6393 || TREE_CODE (rhs) == IMAGPART_EXPR)
6394 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6396 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6397 return fold_unary_loc (EXPR_LOCATION (rhs),
6398 TREE_CODE (rhs),
6399 TREE_TYPE (rhs), val);
6401 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6402 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6404 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6405 return fold_ternary_loc (EXPR_LOCATION (rhs),
6406 TREE_CODE (rhs),
6407 TREE_TYPE (rhs), val,
6408 TREE_OPERAND (rhs, 1),
6409 TREE_OPERAND (rhs, 2));
6411 else if (TREE_CODE (rhs) == MEM_REF
6412 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6414 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6415 if (TREE_CODE (val) == ADDR_EXPR
6416 && is_gimple_min_invariant (val))
6418 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6419 unshare_expr (val),
6420 TREE_OPERAND (rhs, 1));
6421 if (tem)
6422 rhs = tem;
6425 return fold_const_aggregate_ref_1 (rhs, valueize);
6427 else if (kind == tcc_declaration)
6428 return get_symbol_constant_value (rhs);
6429 return rhs;
6432 case GIMPLE_UNARY_RHS:
6433 return NULL_TREE;
6435 case GIMPLE_BINARY_RHS:
6436 /* Translate &x + CST into an invariant form suitable for
6437 further propagation. */
6438 if (subcode == POINTER_PLUS_EXPR)
6440 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6441 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6442 if (TREE_CODE (op0) == ADDR_EXPR
6443 && TREE_CODE (op1) == INTEGER_CST)
6445 tree off = fold_convert (ptr_type_node, op1);
6446 return build_fold_addr_expr_loc
6447 (loc,
6448 fold_build2 (MEM_REF,
6449 TREE_TYPE (TREE_TYPE (op0)),
6450 unshare_expr (op0), off));
6453 /* Canonicalize bool != 0 and bool == 0 appearing after
6454 valueization. While gimple_simplify handles this
6455 it can get confused by the ~X == 1 -> X == 0 transform
6456 which we can't reduce to a SSA name or a constant
6457 (and we have no way to tell gimple_simplify to not
6458 consider those transforms in the first place). */
6459 else if (subcode == EQ_EXPR
6460 || subcode == NE_EXPR)
6462 tree lhs = gimple_assign_lhs (stmt);
6463 tree op0 = gimple_assign_rhs1 (stmt);
6464 if (useless_type_conversion_p (TREE_TYPE (lhs),
6465 TREE_TYPE (op0)))
6467 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6468 op0 = (*valueize) (op0);
/* Canonicalize the constant operand to op1 before testing.  */
6469 if (TREE_CODE (op0) == INTEGER_CST)
6470 std::swap (op0, op1);
6471 if (TREE_CODE (op1) == INTEGER_CST
6472 && ((subcode == NE_EXPR && integer_zerop (op1))
6473 || (subcode == EQ_EXPR && integer_onep (op1))))
6474 return op0;
6477 return NULL_TREE;
6479 case GIMPLE_TERNARY_RHS:
6481 /* Handle ternary operators that can appear in GIMPLE form. */
6482 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6483 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6484 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6485 return fold_ternary_loc (loc, subcode,
6486 gimple_expr_type (stmt), op0, op1, op2);
6489 default:
6490 gcc_unreachable ();
6494 case GIMPLE_CALL:
6496 tree fn;
6497 gcall *call_stmt = as_a <gcall *> (stmt);
/* Internal calls: fold the UBSAN overflow checks like their ordinary
   arithmetic counterparts, and look through __builtin_expect.  */
6499 if (gimple_call_internal_p (stmt))
6501 enum tree_code subcode = ERROR_MARK;
6502 switch (gimple_call_internal_fn (stmt))
6504 case IFN_UBSAN_CHECK_ADD:
6505 subcode = PLUS_EXPR;
6506 break;
6507 case IFN_UBSAN_CHECK_SUB:
6508 subcode = MINUS_EXPR;
6509 break;
6510 case IFN_UBSAN_CHECK_MUL:
6511 subcode = MULT_EXPR;
6512 break;
6513 case IFN_BUILTIN_EXPECT:
6515 tree arg0 = gimple_call_arg (stmt, 0);
6516 tree op0 = (*valueize) (arg0);
6517 if (TREE_CODE (op0) == INTEGER_CST)
6518 return op0;
6519 return NULL_TREE;
6521 default:
6522 return NULL_TREE;
6524 tree arg0 = gimple_call_arg (stmt, 0);
6525 tree arg1 = gimple_call_arg (stmt, 1);
6526 tree op0 = (*valueize) (arg0);
6527 tree op1 = (*valueize) (arg1);
/* With non-constant operands only a few identities can still fold
   without risking overflow.  */
6529 if (TREE_CODE (op0) != INTEGER_CST
6530 || TREE_CODE (op1) != INTEGER_CST)
6532 switch (subcode)
6534 case MULT_EXPR:
6535 /* x * 0 = 0 * x = 0 without overflow. */
6536 if (integer_zerop (op0) || integer_zerop (op1))
6537 return build_zero_cst (TREE_TYPE (arg0));
6538 break;
6539 case MINUS_EXPR:
6540 /* y - y = 0 without overflow. */
6541 if (operand_equal_p (op0, op1, 0))
6542 return build_zero_cst (TREE_TYPE (arg0));
6543 break;
6544 default:
6545 break;
6548 tree res
6549 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6550 if (res
6551 && TREE_CODE (res) == INTEGER_CST
6552 && !TREE_OVERFLOW (res))
6553 return res;
6554 return NULL_TREE;
/* Ordinary calls: only fold calls to known builtins whose argument
   types match the builtin's prototype.  */
6557 fn = (*valueize) (gimple_call_fn (stmt));
6558 if (TREE_CODE (fn) == ADDR_EXPR
6559 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6560 && gimple_builtin_call_types_compatible_p (stmt,
6561 TREE_OPERAND (fn, 0)))
6563 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6564 tree retval;
6565 unsigned i;
6566 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6567 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6568 retval = fold_builtin_call_array (loc,
6569 gimple_call_return_type (call_stmt),
6570 fn, gimple_call_num_args (stmt), args);
6571 if (retval)
6573 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6574 STRIP_NOPS (retval);
6575 retval = fold_convert (gimple_call_return_type (call_stmt),
6576 retval);
6578 return retval;
6580 return NULL_TREE;
6583 default:
6584 return NULL_TREE;
6588 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6589 Returns NULL_TREE if folding to a constant is not possible, otherwise
6590 returns a constant according to is_gimple_min_invariant. */
6592 tree
6593 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6595 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6596 if (res && is_gimple_min_invariant (res))
6597 return res;
6598 return NULL_TREE;
6602 /* The following set of functions are supposed to fold references using
6603 their constant initializers. */
6605 /* See if we can find constructor defining value of BASE.
6606 When we know the constructor with constant offset (such as
6607 base is array[40] and we do know constructor of array), then
6608 BIT_OFFSET is adjusted accordingly.
6610 As a special case, return error_mark_node when constructor
6611 is not explicitly available, but it is known to be zero
6612 such as 'static const int a;'. */
6613 static tree
6614 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6615 tree (*valueize)(tree))
6617 poly_int64 bit_offset2, size, max_size;
6618 bool reverse;
/* For a MEM_REF fold its constant offset into *BIT_OFFSET and strip
   the address operation, valueizing the SSA pointer if possible.  */
6620 if (TREE_CODE (base) == MEM_REF)
6622 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6623 if (!boff.to_shwi (bit_offset))
6624 return NULL_TREE;
6626 if (valueize
6627 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6628 base = valueize (TREE_OPERAND (base, 0));
6629 if (!base || TREE_CODE (base) != ADDR_EXPR)
6630 return NULL_TREE;
6631 base = TREE_OPERAND (base, 0);
6633 else if (valueize
6634 && TREE_CODE (base) == SSA_NAME)
6635 base = valueize (base);
6637 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6638 DECL_INITIAL. If BASE is a nested reference into another
6639 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6640 the inner reference. */
6641 switch (TREE_CODE (base))
6643 case VAR_DECL:
6644 case CONST_DECL:
6646 tree init = ctor_for_folding (base);
6648 /* Our semantic is exact opposite of ctor_for_folding;
6649 NULL means unknown, while error_mark_node is 0. */
6650 if (init == error_mark_node)
6651 return NULL_TREE;
6652 if (!init)
6653 return error_mark_node;
6654 return init;
6657 case VIEW_CONVERT_EXPR:
6658 return get_base_constructor (TREE_OPERAND (base, 0),
6659 bit_offset, valueize);
/* For handled component references recurse on the base after
   accumulating the component's constant bit offset; give up on
   variable-sized accesses.  */
6661 case ARRAY_REF:
6662 case COMPONENT_REF:
6663 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6664 &reverse);
6665 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6666 return NULL_TREE;
6667 *bit_offset += bit_offset2;
6668 return get_base_constructor (base, bit_offset, valueize);
6670 case CONSTRUCTOR:
6671 return base;
6673 default:
6674 if (CONSTANT_CLASS_P (base))
6675 return base;
6677 return NULL_TREE;
6681 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6682 to the memory at bit OFFSET. When non-null, TYPE is the expected
6683 type of the reference; otherwise the type of the referenced element
6684 is used instead. When SIZE is zero, attempt to fold a reference to
6685 the entire element which OFFSET refers to. Increment *SUBOFF by
6686 the bit offset of the accessed element. */
6688 static tree
6689 fold_array_ctor_reference (tree type, tree ctor,
6690 unsigned HOST_WIDE_INT offset,
6691 unsigned HOST_WIDE_INT size,
6692 tree from_decl,
6693 unsigned HOST_WIDE_INT *suboff)
6695 offset_int low_bound;
6696 offset_int elt_size;
6697 offset_int access_index;
6698 tree domain_type = NULL_TREE;
6699 HOST_WIDE_INT inner_offset;
6701 /* Compute low bound and elt size. */
6702 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6703 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6704 if (domain_type && TYPE_MIN_VALUE (domain_type))
6706 /* Static constructors for variably sized objects make no sense. */
6707 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6708 return NULL_TREE;
6709 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6711 else
6712 low_bound = 0;
6713 /* Static constructors for variably sized objects make no sense. */
6714 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6715 return NULL_TREE;
6716 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6718 /* When TYPE is non-null, verify that it specifies a constant-sized
6719 access not larger than size of array element. Avoid division
6720 by zero below when ELT_SIZE is zero, such as with the result of
6721 an initializer for a zero-length array or an empty struct. */
6722 if (elt_size == 0
6723 || (type
6724 && (!TYPE_SIZE_UNIT (type)
6725 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6726 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type)))))
6727 return NULL_TREE;
6729 /* Compute the array index we look for. */
6730 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6731 elt_size);
6732 access_index += low_bound;
6734 /* And offset within the access. */
6735 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6737 /* See if the array field is large enough to span whole access. We do not
6738 care to fold accesses spanning multiple array indexes. */
6739 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6740 return NULL_TREE;
6741 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6743 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6745 /* For the final reference to the entire accessed element
6746 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6747 may be null) in favor of the type of the element, and set
6748 SIZE to the size of the accessed element. */
6749 inner_offset = 0;
6750 type = TREE_TYPE (val);
6751 size = elt_size.to_uhwi () * BITS_PER_UNIT;
/* Record the bit offset of the element and recurse into its value.  */
6754 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6755 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6756 suboff);
6759 /* Memory not explicitly mentioned in constructor is 0 (or
6760 the reference is out of range). */
6761 return type ? build_zero_cst (type) : NULL_TREE;
6764 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6765 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6766 is the expected type of the reference; otherwise the type of
6767 the referenced member is used instead. When SIZE is zero,
6768 attempt to fold a reference to the entire member which OFFSET
6769 refers to. Increment *SUBOFF by the bit offset
6770 of the accessed member. */
6772 static tree
6773 fold_nonarray_ctor_reference (tree type, tree ctor,
6774 unsigned HOST_WIDE_INT offset,
6775 unsigned HOST_WIDE_INT size,
6776 tree from_decl,
6777 unsigned HOST_WIDE_INT *suboff)
6779 unsigned HOST_WIDE_INT cnt;
6780 tree cfield, cval;
/* Walk the constructor's (field, value) pairs looking for the field
   overlapping [OFFSET, OFFSET + SIZE).  */
6782 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6783 cval)
6785 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6786 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6787 tree field_size = DECL_SIZE (cfield);
6789 if (!field_size)
6791 /* Determine the size of the flexible array member from
6792 the size of the initializer provided for it. */
6793 field_size = TYPE_SIZE (TREE_TYPE (cval));
6796 /* Variable sized objects in static constructors make no sense,
6797 but field_size can be NULL for flexible array members. */
6798 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6799 && TREE_CODE (byte_offset) == INTEGER_CST
6800 && (field_size != NULL_TREE
6801 ? TREE_CODE (field_size) == INTEGER_CST
6802 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6804 /* Compute bit offset of the field. */
6805 offset_int bitoffset
6806 = (wi::to_offset (field_offset)
6807 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6808 /* Compute bit offset where the field ends. */
6809 offset_int bitoffset_end;
6810 if (field_size != NULL_TREE)
6811 bitoffset_end = bitoffset + wi::to_offset (field_size);
6812 else
6813 bitoffset_end = 0;
6815 /* Compute the bit offset of the end of the desired access.
6816 As a special case, if the size of the desired access is
6817 zero, assume the access is to the entire field (and let
6818 the caller make any necessary adjustments by storing
6819 the actual bounds of the field in FIELDBOUNDS). */
6820 offset_int access_end = offset_int (offset);
6821 if (size)
6822 access_end += size;
6823 else
6824 access_end = bitoffset_end;
6826 /* Is there any overlap between the desired access at
6827 [OFFSET, OFFSET+SIZE) and the offset of the field within
6828 the object at [BITOFFSET, BITOFFSET_END)? */
6829 if (wi::cmps (access_end, bitoffset) > 0
6830 && (field_size == NULL_TREE
6831 || wi::lts_p (offset, bitoffset_end)))
6833 *suboff += bitoffset.to_uhwi ();
6835 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6837 /* For the final reference to the entire accessed member
6838 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6839 be null) in favor of the type of the member, and set
6840 SIZE to the size of the accessed member. */
6841 offset = bitoffset.to_uhwi ();
6842 type = TREE_TYPE (cval);
6843 size = (bitoffset_end - bitoffset).to_uhwi ();
6846 /* We do have overlap. Now see if the field is large enough
6847 to cover the access. Give up for accesses that extend
6848 beyond the end of the object or that span multiple fields. */
6849 if (wi::cmps (access_end, bitoffset_end) > 0)
6850 return NULL_TREE;
6851 if (offset < bitoffset)
6852 return NULL_TREE;
/* Recurse into the member's value with the offset rebased to the
   start of the member.  */
6854 offset_int inner_offset = offset_int (offset) - bitoffset;
6855 return fold_ctor_reference (type, cval,
6856 inner_offset.to_uhwi (), size,
6857 from_decl, suboff);
6860 /* Memory not explicitly mentioned in constructor is 0. */
6861 return type ? build_zero_cst (type) : NULL_TREE;
6864 /* CTOR is value initializing memory. Fold a reference of TYPE and
6865 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6866 is zero, attempt to fold a reference to the entire subobject
6867 which OFFSET refers to. This is used when folding accesses to
6868 string members of aggregates. When non-null, set *SUBOFF to
6869 the bit offset of the accessed subobject. */
6871 tree
6872 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6873 const poly_uint64 &poly_size, tree from_decl,
6874 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6876 tree ret;
6878 /* We found the field with exact match. */
6879 if (type
6880 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6881 && known_eq (poly_offset, 0U))
6882 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6884 /* The remaining optimizations need a constant size and offset. */
6885 unsigned HOST_WIDE_INT size, offset;
6886 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6887 return NULL_TREE;
6889 /* We are at the end of walk, see if we can view convert the
6890 result. */
6891 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6892 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6893 && !compare_tree_int (TYPE_SIZE (type), size)
6894 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6896 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6897 if (ret)
6899 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6900 if (ret)
6901 STRIP_USELESS_TYPE_CONVERSION (ret);
6903 return ret;
6905 /* For constants and byte-aligned/sized reads try to go through
6906 native_encode/interpret. */
6907 if (CONSTANT_CLASS_P (ctor)
6908 && BITS_PER_UNIT == 8
6909 && offset % BITS_PER_UNIT == 0
6910 && size % BITS_PER_UNIT == 0
6911 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6913 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6914 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6915 offset / BITS_PER_UNIT);
6916 if (len > 0)
6917 return native_interpret_expr (type, buf, len);
/* Dispatch CONSTRUCTORs to the array or non-array walker; use a local
   dummy when the caller does not care about *SUBOFF.  */
6919 if (TREE_CODE (ctor) == CONSTRUCTOR)
6921 unsigned HOST_WIDE_INT dummy = 0;
6922 if (!suboff)
6923 suboff = &dummy;
6925 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6926 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6927 return fold_array_ctor_reference (type, ctor, offset, size,
6928 from_decl, suboff);
6930 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6931 from_decl, suboff);
6934 return NULL_TREE;
6937 /* Return the tree representing the element referenced by T if T is an
6938 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6939 names using VALUEIZE. Return NULL_TREE otherwise. */
6941 tree
6942 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6944 tree ctor, idx, base;
6945 poly_int64 offset, size, max_size;
6946 tree tem;
6947 bool reverse;
6949 if (TREE_THIS_VOLATILE (t))
6950 return NULL_TREE;
6952 if (DECL_P (t))
6953 return get_symbol_constant_value (t);
6955 tem = fold_read_from_constant_string (t);
6956 if (tem)
6957 return tem;
6959 switch (TREE_CODE (t))
6961 case ARRAY_REF:
6962 case ARRAY_RANGE_REF:
6963 /* Constant indexes are handled well by get_base_constructor.
6964 Only special case variable offsets.
6965 FIXME: This code can't handle nested references with variable indexes
6966 (they will be handled only by iteration of ccp). Perhaps we can bring
6967 get_ref_base_and_extent here and make it use a valueize callback. */
6968 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6969 && valueize
6970 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6971 && poly_int_tree_p (idx))
6973 tree low_bound, unit_size;
6975 /* If the resulting bit-offset is constant, track it. */
6976 if ((low_bound = array_ref_low_bound (t),
6977 poly_int_tree_p (low_bound))
6978 && (unit_size = array_ref_element_size (t),
6979 tree_fits_uhwi_p (unit_size)))
/* Translate the valueized index into a bit offset relative to the
   array base, sign-extended to the index type's precision.  */
6981 poly_offset_int woffset
6982 = wi::sext (wi::to_poly_offset (idx)
6983 - wi::to_poly_offset (low_bound),
6984 TYPE_PRECISION (TREE_TYPE (idx)));
6985 woffset *= tree_to_uhwi (unit_size);
6986 woffset *= BITS_PER_UNIT;
6987 if (woffset.to_shwi (&offset))
6989 base = TREE_OPERAND (t, 0);
6990 ctor = get_base_constructor (base, &offset, valueize);
6991 /* Empty constructor. Always fold to 0. */
6992 if (ctor == error_mark_node)
6993 return build_zero_cst (TREE_TYPE (t));
6994 /* Out of bound array access. Value is undefined,
6995 but don't fold. */
6996 if (maybe_lt (offset, 0))
6997 return NULL_TREE;
6998 /* We cannot determine ctor. */
6999 if (!ctor)
7000 return NULL_TREE;
7001 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7002 tree_to_uhwi (unit_size)
7003 * BITS_PER_UNIT,
7004 base);
7008 /* Fallthru. */
7010 case COMPONENT_REF:
7011 case BIT_FIELD_REF:
7012 case TARGET_MEM_REF:
7013 case MEM_REF:
7014 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7015 ctor = get_base_constructor (base, &offset, valueize);
7017 /* Empty constructor. Always fold to 0. */
7018 if (ctor == error_mark_node)
7019 return build_zero_cst (TREE_TYPE (t));
7020 /* We do not know precise address. */
7021 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7022 return NULL_TREE;
7023 /* We cannot determine ctor. */
7024 if (!ctor)
7025 return NULL_TREE;
7027 /* Out of bound array access. Value is undefined, but don't fold. */
7028 if (maybe_lt (offset, 0))
7029 return NULL_TREE;
7031 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7032 base);
/* __real__/__imag__ of a folded COMPLEX_CST can be folded further.  */
7034 case REALPART_EXPR:
7035 case IMAGPART_EXPR:
7037 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7038 if (c && TREE_CODE (c) == COMPLEX_CST)
7039 return fold_build1_loc (EXPR_LOCATION (t),
7040 TREE_CODE (t), TREE_TYPE (t), c);
7041 break;
7044 default:
7045 break;
7048 return NULL_TREE;
7051 tree
7052 fold_const_aggregate_ref (tree t)
7054 return fold_const_aggregate_ref_1 (t, NULL);
7057 /* Lookup virtual method with index TOKEN in a virtual table V
7058 at OFFSET.
7059 Set CAN_REFER if non-NULL to false if method
7060 is not referable or if the virtual table is ill-formed (such as rewritten
7061 by non-C++ produced symbol). Otherwise just return NULL in that case. */
7063 tree
7064 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7065 tree v,
7066 unsigned HOST_WIDE_INT offset,
7067 bool *can_refer)
7069 tree vtable = v, init, fn;
7070 unsigned HOST_WIDE_INT size;
7071 unsigned HOST_WIDE_INT elt_size, access_index;
7072 tree domain_type;
7074 if (can_refer)
7075 *can_refer = true;
7077 /* First of all double check we have virtual table. */
7078 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7080 /* Pass down that we lost track of the target. */
7081 if (can_refer)
7082 *can_refer = false;
7083 return NULL_TREE;
7086 init = ctor_for_folding (v);
7088 /* The virtual tables should always be born with constructors
7089 and we always should assume that they are available for
7090 folding. At the moment we do not stream them in all cases,
7091 but it should never happen that ctor seem unreachable. */
7092 gcc_assert (init);
7093 if (init == error_mark_node)
7095 /* Pass down that we lost track of the target. */
7096 if (can_refer)
7097 *can_refer = false;
7098 return NULL_TREE;
/* Compute the bit offset of the TOKEN'th entry in the vtable array.  */
7100 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7101 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7102 offset *= BITS_PER_UNIT;
7103 offset += token * size;
7105 /* Lookup the value in the constructor that is assumed to be array.
7106 This is equivalent to
7107 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7108 offset, size, NULL);
7109 but in a constant time. We expect that frontend produced a simple
7110 array without indexed initializers. */
7112 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7113 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7114 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7115 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7117 access_index = offset / BITS_PER_UNIT / elt_size;
7118 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7120 /* The C++ FE can now produce indexed fields, and we check if the indexes
7121 match. */
7122 if (access_index < CONSTRUCTOR_NELTS (init))
7124 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7125 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7126 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7127 STRIP_NOPS (fn);
7129 else
7130 fn = NULL;
7132 /* For type inconsistent program we may end up looking up virtual method
7133 in virtual table that does not contain TOKEN entries. We may overrun
7134 the virtual table and pick up a constant or RTTI info pointer.
7135 In any case the call is undefined. */
7136 if (!fn
7137 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7138 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7139 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7140 else
7142 fn = TREE_OPERAND (fn, 0);
7144 /* When cgraph node is missing and function is not public, we cannot
7145 devirtualize. This can happen in WHOPR when the actual method
7146 ends up in other partition, because we found devirtualization
7147 possibility too late. */
7148 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7150 if (can_refer)
7152 *can_refer = false;
7153 return fn;
7155 return NULL_TREE;
7159 /* Make sure we create a cgraph node for functions we'll reference.
7160 They can be non-existent if the reference comes from an entry
7161 of an external vtable for example. */
7162 cgraph_node::get_create (fn);
7164 return fn;
7167 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7168 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7169 KNOWN_BINFO carries the binfo describing the true type of
7170 OBJ_TYPE_REF_OBJECT(REF).
7171 Set CAN_REFER if non-NULL to false if method
7172 is not referable or if the virtual table is ill-formed (such as rewriten
7173 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7175 tree
7176 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7177 bool *can_refer)
7179 unsigned HOST_WIDE_INT offset;
7180 tree v;
7182 v = BINFO_VTABLE (known_binfo);
7183 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7184 if (!v)
7185 return NULL_TREE;
7187 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7189 if (can_refer)
7190 *can_refer = false;
7191 return NULL_TREE;
7193 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7196 /* Given a pointer value T, return a simplified version of an
7197 indirection through T, or NULL_TREE if no simplification is
7198 possible. Note that the resulting type may be different from
7199 the type pointed to in the sense that it is still compatible
7200 from the langhooks point of view. */
7202 tree
7203 gimple_fold_indirect_ref (tree t)
7205 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7206 tree sub = t;
7207 tree subtype;
7209 STRIP_NOPS (sub);
7210 subtype = TREE_TYPE (sub);
7211 if (!POINTER_TYPE_P (subtype)
7212 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7213 return NULL_TREE;
7215 if (TREE_CODE (sub) == ADDR_EXPR)
7217 tree op = TREE_OPERAND (sub, 0);
7218 tree optype = TREE_TYPE (op);
7219 /* *&p => p */
7220 if (useless_type_conversion_p (type, optype))
7221 return op;
7223 /* *(foo *)&fooarray => fooarray[0] */
7224 if (TREE_CODE (optype) == ARRAY_TYPE
7225 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7226 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7228 tree type_domain = TYPE_DOMAIN (optype);
7229 tree min_val = size_zero_node;
7230 if (type_domain && TYPE_MIN_VALUE (type_domain))
7231 min_val = TYPE_MIN_VALUE (type_domain);
7232 if (TREE_CODE (min_val) == INTEGER_CST)
7233 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7235 /* *(foo *)&complexfoo => __real__ complexfoo */
7236 else if (TREE_CODE (optype) == COMPLEX_TYPE
7237 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7238 return fold_build1 (REALPART_EXPR, type, op);
7239 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7240 else if (TREE_CODE (optype) == VECTOR_TYPE
7241 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7243 tree part_width = TYPE_SIZE (type);
7244 tree index = bitsize_int (0);
7245 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7249 /* *(p + CST) -> ... */
7250 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7251 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7253 tree addr = TREE_OPERAND (sub, 0);
7254 tree off = TREE_OPERAND (sub, 1);
7255 tree addrtype;
7257 STRIP_NOPS (addr);
7258 addrtype = TREE_TYPE (addr);
7260 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7261 if (TREE_CODE (addr) == ADDR_EXPR
7262 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7263 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7264 && tree_fits_uhwi_p (off))
7266 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7267 tree part_width = TYPE_SIZE (type);
7268 unsigned HOST_WIDE_INT part_widthi
7269 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7270 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7271 tree index = bitsize_int (indexi);
/* Only fold when the offset selects an element inside the vector.  */
7272 if (known_lt (offset / part_widthi,
7273 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7274 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7275 part_width, index);
7278 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7279 if (TREE_CODE (addr) == ADDR_EXPR
7280 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7281 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7283 tree size = TYPE_SIZE_UNIT (type);
7284 if (tree_int_cst_equal (size, off))
7285 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7288 /* *(p + CST) -> MEM_REF <p, CST>. */
7289 if (TREE_CODE (addr) != ADDR_EXPR
7290 || DECL_P (TREE_OPERAND (addr, 0)))
7291 return fold_build2 (MEM_REF, type,
7292 addr,
7293 wide_int_to_tree (ptype, wi::to_wide (off)));
7296 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7297 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7298 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7299 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7301 tree type_domain;
7302 tree min_val = size_zero_node;
7303 tree osub = sub;
/* Recursively simplify the dereference of the array pointer itself,
   falling back to an explicit INDIRECT_REF.  */
7304 sub = gimple_fold_indirect_ref (sub);
7305 if (! sub)
7306 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7307 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7308 if (type_domain && TYPE_MIN_VALUE (type_domain))
7309 min_val = TYPE_MIN_VALUE (type_domain);
7310 if (TREE_CODE (min_val) == INTEGER_CST)
7311 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7314 return NULL_TREE;
7317 /* Return true if CODE is an operation that when operating on signed
7318 integer types involves undefined behavior on overflow and the
7319 operation can be expressed with unsigned arithmetic. */
7321 bool
7322 arith_code_with_undefined_signed_overflow (tree_code code)
7324 switch (code)
7326 case ABS_EXPR:
7327 case PLUS_EXPR:
7328 case MINUS_EXPR:
7329 case MULT_EXPR:
7330 case NEGATE_EXPR:
7331 case POINTER_PLUS_EXPR:
7332 return true;
7333 default:
7334 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* Perform the operation in the unsigned variant of the LHS type.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR has a dedicated unsigned code; all other handled codes
     become well-defined by converting their operands to TYPE.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a new unsigned LHS; the original LHS is set below from
     a conversion of that name.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer addition becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7379 /* The valueization hook we use for the gimple_build API simplification.
7380 This makes us match fold_buildN behavior by only combining with
7381 statements in the sequence(s) we are currently building. */
7383 static tree
7384 gimple_build_valueize (tree op)
7386 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7387 return op;
7388 return NULL_TREE;
7391 /* Build the expression CODE OP0 of type TYPE with location LOC,
7392 simplifying it first if possible. Returns the built
7393 expression value and appends statements possibly defining it
7394 to SEQ. */
7396 tree
7397 gimple_build (gimple_seq *seq, location_t loc,
7398 enum tree_code code, tree type, tree op0)
7400 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7401 if (!res)
7403 res = create_tmp_reg_or_ssa_name (type);
7404 gimple *stmt;
7405 if (code == REALPART_EXPR
7406 || code == IMAGPART_EXPR
7407 || code == VIEW_CONVERT_EXPR)
7408 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7409 else
7410 stmt = gimple_build_assign (res, code, op0);
7411 gimple_set_location (stmt, loc);
7412 gimple_seq_add_stmt_without_update (seq, stmt);
7414 return res;
7417 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7418 simplifying it first if possible. Returns the built
7419 expression value and appends statements possibly defining it
7420 to SEQ. */
7422 tree
7423 gimple_build (gimple_seq *seq, location_t loc,
7424 enum tree_code code, tree type, tree op0, tree op1)
7426 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7427 if (!res)
7429 res = create_tmp_reg_or_ssa_name (type);
7430 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7431 gimple_set_location (stmt, loc);
7432 gimple_seq_add_stmt_without_update (seq, stmt);
7434 return res;
7437 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7438 simplifying it first if possible. Returns the built
7439 expression value and appends statements possibly defining it
7440 to SEQ. */
7442 tree
7443 gimple_build (gimple_seq *seq, location_t loc,
7444 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7446 tree res = gimple_simplify (code, type, op0, op1, op2,
7447 seq, gimple_build_valueize);
7448 if (!res)
7450 res = create_tmp_reg_or_ssa_name (type);
7451 gimple *stmt;
7452 if (code == BIT_FIELD_REF)
7453 stmt = gimple_build_assign (res, code,
7454 build3 (code, type, op0, op1, op2));
7455 else
7456 stmt = gimple_build_assign (res, code, op0, op1, op2);
7457 gimple_set_location (stmt, loc);
7458 gimple_seq_add_stmt_without_update (seq, stmt);
7460 return res;
7463 /* Build the call FN (ARG0) with a result of type TYPE
7464 (or no result if TYPE is void) with location LOC,
7465 simplifying it first if possible. Returns the built
7466 expression value (or NULL_TREE if TYPE is void) and appends
7467 statements possibly defining it to SEQ. */
7469 tree
7470 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7471 tree type, tree arg0)
7473 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7474 if (!res)
7476 gcall *stmt;
7477 if (internal_fn_p (fn))
7478 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7479 else
7481 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7482 stmt = gimple_build_call (decl, 1, arg0);
7484 if (!VOID_TYPE_P (type))
7486 res = create_tmp_reg_or_ssa_name (type);
7487 gimple_call_set_lhs (stmt, res);
7489 gimple_set_location (stmt, loc);
7490 gimple_seq_add_stmt_without_update (seq, stmt);
7492 return res;
7495 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7496 (or no result if TYPE is void) with location LOC,
7497 simplifying it first if possible. Returns the built
7498 expression value (or NULL_TREE if TYPE is void) and appends
7499 statements possibly defining it to SEQ. */
7501 tree
7502 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7503 tree type, tree arg0, tree arg1)
7505 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7506 if (!res)
7508 gcall *stmt;
7509 if (internal_fn_p (fn))
7510 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7511 else
7513 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7514 stmt = gimple_build_call (decl, 2, arg0, arg1);
7516 if (!VOID_TYPE_P (type))
7518 res = create_tmp_reg_or_ssa_name (type);
7519 gimple_call_set_lhs (stmt, res);
7521 gimple_set_location (stmt, loc);
7522 gimple_seq_add_stmt_without_update (seq, stmt);
7524 return res;
7527 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7528 (or no result if TYPE is void) with location LOC,
7529 simplifying it first if possible. Returns the built
7530 expression value (or NULL_TREE if TYPE is void) and appends
7531 statements possibly defining it to SEQ. */
7533 tree
7534 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7535 tree type, tree arg0, tree arg1, tree arg2)
7537 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7538 seq, gimple_build_valueize);
7539 if (!res)
7541 gcall *stmt;
7542 if (internal_fn_p (fn))
7543 stmt = gimple_build_call_internal (as_internal_fn (fn),
7544 3, arg0, arg1, arg2);
7545 else
7547 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7548 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7550 if (!VOID_TYPE_P (type))
7552 res = create_tmp_reg_or_ssa_name (type);
7553 gimple_call_set_lhs (stmt, res);
7555 gimple_set_location (stmt, loc);
7556 gimple_seq_add_stmt_without_update (seq, stmt);
7558 return res;
7561 /* Build the conversion (TYPE) OP with a result of type TYPE
7562 with location LOC if such conversion is neccesary in GIMPLE,
7563 simplifying it first.
7564 Returns the built expression value and appends
7565 statements possibly defining it to SEQ. */
7567 tree
7568 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7570 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7571 return op;
7572 return gimple_build (seq, loc, NOP_EXPR, type, op);
7575 /* Build the conversion (ptrofftype) OP with a result of a type
7576 compatible with ptrofftype with location LOC if such conversion
7577 is neccesary in GIMPLE, simplifying it first.
7578 Returns the built expression value and appends
7579 statements possibly defining it to SEQ. */
7581 tree
7582 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7584 if (ptrofftype_p (TREE_TYPE (op)))
7585 return op;
7586 return gimple_convert (seq, loc, sizetype, op);
7589 /* Build a vector of type TYPE in which each element has the value OP.
7590 Return a gimple value for the result, appending any new statements
7591 to SEQ. */
7593 tree
7594 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7595 tree op)
7597 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7598 && !CONSTANT_CLASS_P (op))
7599 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7601 tree res, vec = build_vector_from_val (type, op);
7602 if (is_gimple_val (vec))
7603 return vec;
7604 if (gimple_in_ssa_p (cfun))
7605 res = make_ssa_name (type);
7606 else
7607 res = create_tmp_reg (type);
7608 gimple *stmt = gimple_build_assign (res, vec);
7609 gimple_set_location (stmt, loc);
7610 gimple_seq_add_stmt_without_update (seq, stmt);
7611 return res;
7614 /* Build a vector from BUILDER, handling the case in which some elements
7615 are non-constant. Return a gimple value for the result, appending any
7616 new instructions to SEQ.
7618 BUILDER must not have a stepped encoding on entry. This is because
7619 the function is not geared up to handle the arithmetic that would
7620 be needed in the variable case, and any code building a vector that
7621 is known to be constant should use BUILDER->build () directly. */
7623 tree
7624 gimple_build_vector (gimple_seq *seq, location_t loc,
7625 tree_vector_builder *builder)
7627 gcc_assert (builder->nelts_per_pattern () <= 2);
7628 unsigned int encoded_nelts = builder->encoded_nelts ();
7629 for (unsigned int i = 0; i < encoded_nelts; ++i)
7630 if (!TREE_CONSTANT ((*builder)[i]))
7632 tree type = builder->type ();
7633 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7634 vec<constructor_elt, va_gc> *v;
7635 vec_alloc (v, nelts);
7636 for (i = 0; i < nelts; ++i)
7637 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7639 tree res;
7640 if (gimple_in_ssa_p (cfun))
7641 res = make_ssa_name (type);
7642 else
7643 res = create_tmp_reg (type);
7644 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7645 gimple_set_location (stmt, loc);
7646 gimple_seq_add_stmt_without_update (seq, stmt);
7647 return res;
7649 return builder->build ();
7652 /* Return true if the result of assignment STMT is known to be non-negative.
7653 If the return value is based on the assumption that signed overflow is
7654 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7655 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7657 static bool
7658 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7659 int depth)
7661 enum tree_code code = gimple_assign_rhs_code (stmt);
7662 switch (get_gimple_rhs_class (code))
7664 case GIMPLE_UNARY_RHS:
7665 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7666 gimple_expr_type (stmt),
7667 gimple_assign_rhs1 (stmt),
7668 strict_overflow_p, depth);
7669 case GIMPLE_BINARY_RHS:
7670 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7671 gimple_expr_type (stmt),
7672 gimple_assign_rhs1 (stmt),
7673 gimple_assign_rhs2 (stmt),
7674 strict_overflow_p, depth);
7675 case GIMPLE_TERNARY_RHS:
7676 return false;
7677 case GIMPLE_SINGLE_RHS:
7678 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7679 strict_overflow_p, depth);
7680 case GIMPLE_INVALID_RHS:
7681 break;
7683 gcc_unreachable ();
7686 /* Return true if return value of call STMT is known to be non-negative.
7687 If the return value is based on the assumption that signed overflow is
7688 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7689 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7691 static bool
7692 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7693 int depth)
7695 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7696 gimple_call_arg (stmt, 0) : NULL_TREE;
7697 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7698 gimple_call_arg (stmt, 1) : NULL_TREE;
7700 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7701 gimple_call_combined_fn (stmt),
7702 arg0,
7703 arg1,
7704 strict_overflow_p, depth);
7707 /* Return true if return value of call STMT is known to be non-negative.
7708 If the return value is based on the assumption that signed overflow is
7709 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7710 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7712 static bool
7713 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7714 int depth)
7716 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7718 tree arg = gimple_phi_arg_def (stmt, i);
7719 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7720 return false;
7722 return true;
7725 /* Return true if STMT is known to compute a non-negative value.
7726 If the return value is based on the assumption that signed overflow is
7727 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7728 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7730 bool
7731 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7732 int depth)
7734 switch (gimple_code (stmt))
7736 case GIMPLE_ASSIGN:
7737 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7738 depth);
7739 case GIMPLE_CALL:
7740 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7741 depth);
7742 case GIMPLE_PHI:
7743 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7744 depth);
7745 default:
7746 return false;
7750 /* Return true if the floating-point value computed by assignment STMT
7751 is known to have an integer value. We also allow +Inf, -Inf and NaN
7752 to be considered integer values. Return false for signaling NaN.
7754 DEPTH is the current nesting depth of the query. */
7756 static bool
7757 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7759 enum tree_code code = gimple_assign_rhs_code (stmt);
7760 switch (get_gimple_rhs_class (code))
7762 case GIMPLE_UNARY_RHS:
7763 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7764 gimple_assign_rhs1 (stmt), depth);
7765 case GIMPLE_BINARY_RHS:
7766 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7767 gimple_assign_rhs1 (stmt),
7768 gimple_assign_rhs2 (stmt), depth);
7769 case GIMPLE_TERNARY_RHS:
7770 return false;
7771 case GIMPLE_SINGLE_RHS:
7772 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7773 case GIMPLE_INVALID_RHS:
7774 break;
7776 gcc_unreachable ();
7779 /* Return true if the floating-point value computed by call STMT is known
7780 to have an integer value. We also allow +Inf, -Inf and NaN to be
7781 considered integer values. Return false for signaling NaN.
7783 DEPTH is the current nesting depth of the query. */
7785 static bool
7786 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7788 tree arg0 = (gimple_call_num_args (stmt) > 0
7789 ? gimple_call_arg (stmt, 0)
7790 : NULL_TREE);
7791 tree arg1 = (gimple_call_num_args (stmt) > 1
7792 ? gimple_call_arg (stmt, 1)
7793 : NULL_TREE);
7794 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7795 arg0, arg1, depth);
7798 /* Return true if the floating-point result of phi STMT is known to have
7799 an integer value. We also allow +Inf, -Inf and NaN to be considered
7800 integer values. Return false for signaling NaN.
7802 DEPTH is the current nesting depth of the query. */
7804 static bool
7805 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7807 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7809 tree arg = gimple_phi_arg_def (stmt, i);
7810 if (!integer_valued_real_single_p (arg, depth + 1))
7811 return false;
7813 return true;
7816 /* Return true if the floating-point value computed by STMT is known
7817 to have an integer value. We also allow +Inf, -Inf and NaN to be
7818 considered integer values. Return false for signaling NaN.
7820 DEPTH is the current nesting depth of the query. */
7822 bool
7823 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7825 switch (gimple_code (stmt))
7827 case GIMPLE_ASSIGN:
7828 return gimple_assign_integer_valued_real_p (stmt, depth);
7829 case GIMPLE_CALL:
7830 return gimple_call_integer_valued_real_p (stmt, depth);
7831 case GIMPLE_PHI:
7832 return gimple_phi_integer_valued_real_p (stmt, depth);
7833 default:
7834 return false;