typeck.c (cp_build_function_call_vec): When mark_used fails unconditionally return...
[official-gcc.git] / gcc / gimple-fold.c
blob1b10bae55caa35b44ffe83ca40d67ca64063ace6
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
/* The kinds of string-length computation get_range_strlen can be asked
   to perform.  */
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87 /* Return true when DECL can be referenced from current unit.
88 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
89 We can get declarations that are not possible to reference for various
90 reasons:
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
94 when they are keyed to other compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to reference
99 to method that was partitioned elsehwere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
102 declaring the body.
103 3) COMDAT functions referred by external vtables that
104 we devirtualize only during final compilation stage.
105 At this time we already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
109 static bool
110 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
112 varpool_node *vnode;
113 struct cgraph_node *node;
114 symtab_node *snode;
116 if (DECL_ABSTRACT_P (decl))
117 return false;
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
121 || !VAR_OR_FUNCTION_DECL_P (decl))
122 return true;
124 /* Static objects can be referred only if they are defined and not optimized
125 out yet. */
126 if (!TREE_PUBLIC (decl))
128 if (DECL_EXTERNAL (decl))
129 return false;
130 /* Before we start optimizing unreachable code we can be sure all
131 static objects are defined. */
132 if (symtab->function_flags_ready)
133 return true;
134 snode = symtab_node::get (decl);
135 if (!snode || !snode->definition)
136 return false;
137 node = dyn_cast <cgraph_node *> (snode);
138 return !node || !node->global.inlined_to;
141 /* We will later output the initializer, so we can refer to it.
142 So we are concerned only when DECL comes from initializer of
143 external var or var that has been optimized out. */
144 if (!from_decl
145 || !VAR_P (from_decl)
146 || (!DECL_EXTERNAL (from_decl)
147 && (vnode = varpool_node::get (from_decl)) != NULL
148 && vnode->definition)
149 || (flag_ltrans
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->in_other_partition))
152 return true;
153 /* We are folding reference from external vtable. The vtable may reffer
154 to a symbol keyed to other compilation unit. The other compilation
155 unit may be in separate DSO and the symbol may be hidden. */
156 if (DECL_VISIBILITY_SPECIFIED (decl)
157 && DECL_EXTERNAL (decl)
158 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
159 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
160 return false;
161 /* When function is public, we always can introduce new reference.
162 Exception are the COMDAT functions where introducing a direct
163 reference imply need to include function body in the curren tunit. */
164 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
165 return true;
166 /* We have COMDAT. We are going to check if we still have definition
167 or if the definition is going to be output in other partition.
168 Bypass this when gimplifying; all needed functions will be produced.
170 As observed in PR20991 for already optimized out comdat virtual functions
171 it may be tempting to not necessarily give up because the copy will be
172 output elsewhere when corresponding vtable is output.
173 This is however not possible - ABI specify that COMDATs are output in
174 units where they are used and when the other unit was compiled with LTO
175 it is possible that vtable was kept public while the function itself
176 was privatized. */
177 if (!symtab->function_flags_ready)
178 return true;
180 snode = symtab_node::get (decl);
181 if (!snode
182 || ((!snode->definition || DECL_EXTERNAL (decl))
183 && (!snode->in_other_partition
184 || (!snode->forced_by_abi && !snode->force_output))))
185 return false;
186 node = dyn_cast <cgraph_node *> (snode);
187 return !node || !node->global.inlined_to;
190 /* Create a temporary for TYPE for a statement STMT. If the current function
191 is in SSA form, a SSA name is created. Otherwise a temporary register
192 is made. */
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 if (gimple_in_ssa_p (cfun))
198 return make_ssa_name (type, stmt);
199 else
200 return create_tmp_reg (type);
203 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
204 acceptable form for is_gimple_min_invariant.
205 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
207 tree
208 canonicalize_constructor_val (tree cval, tree from_decl)
210 if (CONSTANT_CLASS_P (cval))
211 return cval;
213 tree orig_cval = cval;
214 STRIP_NOPS (cval);
215 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
216 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
218 tree ptr = TREE_OPERAND (cval, 0);
219 if (is_gimple_min_invariant (ptr))
220 cval = build1_loc (EXPR_LOCATION (cval),
221 ADDR_EXPR, TREE_TYPE (ptr),
222 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
223 ptr,
224 fold_convert (ptr_type_node,
225 TREE_OPERAND (cval, 1))));
227 if (TREE_CODE (cval) == ADDR_EXPR)
229 tree base = NULL_TREE;
230 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
232 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
233 if (base)
234 TREE_OPERAND (cval, 0) = base;
236 else
237 base = get_base_address (TREE_OPERAND (cval, 0));
238 if (!base)
239 return NULL_TREE;
241 if (VAR_OR_FUNCTION_DECL_P (base)
242 && !can_refer_decl_in_current_unit_p (base, from_decl))
243 return NULL_TREE;
244 if (TREE_TYPE (base) == error_mark_node)
245 return NULL_TREE;
246 if (VAR_P (base))
247 TREE_ADDRESSABLE (base) = 1;
248 else if (TREE_CODE (base) == FUNCTION_DECL)
250 /* Make sure we create a cgraph node for functions we'll reference.
251 They can be non-existent if the reference comes from an entry
252 of an external vtable for example. */
253 cgraph_node::get_create (base);
255 /* Fixup types in global initializers. */
256 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
257 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
259 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
260 cval = fold_convert (TREE_TYPE (orig_cval), cval);
261 return cval;
263 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
264 if (TREE_CODE (cval) == INTEGER_CST)
266 if (TREE_OVERFLOW_P (cval))
267 cval = drop_tree_overflow (cval);
268 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
269 cval = fold_convert (TREE_TYPE (orig_cval), cval);
270 return cval;
272 return orig_cval;
275 /* If SYM is a constant variable with known value, return the value.
276 NULL_TREE is returned otherwise. */
278 tree
279 get_symbol_constant_value (tree sym)
281 tree val = ctor_for_folding (sym);
282 if (val != error_mark_node)
284 if (val)
286 val = canonicalize_constructor_val (unshare_expr (val), sym);
287 if (val && is_gimple_min_invariant (val))
288 return val;
289 else
290 return NULL_TREE;
292 /* Variables declared 'const' without an initializer
293 have zero as the initializer if they may not be
294 overridden at link or run time. */
295 if (!val
296 && is_gimple_reg_type (TREE_TYPE (sym)))
297 return build_zero_cst (TREE_TYPE (sym));
300 return NULL_TREE;
305 /* Subroutine of fold_stmt. We perform several simplifications of the
306 memory reference tree EXPR and make sure to re-gimplify them properly
307 after propagation of constant addresses. IS_LHS is true if the
308 reference is supposed to be an lvalue. */
310 static tree
311 maybe_fold_reference (tree expr, bool is_lhs)
313 tree result;
315 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
316 || TREE_CODE (expr) == REALPART_EXPR
317 || TREE_CODE (expr) == IMAGPART_EXPR)
318 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 return fold_unary_loc (EXPR_LOCATION (expr),
320 TREE_CODE (expr),
321 TREE_TYPE (expr),
322 TREE_OPERAND (expr, 0));
323 else if (TREE_CODE (expr) == BIT_FIELD_REF
324 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
325 return fold_ternary_loc (EXPR_LOCATION (expr),
326 TREE_CODE (expr),
327 TREE_TYPE (expr),
328 TREE_OPERAND (expr, 0),
329 TREE_OPERAND (expr, 1),
330 TREE_OPERAND (expr, 2));
332 if (!is_lhs
333 && (result = fold_const_aggregate_ref (expr))
334 && is_gimple_min_invariant (result))
335 return result;
337 return NULL_TREE;
341 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
342 replacement rhs for the statement or NULL_TREE if no simplification
343 could be made. It is assumed that the operands have been previously
344 folded. */
346 static tree
347 fold_gimple_assign (gimple_stmt_iterator *si)
349 gimple *stmt = gsi_stmt (*si);
350 enum tree_code subcode = gimple_assign_rhs_code (stmt);
351 location_t loc = gimple_location (stmt);
353 tree result = NULL_TREE;
355 switch (get_gimple_rhs_class (subcode))
357 case GIMPLE_SINGLE_RHS:
359 tree rhs = gimple_assign_rhs1 (stmt);
361 if (TREE_CLOBBER_P (rhs))
362 return NULL_TREE;
364 if (REFERENCE_CLASS_P (rhs))
365 return maybe_fold_reference (rhs, false);
367 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
369 tree val = OBJ_TYPE_REF_EXPR (rhs);
370 if (is_gimple_min_invariant (val))
371 return val;
372 else if (flag_devirtualize && virtual_method_call_p (rhs))
374 bool final;
375 vec <cgraph_node *>targets
376 = possible_polymorphic_call_targets (rhs, stmt, &final);
377 if (final && targets.length () <= 1 && dbg_cnt (devirt))
379 if (dump_enabled_p ())
381 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
382 "resolving virtual function address "
383 "reference to function %s\n",
384 targets.length () == 1
385 ? targets[0]->name ()
386 : "NULL");
388 if (targets.length () == 1)
390 val = fold_convert (TREE_TYPE (val),
391 build_fold_addr_expr_loc
392 (loc, targets[0]->decl));
393 STRIP_USELESS_TYPE_CONVERSION (val);
395 else
396 /* We cannot use __builtin_unreachable here because it
397 cannot have address taken. */
398 val = build_int_cst (TREE_TYPE (val), 0);
399 return val;
404 else if (TREE_CODE (rhs) == ADDR_EXPR)
406 tree ref = TREE_OPERAND (rhs, 0);
407 tree tem = maybe_fold_reference (ref, true);
408 if (tem
409 && TREE_CODE (tem) == MEM_REF
410 && integer_zerop (TREE_OPERAND (tem, 1)))
411 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
412 else if (tem)
413 result = fold_convert (TREE_TYPE (rhs),
414 build_fold_addr_expr_loc (loc, tem));
415 else if (TREE_CODE (ref) == MEM_REF
416 && integer_zerop (TREE_OPERAND (ref, 1)))
417 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
419 if (result)
421 /* Strip away useless type conversions. Both the
422 NON_LVALUE_EXPR that may have been added by fold, and
423 "useless" type conversions that might now be apparent
424 due to propagation. */
425 STRIP_USELESS_TYPE_CONVERSION (result);
427 if (result != rhs && valid_gimple_rhs_p (result))
428 return result;
432 else if (TREE_CODE (rhs) == CONSTRUCTOR
433 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
435 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
436 unsigned i;
437 tree val;
439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
440 if (! CONSTANT_CLASS_P (val))
441 return NULL_TREE;
443 return build_vector_from_ctor (TREE_TYPE (rhs),
444 CONSTRUCTOR_ELTS (rhs));
447 else if (DECL_P (rhs))
448 return get_symbol_constant_value (rhs);
450 break;
452 case GIMPLE_UNARY_RHS:
453 break;
455 case GIMPLE_BINARY_RHS:
456 break;
458 case GIMPLE_TERNARY_RHS:
459 result = fold_ternary_loc (loc, subcode,
460 TREE_TYPE (gimple_assign_lhs (stmt)),
461 gimple_assign_rhs1 (stmt),
462 gimple_assign_rhs2 (stmt),
463 gimple_assign_rhs3 (stmt));
465 if (result)
467 STRIP_USELESS_TYPE_CONVERSION (result);
468 if (valid_gimple_rhs_p (result))
469 return result;
471 break;
473 case GIMPLE_INVALID_RHS:
474 gcc_unreachable ();
477 return NULL_TREE;
481 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
482 adjusting the replacement stmts location and virtual operands.
483 If the statement has a lhs the last stmt in the sequence is expected
484 to assign to that lhs. */
486 static void
487 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
489 gimple *stmt = gsi_stmt (*si_p);
491 if (gimple_has_location (stmt))
492 annotate_all_with_location (stmts, gimple_location (stmt));
494 /* First iterate over the replacement statements backward, assigning
495 virtual operands to their defining statements. */
496 gimple *laststore = NULL;
497 for (gimple_stmt_iterator i = gsi_last (stmts);
498 !gsi_end_p (i); gsi_prev (&i))
500 gimple *new_stmt = gsi_stmt (i);
501 if ((gimple_assign_single_p (new_stmt)
502 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
503 || (is_gimple_call (new_stmt)
504 && (gimple_call_flags (new_stmt)
505 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
507 tree vdef;
508 if (!laststore)
509 vdef = gimple_vdef (stmt);
510 else
511 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
512 gimple_set_vdef (new_stmt, vdef);
513 if (vdef && TREE_CODE (vdef) == SSA_NAME)
514 SSA_NAME_DEF_STMT (vdef) = new_stmt;
515 laststore = new_stmt;
519 /* Second iterate over the statements forward, assigning virtual
520 operands to their uses. */
521 tree reaching_vuse = gimple_vuse (stmt);
522 for (gimple_stmt_iterator i = gsi_start (stmts);
523 !gsi_end_p (i); gsi_next (&i))
525 gimple *new_stmt = gsi_stmt (i);
526 /* If the new statement possibly has a VUSE, update it with exact SSA
527 name we know will reach this one. */
528 if (gimple_has_mem_ops (new_stmt))
529 gimple_set_vuse (new_stmt, reaching_vuse);
530 gimple_set_modified (new_stmt, true);
531 if (gimple_vdef (new_stmt))
532 reaching_vuse = gimple_vdef (new_stmt);
535 /* If the new sequence does not do a store release the virtual
536 definition of the original statement. */
537 if (reaching_vuse
538 && reaching_vuse == gimple_vuse (stmt))
540 tree vdef = gimple_vdef (stmt);
541 if (vdef
542 && TREE_CODE (vdef) == SSA_NAME)
544 unlink_stmt_vdef (stmt);
545 release_ssa_name (vdef);
549 /* Finally replace the original statement with the sequence. */
550 gsi_replace_with_seq (si_p, stmts, false);
553 /* Convert EXPR into a GIMPLE value suitable for substitution on the
554 RHS of an assignment. Insert the necessary statements before
555 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
556 is replaced. If the call is expected to produces a result, then it
557 is replaced by an assignment of the new RHS to the result variable.
558 If the result is to be ignored, then the call is replaced by a
559 GIMPLE_NOP. A proper VDEF chain is retained by making the first
560 VUSE and the last VDEF of the whole sequence be the same as the replaced
561 statement and using new SSA names for stores in between. */
563 void
564 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
566 tree lhs;
567 gimple *stmt, *new_stmt;
568 gimple_stmt_iterator i;
569 gimple_seq stmts = NULL;
571 stmt = gsi_stmt (*si_p);
573 gcc_assert (is_gimple_call (stmt));
575 push_gimplify_context (gimple_in_ssa_p (cfun));
577 lhs = gimple_call_lhs (stmt);
578 if (lhs == NULL_TREE)
580 gimplify_and_add (expr, &stmts);
581 /* We can end up with folding a memcpy of an empty class assignment
582 which gets optimized away by C++ gimplification. */
583 if (gimple_seq_empty_p (stmts))
585 pop_gimplify_context (NULL);
586 if (gimple_in_ssa_p (cfun))
588 unlink_stmt_vdef (stmt);
589 release_defs (stmt);
591 gsi_replace (si_p, gimple_build_nop (), false);
592 return;
595 else
597 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
598 new_stmt = gimple_build_assign (lhs, tmp);
599 i = gsi_last (stmts);
600 gsi_insert_after_without_update (&i, new_stmt,
601 GSI_CONTINUE_LINKING);
604 pop_gimplify_context (NULL);
606 gsi_replace_with_seq_vops (si_p, stmts);
610 /* Replace the call at *GSI with the gimple value VAL. */
612 void
613 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
615 gimple *stmt = gsi_stmt (*gsi);
616 tree lhs = gimple_call_lhs (stmt);
617 gimple *repl;
618 if (lhs)
620 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
621 val = fold_convert (TREE_TYPE (lhs), val);
622 repl = gimple_build_assign (lhs, val);
624 else
625 repl = gimple_build_nop ();
626 tree vdef = gimple_vdef (stmt);
627 if (vdef && TREE_CODE (vdef) == SSA_NAME)
629 unlink_stmt_vdef (stmt);
630 release_ssa_name (vdef);
632 gsi_replace (gsi, repl, false);
635 /* Replace the call at *GSI with the new call REPL and fold that
636 again. */
638 static void
639 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
641 gimple *stmt = gsi_stmt (*gsi);
642 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
643 gimple_set_location (repl, gimple_location (stmt));
644 if (gimple_vdef (stmt)
645 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
647 gimple_set_vdef (repl, gimple_vdef (stmt));
648 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
650 if (gimple_vuse (stmt))
651 gimple_set_vuse (repl, gimple_vuse (stmt));
652 gsi_replace (gsi, repl, false);
653 fold_stmt (gsi);
656 /* Return true if VAR is a VAR_DECL or a component thereof. */
658 static bool
659 var_decl_component_p (tree var)
661 tree inner = var;
662 while (handled_component_p (inner))
663 inner = TREE_OPERAND (inner, 0);
664 return (DECL_P (inner)
665 || (TREE_CODE (inner) == MEM_REF
666 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
669 /* Return TRUE if the SIZE argument, representing the size of an
670 object, is in a range of values of which exactly zero is valid. */
672 static bool
673 size_must_be_zero_p (tree size)
675 if (integer_zerop (size))
676 return true;
678 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
679 return false;
681 tree type = TREE_TYPE (size);
682 int prec = TYPE_PRECISION (type);
684 /* Compute the value of SSIZE_MAX, the largest positive value that
685 can be stored in ssize_t, the signed counterpart of size_t. */
686 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
687 value_range valid_range (VR_RANGE,
688 build_int_cst (type, 0),
689 wide_int_to_tree (type, ssize_max));
690 value_range vr;
691 get_range_info (size, vr);
692 vr.intersect (&valid_range);
693 return vr.zero_p ();
696 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
697 diagnose (otherwise undefined) overlapping copies without preventing
698 folding. When folded, GCC guarantees that overlapping memcpy has
699 the same semantics as memmove. Call to the library memcpy need not
700 provide the same guarantee. Return false if no simplification can
701 be made. */
703 static bool
704 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
705 tree dest, tree src, enum built_in_function code)
707 gimple *stmt = gsi_stmt (*gsi);
708 tree lhs = gimple_call_lhs (stmt);
709 tree len = gimple_call_arg (stmt, 2);
710 tree destvar, srcvar;
711 location_t loc = gimple_location (stmt);
713 /* If the LEN parameter is a constant zero or in range where
714 the only valid value is zero, return DEST. */
715 if (size_must_be_zero_p (len))
717 gimple *repl;
718 if (gimple_call_lhs (stmt))
719 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
720 else
721 repl = gimple_build_nop ();
722 tree vdef = gimple_vdef (stmt);
723 if (vdef && TREE_CODE (vdef) == SSA_NAME)
725 unlink_stmt_vdef (stmt);
726 release_ssa_name (vdef);
728 gsi_replace (gsi, repl, false);
729 return true;
732 /* If SRC and DEST are the same (and not volatile), return
733 DEST{,+LEN,+LEN-1}. */
734 if (operand_equal_p (src, dest, 0))
736 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
737 It's safe and may even be emitted by GCC itself (see bug
738 32667). */
739 unlink_stmt_vdef (stmt);
740 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
741 release_ssa_name (gimple_vdef (stmt));
742 if (!lhs)
744 gsi_replace (gsi, gimple_build_nop (), false);
745 return true;
747 goto done;
749 else
751 tree srctype, desttype;
752 unsigned int src_align, dest_align;
753 tree off0;
754 const char *tmp_str;
755 unsigned HOST_WIDE_INT tmp_len;
757 /* Build accesses at offset zero with a ref-all character type. */
758 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
759 ptr_mode, true), 0);
761 /* If we can perform the copy efficiently with first doing all loads
762 and then all stores inline it that way. Currently efficiently
763 means that we can load all the memory into a single integer
764 register which is what MOVE_MAX gives us. */
765 src_align = get_pointer_alignment (src);
766 dest_align = get_pointer_alignment (dest);
767 if (tree_fits_uhwi_p (len)
768 && compare_tree_int (len, MOVE_MAX) <= 0
769 /* ??? Don't transform copies from strings with known length this
770 confuses the tree-ssa-strlen.c. This doesn't handle
771 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
772 reason. */
773 && !c_strlen (src, 2)
774 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
775 && memchr (tmp_str, 0, tmp_len) == NULL))
777 unsigned ilen = tree_to_uhwi (len);
778 if (pow2p_hwi (ilen))
780 /* Detect out-of-bounds accesses without issuing warnings.
781 Avoid folding out-of-bounds copies but to avoid false
782 positives for unreachable code defer warning until after
783 DCE has worked its magic.
784 -Wrestrict is still diagnosed. */
785 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
786 dest, src, len, len,
787 false, false))
788 if (warning != OPT_Wrestrict)
789 return false;
791 scalar_int_mode mode;
792 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
793 if (type
794 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
795 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
796 /* If the destination pointer is not aligned we must be able
797 to emit an unaligned store. */
798 && (dest_align >= GET_MODE_ALIGNMENT (mode)
799 || !targetm.slow_unaligned_access (mode, dest_align)
800 || (optab_handler (movmisalign_optab, mode)
801 != CODE_FOR_nothing)))
803 tree srctype = type;
804 tree desttype = type;
805 if (src_align < GET_MODE_ALIGNMENT (mode))
806 srctype = build_aligned_type (type, src_align);
807 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
808 tree tem = fold_const_aggregate_ref (srcmem);
809 if (tem)
810 srcmem = tem;
811 else if (src_align < GET_MODE_ALIGNMENT (mode)
812 && targetm.slow_unaligned_access (mode, src_align)
813 && (optab_handler (movmisalign_optab, mode)
814 == CODE_FOR_nothing))
815 srcmem = NULL_TREE;
816 if (srcmem)
818 gimple *new_stmt;
819 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
821 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
822 srcmem
823 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
824 new_stmt);
825 gimple_assign_set_lhs (new_stmt, srcmem);
826 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
827 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
829 if (dest_align < GET_MODE_ALIGNMENT (mode))
830 desttype = build_aligned_type (type, dest_align);
831 new_stmt
832 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
833 dest, off0),
834 srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
837 if (gimple_vdef (new_stmt)
838 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
839 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
840 if (!lhs)
842 gsi_replace (gsi, new_stmt, false);
843 return true;
845 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
846 goto done;
852 if (code == BUILT_IN_MEMMOVE)
854 /* Both DEST and SRC must be pointer types.
855 ??? This is what old code did. Is the testing for pointer types
856 really mandatory?
858 If either SRC is readonly or length is 1, we can use memcpy. */
859 if (!dest_align || !src_align)
860 return false;
861 if (readonly_data_expr (src)
862 || (tree_fits_uhwi_p (len)
863 && (MIN (src_align, dest_align) / BITS_PER_UNIT
864 >= tree_to_uhwi (len))))
866 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
867 if (!fn)
868 return false;
869 gimple_call_set_fndecl (stmt, fn);
870 gimple_call_set_arg (stmt, 0, dest);
871 gimple_call_set_arg (stmt, 1, src);
872 fold_stmt (gsi);
873 return true;
876 /* If *src and *dest can't overlap, optimize into memcpy as well. */
877 if (TREE_CODE (src) == ADDR_EXPR
878 && TREE_CODE (dest) == ADDR_EXPR)
880 tree src_base, dest_base, fn;
881 poly_int64 src_offset = 0, dest_offset = 0;
882 poly_uint64 maxsize;
884 srcvar = TREE_OPERAND (src, 0);
885 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
886 if (src_base == NULL)
887 src_base = srcvar;
888 destvar = TREE_OPERAND (dest, 0);
889 dest_base = get_addr_base_and_unit_offset (destvar,
890 &dest_offset);
891 if (dest_base == NULL)
892 dest_base = destvar;
893 if (!poly_int_tree_p (len, &maxsize))
894 maxsize = -1;
895 if (SSA_VAR_P (src_base)
896 && SSA_VAR_P (dest_base))
898 if (operand_equal_p (src_base, dest_base, 0)
899 && ranges_maybe_overlap_p (src_offset, maxsize,
900 dest_offset, maxsize))
901 return false;
903 else if (TREE_CODE (src_base) == MEM_REF
904 && TREE_CODE (dest_base) == MEM_REF)
906 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
907 TREE_OPERAND (dest_base, 0), 0))
908 return false;
909 poly_offset_int full_src_offset
910 = mem_ref_offset (src_base) + src_offset;
911 poly_offset_int full_dest_offset
912 = mem_ref_offset (dest_base) + dest_offset;
913 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
914 full_dest_offset, maxsize))
915 return false;
917 else
918 return false;
920 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
921 if (!fn)
922 return false;
923 gimple_call_set_fndecl (stmt, fn);
924 gimple_call_set_arg (stmt, 0, dest);
925 gimple_call_set_arg (stmt, 1, src);
926 fold_stmt (gsi);
927 return true;
930 /* If the destination and source do not alias optimize into
931 memcpy as well. */
932 if ((is_gimple_min_invariant (dest)
933 || TREE_CODE (dest) == SSA_NAME)
934 && (is_gimple_min_invariant (src)
935 || TREE_CODE (src) == SSA_NAME))
937 ao_ref destr, srcr;
938 ao_ref_init_from_ptr_and_size (&destr, dest, len);
939 ao_ref_init_from_ptr_and_size (&srcr, src, len);
940 if (!refs_may_alias_p_1 (&destr, &srcr, false))
942 tree fn;
943 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
944 if (!fn)
945 return false;
946 gimple_call_set_fndecl (stmt, fn);
947 gimple_call_set_arg (stmt, 0, dest);
948 gimple_call_set_arg (stmt, 1, src);
949 fold_stmt (gsi);
950 return true;
954 return false;
957 if (!tree_fits_shwi_p (len))
958 return false;
959 if (!POINTER_TYPE_P (TREE_TYPE (src))
960 || !POINTER_TYPE_P (TREE_TYPE (dest)))
961 return false;
962 /* In the following try to find a type that is most natural to be
963 used for the memcpy source and destination and that allows
964 the most optimization when memcpy is turned into a plain assignment
965 using that type. In theory we could always use a char[len] type
966 but that only gains us that the destination and source possibly
967 no longer will have their address taken. */
968 srctype = TREE_TYPE (TREE_TYPE (src));
969 if (TREE_CODE (srctype) == ARRAY_TYPE
970 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
971 srctype = TREE_TYPE (srctype);
972 desttype = TREE_TYPE (TREE_TYPE (dest));
973 if (TREE_CODE (desttype) == ARRAY_TYPE
974 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
975 desttype = TREE_TYPE (desttype);
976 if (TREE_ADDRESSABLE (srctype)
977 || TREE_ADDRESSABLE (desttype))
978 return false;
980 /* Make sure we are not copying using a floating-point mode or
981 a type whose size possibly does not match its precision. */
982 if (FLOAT_MODE_P (TYPE_MODE (desttype))
983 || TREE_CODE (desttype) == BOOLEAN_TYPE
984 || TREE_CODE (desttype) == ENUMERAL_TYPE)
985 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
986 if (FLOAT_MODE_P (TYPE_MODE (srctype))
987 || TREE_CODE (srctype) == BOOLEAN_TYPE
988 || TREE_CODE (srctype) == ENUMERAL_TYPE)
989 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
990 if (!srctype)
991 srctype = desttype;
992 if (!desttype)
993 desttype = srctype;
994 if (!srctype)
995 return false;
997 src_align = get_pointer_alignment (src);
998 dest_align = get_pointer_alignment (dest);
999 if (dest_align < TYPE_ALIGN (desttype)
1000 || src_align < TYPE_ALIGN (srctype))
1001 return false;
1003 destvar = NULL_TREE;
1004 if (TREE_CODE (dest) == ADDR_EXPR
1005 && var_decl_component_p (TREE_OPERAND (dest, 0))
1006 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1007 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1009 srcvar = NULL_TREE;
1010 if (TREE_CODE (src) == ADDR_EXPR
1011 && var_decl_component_p (TREE_OPERAND (src, 0))
1012 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1014 if (!destvar
1015 || src_align >= TYPE_ALIGN (desttype))
1016 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1017 src, off0);
1018 else if (!STRICT_ALIGNMENT)
1020 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1021 src_align);
1022 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1026 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1027 return false;
1029 if (srcvar == NULL_TREE)
1031 if (src_align >= TYPE_ALIGN (desttype))
1032 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1038 src_align);
1039 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1042 else if (destvar == NULL_TREE)
1044 if (dest_align >= TYPE_ALIGN (srctype))
1045 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1046 else
1048 if (STRICT_ALIGNMENT)
1049 return false;
1050 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1051 dest_align);
1052 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1056 /* Same as above, detect out-of-bounds accesses without issuing
1057 warnings. Avoid folding out-of-bounds copies but to avoid
1058 false positives for unreachable code defer warning until
1059 after DCE has worked its magic.
1060 -Wrestrict is still diagnosed. */
1061 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1062 dest, src, len, len,
1063 false, false))
1064 if (warning != OPT_Wrestrict)
1065 return false;
1067 gimple *new_stmt;
1068 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1070 tree tem = fold_const_aggregate_ref (srcvar);
1071 if (tem)
1072 srcvar = tem;
1073 if (! is_gimple_min_invariant (srcvar))
1075 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1076 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1077 new_stmt);
1078 gimple_assign_set_lhs (new_stmt, srcvar);
1079 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1080 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1082 new_stmt = gimple_build_assign (destvar, srcvar);
1083 goto set_vop_and_replace;
1086 /* We get an aggregate copy. Use an unsigned char[] type to
1087 perform the copying to preserve padding and to avoid any issues
1088 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1089 desttype = build_array_type_nelts (unsigned_char_type_node,
1090 tree_to_uhwi (len));
1091 srctype = desttype;
1092 if (src_align > TYPE_ALIGN (srctype))
1093 srctype = build_aligned_type (srctype, src_align);
1094 if (dest_align > TYPE_ALIGN (desttype))
1095 desttype = build_aligned_type (desttype, dest_align);
1096 new_stmt
1097 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1098 fold_build2 (MEM_REF, srctype, src, off0));
1099 set_vop_and_replace:
1100 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1101 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1102 if (gimple_vdef (new_stmt)
1103 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1104 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1105 if (!lhs)
1107 gsi_replace (gsi, new_stmt, false);
1108 return true;
1110 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1113 done:
1114 gimple_seq stmts = NULL;
1115 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1116 len = NULL_TREE;
1117 else if (code == BUILT_IN_MEMPCPY)
1119 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1120 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1121 TREE_TYPE (dest), dest, len);
1123 else
1124 gcc_unreachable ();
1126 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1127 gimple *repl = gimple_build_assign (lhs, dest);
1128 gsi_replace (gsi, repl, false);
1129 return true;
1132 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1133 to built-in memcmp (a, b, len). */
1135 static bool
1136 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1138 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1140 if (!fn)
1141 return false;
1143 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1145 gimple *stmt = gsi_stmt (*gsi);
1146 tree a = gimple_call_arg (stmt, 0);
1147 tree b = gimple_call_arg (stmt, 1);
1148 tree len = gimple_call_arg (stmt, 2);
1150 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1151 replace_call_with_call_and_fold (gsi, repl);
1153 return true;
1156 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1157 to built-in memmove (dest, src, len). */
1159 static bool
1160 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1162 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1164 if (!fn)
1165 return false;
1167 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1168 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1169 len) into memmove (dest, src, len). */
1171 gimple *stmt = gsi_stmt (*gsi);
1172 tree src = gimple_call_arg (stmt, 0);
1173 tree dest = gimple_call_arg (stmt, 1);
1174 tree len = gimple_call_arg (stmt, 2);
1176 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1177 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1178 replace_call_with_call_and_fold (gsi, repl);
1180 return true;
1183 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1184 to built-in memset (dest, 0, len). */
1186 static bool
1187 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1189 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1191 if (!fn)
1192 return false;
1194 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1196 gimple *stmt = gsi_stmt (*gsi);
1197 tree dest = gimple_call_arg (stmt, 0);
1198 tree len = gimple_call_arg (stmt, 1);
1200 gimple_seq seq = NULL;
1201 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1202 gimple_seq_add_stmt_without_update (&seq, repl);
1203 gsi_replace_with_seq_vops (gsi, seq);
1204 fold_stmt (gsi);
1206 return true;
1209 /* Fold function call to builtin memset or bzero at *GSI setting the
1210 memory of size LEN to VAL. Return whether a simplification was made. */
1212 static bool
1213 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1215 gimple *stmt = gsi_stmt (*gsi);
1216 tree etype;
1217 unsigned HOST_WIDE_INT length, cval;
1219 /* If the LEN parameter is zero, return DEST. */
1220 if (integer_zerop (len))
1222 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1223 return true;
1226 if (! tree_fits_uhwi_p (len))
1227 return false;
1229 if (TREE_CODE (c) != INTEGER_CST)
1230 return false;
1232 tree dest = gimple_call_arg (stmt, 0);
1233 tree var = dest;
1234 if (TREE_CODE (var) != ADDR_EXPR)
1235 return false;
1237 var = TREE_OPERAND (var, 0);
1238 if (TREE_THIS_VOLATILE (var))
1239 return false;
1241 etype = TREE_TYPE (var);
1242 if (TREE_CODE (etype) == ARRAY_TYPE)
1243 etype = TREE_TYPE (etype);
1245 if (!INTEGRAL_TYPE_P (etype)
1246 && !POINTER_TYPE_P (etype))
1247 return NULL_TREE;
1249 if (! var_decl_component_p (var))
1250 return NULL_TREE;
1252 length = tree_to_uhwi (len);
1253 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1254 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1255 return NULL_TREE;
1257 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1258 return NULL_TREE;
1260 if (integer_zerop (c))
1261 cval = 0;
1262 else
1264 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1265 return NULL_TREE;
1267 cval = TREE_INT_CST_LOW (c);
1268 cval &= 0xff;
1269 cval |= cval << 8;
1270 cval |= cval << 16;
1271 cval |= (cval << 31) << 1;
1274 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1275 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1276 gimple_set_vuse (store, gimple_vuse (stmt));
1277 tree vdef = gimple_vdef (stmt);
1278 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1280 gimple_set_vdef (store, gimple_vdef (stmt));
1281 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1283 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1284 if (gimple_call_lhs (stmt))
1286 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1287 gsi_replace (gsi, asgn, false);
1289 else
1291 gimple_stmt_iterator gsi2 = *gsi;
1292 gsi_prev (gsi);
1293 gsi_remove (&gsi2, true);
1296 return true;
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Updates
   the bounds in *PDATA (minlen, maxlen, maxbound, decl) and returns
   true when a length (or, for RKIND == SRK_INT_VALUE, a value) could
   be determined for ARG.  ELTSIZE is the character element size
   (1, 2, or 4).  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*ptr)[0]: recurse on the SSA pointer being indexed.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* No constant length yet: for SRK_LENRANGE fall back to bounds
     derived from the type of the referenced array object.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  /* The longest string that fits leaves one byte for the nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (pdata->maxbound, val))
		pdata->maxbound = val;
	    }
	  else
	    /* A non-constant bound already recorded: pessimize to the
	       all-ones "unknown" sentinel.  */
	    pdata->maxbound = build_all_ones_cst (size_type_node);
	}
      else
	pdata->maxbound = val;
    }
  else
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }

	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG referes to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide characer strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  /* Non-SSA expressions are handled by the tree-level helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  The bitmap is
     allocated lazily on the first SSA name seen.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Recurse into both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
1675 /* Determine the minimum and maximum value or string length that ARG
1676 refers to and store each in the first two elements of MINMAXLEN.
1677 For expressions that point to strings of unknown lengths that are
1678 character arrays, use the upper bound of the array as the maximum
1679 length. For example, given an expression like 'x ? array : "xyz"'
1680 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1681 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1682 stored in array.
1683 Return true if the range of the string lengths has been obtained
1684 from the upper bound of an array at the end of a struct. Such
1685 an array may hold a string that's longer than its upper bound
1686 due to it being used as a poor-man's flexible array member.
1688 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1689 and false if PHIs and COND_EXPRs are to be handled optimistically,
1690 if we can determine string length minimum and maximum; it will use
1691 the minimum from the ones where it can be determined.
1692 STRICT false should be only used for warning code.
1693 When non-null, clear *NONSTR if ARG refers to a constant array
1694 that is known not be nul-terminated. Otherwise set it to
1695 the declaration of the constant non-terminated array.
1697 ELTSIZE is 1 for normal single byte character strings, and 2 or
1698 4 for wide characer strings. ELTSIZE is by default 1. */
1700 bool
1701 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1703 bitmap visited = NULL;
1705 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1707 /* On failure extend the length range to an impossible maximum
1708 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1709 members can stay unchanged regardless. */
1710 pdata->minlen = ssize_int (0);
1711 pdata->maxlen = build_all_ones_cst (size_type_node);
1713 else if (!pdata->minlen)
1714 pdata->minlen = ssize_int (0);
1716 /* Unless its null, leave the more conservative MAXBOUND unchanged. */
1717 if (!pdata->maxbound)
1718 pdata->maxbound = pdata->maxlen;
1720 if (visited)
1721 BITMAP_FREE (visited);
1723 return !integer_all_onesp (pdata->maxlen);
1726 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1727 For ARG of pointer types, NONSTR indicates if the caller is prepared
1728 to handle unterminated strings. For integer ARG and when RKIND ==
1729 SRK_INT_VALUE, NONSTR must be null.
1731 If an unterminated array is discovered and our caller handles
1732 unterminated arrays, then bubble up the offending DECL and
1733 return the maximum size. Otherwise return NULL. */
1735 static tree
1736 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1738 /* A non-null NONSTR is meaningless when determining the maximum
1739 value of an integer ARG. */
1740 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1741 /* ARG must have an integral type when RKIND says so. */
1742 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1744 bitmap visited = NULL;
1746 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1747 is unbounded. */
1748 c_strlen_data lendata = { };
1749 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1750 lendata.maxlen = NULL_TREE;
1751 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1752 lendata.maxlen = NULL_TREE;
1754 if (visited)
1755 BITMAP_FREE (visited);
1757 if (nonstr)
1759 /* For callers prepared to handle unterminated arrays set
1760 *NONSTR to point to the declaration of the array and return
1761 the maximum length/size. */
1762 *nonstr = lendata.decl;
1763 return lendata.maxlen;
1766 /* Fail if the constant array isn't nul-terminated. */
1767 return lendata.decl ? NULL_TREE : lendata.maxlen;
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Folds the call into DEST when SRC == DEST, or into a memcpy of
   strlen (SRC) + 1 bytes when the source length is a known constant.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen + memcpy expansion below trades size for speed; skip it
     when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  Warn once,
	 then mark the statement so the warning isn't repeated.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Copy strlen (SRC) + 1 bytes (including the terminating nul).  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN.  Folds the call into DEST when LEN is zero (diagnosing the
   pointless truncation), or into memcpy when LEN and the source length
   are known constants and the whole source (including the nul) fits.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   IS_STRRCHR selects between the two built-ins.  Return true if a
   simplification was made, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The folds below all produce a value; a call whose result is unused
     is left for DCE instead.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: evaluate on the host and fold to either
     a null pointer or STR plus the offset of the match.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen-based transforms below only apply when searching for
     the terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1994 /* Fold function call to builtin strstr.
1995 If both arguments are constant, evaluate and fold the result,
1996 additionally fold strstr (x, "") into x and strstr (x, "c")
1997 into strchr (x, 'c'). */
1998 static bool
1999 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2001 gimple *stmt = gsi_stmt (*gsi);
2002 tree haystack = gimple_call_arg (stmt, 0);
2003 tree needle = gimple_call_arg (stmt, 1);
2004 const char *p, *q;
2006 if (!gimple_call_lhs (stmt))
2007 return false;
2009 q = c_getstr (needle);
2010 if (q == NULL)
2011 return false;
2013 if ((p = c_getstr (haystack)))
2015 const char *r = strstr (p, q);
2017 if (r == NULL)
2019 replace_call_with_value (gsi, integer_zero_node);
2020 return true;
2023 tree len = build_int_cst (size_type_node, r - p);
2024 gimple_seq stmts = NULL;
2025 gimple *new_stmt
2026 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2027 haystack, len);
2028 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2029 gsi_replace_with_seq_vops (gsi, stmts);
2030 return true;
2033 /* For strstr (x, "") return x. */
2034 if (q[0] == '\0')
2036 replace_call_with_value (gsi, haystack);
2037 return true;
2040 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2041 if (q[1] == '\0')
2043 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2044 if (strchr_fn)
2046 tree c = build_int_cst (integer_type_node, q[0]);
2047 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2048 replace_call_with_call_and_fold (gsi, repl);
2049 return true;
2053 return false;
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified: either folded away entirely
   (zero-length source) or, when optimizing for speed and the source
   length is known, split into strlen (dst) followed by memcpy.
   Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion trades size for speed; keep the plain
     call when this block is optimized for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; materialize that for the LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2148 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2149 are the arguments to the call. */
2151 static bool
2152 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2154 gimple *stmt = gsi_stmt (*gsi);
2155 tree dest = gimple_call_arg (stmt, 0);
2156 tree src = gimple_call_arg (stmt, 1);
2157 tree size = gimple_call_arg (stmt, 2);
2158 tree fn;
2159 const char *p;
2162 p = c_getstr (src);
2163 /* If the SRC parameter is "", return DEST. */
2164 if (p && *p == '\0')
2166 replace_call_with_value (gsi, dest);
2167 return true;
2170 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2171 return false;
2173 /* If __builtin_strcat_chk is used, assume strcat is available. */
2174 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2175 if (!fn)
2176 return false;
2178 gimple *repl = gimple_build_call (fn, 2, dest, src);
2179 replace_call_with_call_and_fold (gsi, repl);
2180 return true;
/* Simplify a call to the strncat builtin.  Folds to DST when nothing
   would be copied, diagnoses suspicious bounds, and otherwise replaces
   the call with plain strcat when the bound is known to cover the whole
   source string.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Both the bound and the source string must be known from here on.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress later duplicate diagnostics on this statement.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2272 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2273 LEN, and SIZE. */
2275 static bool
2276 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2278 gimple *stmt = gsi_stmt (*gsi);
2279 tree dest = gimple_call_arg (stmt, 0);
2280 tree src = gimple_call_arg (stmt, 1);
2281 tree len = gimple_call_arg (stmt, 2);
2282 tree size = gimple_call_arg (stmt, 3);
2283 tree fn;
2284 const char *p;
2286 p = c_getstr (src);
2287 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2288 if ((p && *p == '\0')
2289 || integer_zerop (len))
2291 replace_call_with_value (gsi, dest);
2292 return true;
2295 if (! tree_fits_uhwi_p (size))
2296 return false;
2298 if (! integer_all_onesp (size))
2300 tree src_len = c_strlen (src, 1);
2301 if (src_len
2302 && tree_fits_uhwi_p (src_len)
2303 && tree_fits_uhwi_p (len)
2304 && ! tree_int_cst_lt (len, src_len))
2306 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2307 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2308 if (!fn)
2309 return false;
2311 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2312 replace_call_with_call_and_fold (gsi, repl);
2313 return true;
2315 return false;
2318 /* If __builtin_strncat_chk is used, assume strncat is available. */
2319 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2320 if (!fn)
2321 return false;
2323 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2324 replace_call_with_call_and_fold (gsi, repl);
2325 return true;
2328 /* Build and append gimple statements to STMTS that would load a first
2329 character of a memory location identified by STR. LOC is location
2330 of the statement. */
2332 static tree
2333 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2335 tree var;
2337 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2338 tree cst_uchar_ptr_node
2339 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2340 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2342 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2343 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2344 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2346 gimple_assign_set_lhs (stmt, var);
2347 gimple_seq_add_stmt_without_update (stmts, stmt);
2349 return var;
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   FCODE is the name of the builtin.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Bound for the strn* variants; -1 means no constant bound known.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* A case-sensitive match within the bound implies the
	       case-insensitive result is 0 too; a nonzero strncmp
	       tells us nothing about strncasecmp's result.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
      }

  /* At least one byte is compared for strcmp/strcasecmp and for the
     strn* variants with a bound >= 1.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen to int first, then negate.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Need a constant searched-for character (in target representation)
     and a constant length to evaluate the search at compile time.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Search only the part of the buffer whose contents are known.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* Fold to null only when the whole search range was known;
	     if LENGTH extends past the known string we cannot tell
	     whether the tail contains a match.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found a match: fold to ARG1 p+ offset.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Only applies when the call's return value is unused;
   folds away a zero-length write, or rewrites to fputc/fwrite when the
   string length is known.  Return false if no simplification was
   possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Drops the object-size
   check (rewriting to the plain mem* call) when the copy is provably
   within bounds, or folds away trivial self-copies.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The check may only be dropped when the destination is provably
	 large enough for the copy.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Drops the object-size
   check when the copied length is provably within SIZE, rewriting to
   the unchecked st{r,p}cpy, or degrades to __strcpy_chk/__memcpy_chk
   when only weaker information is available.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes so the NUL terminator is included.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* SIZE must strictly exceed the string length (for the NUL) to
	 drop the check.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2872 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2873 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2874 length passed as third argument. IGNORE is true if return value can be
2875 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2877 static bool
2878 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2879 tree dest, tree src,
2880 tree len, tree size,
2881 enum built_in_function fcode)
2883 gimple *stmt = gsi_stmt (*gsi);
2884 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2885 tree fn;
2887 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2889 /* If return value of __stpncpy_chk is ignored,
2890 optimize into __strncpy_chk. */
2891 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2892 if (fn)
2894 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2895 replace_call_with_call_and_fold (gsi, repl);
2896 return true;
2900 if (! tree_fits_uhwi_p (size))
2901 return false;
2903 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2904 if (! integer_all_onesp (size))
2906 if (! tree_fits_uhwi_p (len))
2908 /* If LEN is not constant, try MAXLEN too.
2909 For MAXLEN only allow optimizing into non-_ocs function
2910 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2911 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2912 return false;
2914 else
2915 maxlen = len;
2917 if (tree_int_cst_lt (size, maxlen))
2918 return false;
2921 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2922 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2923 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2924 if (!fn)
2925 return false;
2927 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2928 replace_call_with_call_and_fold (gsi, repl);
2929 return true;
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes so the terminating NUL comes along.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Move the original call's virtual operands onto the memcpy by hand,
     since the replacement is inserted without an SSA update.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk pointed to by GSI into the
   corresponding unchecked {,v}snprintf call when the object-size check
   can be dropped.  Return false if a normal call should be emitted
   instead.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk pointed to by GSI into the
   corresponding unchecked {,v}sprintf call when the object-size check
   can be dropped.  Return false if a normal call should be emitted
   instead.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      /* With a known object size we need a known output length
	 strictly smaller than SIZE (room for the NUL) to drop the
	 check.  */
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3188 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3189 ORIG may be null if this is a 2-argument call. We don't attempt to
3190 simplify calls with more than 3 arguments.
3192 Return true if simplification was possible, otherwise false. */
3194 bool
3195 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3197 gimple *stmt = gsi_stmt (*gsi);
3198 tree dest = gimple_call_arg (stmt, 0);
3199 tree fmt = gimple_call_arg (stmt, 1);
3200 tree orig = NULL_TREE;
3201 const char *fmt_str = NULL;
3203 /* Verify the required arguments in the original call. We deal with two
3204 types of sprintf() calls: 'sprintf (str, fmt)' and
3205 'sprintf (dest, "%s", orig)'. */
3206 if (gimple_call_num_args (stmt) > 3)
3207 return false;
3209 if (gimple_call_num_args (stmt) == 3)
3210 orig = gimple_call_arg (stmt, 2);
3212 /* Check whether the format is a literal string constant. */
3213 fmt_str = c_getstr (fmt);
3214 if (fmt_str == NULL)
3215 return false;
3217 if (!init_target_chars ())
3218 return false;
3220 /* If the format doesn't contain % args or %%, use strcpy. */
3221 if (strchr (fmt_str, target_percent) == NULL)
3223 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3225 if (!fn)
3226 return false;
3228 /* Don't optimize sprintf (buf, "abc", ptr++). */
3229 if (orig)
3230 return false;
3232 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3233 'format' is known to contain no % formats. */
3234 gimple_seq stmts = NULL;
3235 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3237 /* Propagate the NO_WARNING bit to avoid issuing the same
3238 warning more than once. */
3239 if (gimple_no_warning_p (stmt))
3240 gimple_set_no_warning (repl, true);
3242 gimple_seq_add_stmt_without_update (&stmts, repl);
3243 if (tree lhs = gimple_call_lhs (stmt))
/* sprintf returns the number of characters written; for a literal
   format without % that is simply strlen (fmt_str).  */
3245 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3246 strlen (fmt_str)));
3247 gimple_seq_add_stmt_without_update (&stmts, repl);
3248 gsi_replace_with_seq_vops (gsi, stmts);
3249 /* gsi now points at the assignment to the lhs, get a
3250 stmt iterator to the strcpy call.
3251 ??? We can't use gsi_for_stmt as that doesn't work when the
3252 CFG isn't built yet. */
3253 gimple_stmt_iterator gsi2 = *gsi;
3254 gsi_prev (&gsi2);
3255 fold_stmt (&gsi2);
3257 else
3259 gsi_replace_with_seq_vops (gsi, stmts);
3260 fold_stmt (gsi);
3262 return true;
3265 /* If the format is "%s", use strcpy if the result isn't used. */
3266 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3268 tree fn;
3269 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3271 if (!fn)
3272 return false;
3274 /* Don't crash on sprintf (str1, "%s"). */
3275 if (!orig)
3276 return false;
3278 tree orig_len = NULL_TREE;
3279 if (gimple_call_lhs (stmt))
/* The result is used, so the length of ORIG must be known in order
   to materialize the return value.  */
3281 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3282 if (!orig_len)
3283 return false;
3286 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3287 gimple_seq stmts = NULL;
3288 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3290 /* Propagate the NO_WARNING bit to avoid issuing the same
3291 warning more than once. */
3292 if (gimple_no_warning_p (stmt))
3293 gimple_set_no_warning (repl, true);
3295 gimple_seq_add_stmt_without_update (&stmts, repl);
3296 if (tree lhs = gimple_call_lhs (stmt))
3298 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3299 TREE_TYPE (orig_len)))
3300 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3301 repl = gimple_build_assign (lhs, orig_len);
3302 gimple_seq_add_stmt_without_update (&stmts, repl);
3303 gsi_replace_with_seq_vops (gsi, stmts);
3304 /* gsi now points at the assignment to the lhs, get a
3305 stmt iterator to the strcpy call.
3306 ??? We can't use gsi_for_stmt as that doesn't work when the
3307 CFG isn't built yet. */
3308 gimple_stmt_iterator gsi2 = *gsi;
3309 gsi_prev (&gsi2);
3310 fold_stmt (&gsi2);
3312 else
3314 gsi_replace_with_seq_vops (gsi, stmts);
3315 fold_stmt (gsi);
3317 return true;
3319 return false;
3322 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3323 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3324 attempt to simplify calls with more than 4 arguments.
3326 Return true if simplification was possible, otherwise false. */
3328 bool
3329 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3331 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3332 tree dest = gimple_call_arg (stmt, 0);
3333 tree destsize = gimple_call_arg (stmt, 1);
3334 tree fmt = gimple_call_arg (stmt, 2);
3335 tree orig = NULL_TREE;
3336 const char *fmt_str = NULL;
3338 if (gimple_call_num_args (stmt) > 4)
3339 return false;
3341 if (gimple_call_num_args (stmt) == 4)
3342 orig = gimple_call_arg (stmt, 3);
/* The destination size must be a compile-time constant so the
   truncation behavior of snprintf can be proven not to trigger.  */
3344 if (!tree_fits_uhwi_p (destsize))
3345 return false;
3346 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3348 /* Check whether the format is a literal string constant. */
3349 fmt_str = c_getstr (fmt);
3350 if (fmt_str == NULL)
3351 return false;
3353 if (!init_target_chars ())
3354 return false;
3356 /* If the format doesn't contain % args or %%, use strcpy. */
3357 if (strchr (fmt_str, target_percent) == NULL)
3359 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3360 if (!fn)
3361 return false;
3363 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3364 if (orig)
3365 return false;
3367 /* We could expand this as
3368 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3369 or to
3370 memcpy (str, fmt_with_nul_at_cstm1, cst);
3371 but in the former case that might increase code size
3372 and in the latter case grow .rodata section too much.
3373 So punt for now. */
3374 size_t len = strlen (fmt_str);
3375 if (len >= destlen)
3376 return false;
3378 gimple_seq stmts = NULL;
3379 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3380 gimple_seq_add_stmt_without_update (&stmts, repl);
3381 if (tree lhs = gimple_call_lhs (stmt))
/* snprintf returns the number of characters that would have been
   written; with no truncation that is the format's length.  */
3383 repl = gimple_build_assign (lhs,
3384 build_int_cst (TREE_TYPE (lhs), len));
3385 gimple_seq_add_stmt_without_update (&stmts, repl);
3386 gsi_replace_with_seq_vops (gsi, stmts);
3387 /* gsi now points at the assignment to the lhs, get a
3388 stmt iterator to the strcpy call.
3389 ??? We can't use gsi_for_stmt as that doesn't work when the
3390 CFG isn't built yet. */
3391 gimple_stmt_iterator gsi2 = *gsi;
3392 gsi_prev (&gsi2);
3393 fold_stmt (&gsi2);
3395 else
3397 gsi_replace_with_seq_vops (gsi, stmts);
3398 fold_stmt (gsi);
3400 return true;
3403 /* If the format is "%s", use strcpy if the result isn't used. */
3404 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3406 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3407 if (!fn)
3408 return false;
3410 /* Don't crash on snprintf (str1, cst, "%s"). */
3411 if (!orig)
3412 return false;
3414 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3415 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3416 return false;
3418 /* We could expand this as
3419 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3420 or to
3421 memcpy (str1, str2_with_nul_at_cstm1, cst);
3422 but in the former case that might increase code size
3423 and in the latter case grow .rodata section too much.
3424 So punt for now. */
3425 if (compare_tree_int (orig_len, destlen) >= 0)
3426 return false;
3428 /* Convert snprintf (str1, cst, "%s", str2) into
3429 strcpy (str1, str2) if strlen (str2) < cst. */
3430 gimple_seq stmts = NULL;
3431 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3432 gimple_seq_add_stmt_without_update (&stmts, repl);
3433 if (tree lhs = gimple_call_lhs (stmt))
3435 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3436 TREE_TYPE (orig_len)))
3437 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3438 repl = gimple_build_assign (lhs, orig_len);
3439 gimple_seq_add_stmt_without_update (&stmts, repl);
3440 gsi_replace_with_seq_vops (gsi, stmts);
3441 /* gsi now points at the assignment to the lhs, get a
3442 stmt iterator to the strcpy call.
3443 ??? We can't use gsi_for_stmt as that doesn't work when the
3444 CFG isn't built yet. */
3445 gimple_stmt_iterator gsi2 = *gsi;
3446 gsi_prev (&gsi2);
3447 fold_stmt (&gsi2);
3449 else
3451 gsi_replace_with_seq_vops (gsi, stmts);
3452 fold_stmt (gsi);
3454 return true;
3456 return false;
3459 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3460 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3461 more than 3 arguments, and ARG may be null in the 2-argument case.
   Return true if the call was simplified in place, false if a normal
   call should be emitted instead.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */
3467 static bool
3468 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3469 tree fp, tree fmt, tree arg,
3470 enum built_in_function fcode)
3472 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3473 tree fn_fputc, fn_fputs;
3474 const char *fmt_str = NULL;
3476 /* If the return value is used, don't do the transformation. */
3477 if (gimple_call_lhs (stmt) != NULL_TREE)
3478 return false;
3480 /* Check whether the format is a literal string constant. */
3481 fmt_str = c_getstr (fmt);
3482 if (fmt_str == NULL)
3483 return false;
3485 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3487 /* If we're using an unlocked function, assume the other
3488 unlocked functions exist explicitly. */
3489 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3490 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3492 else
3494 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3495 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3498 if (!init_target_chars ())
3499 return false;
3501 /* If the format doesn't contain % args or %%, use fputs. */
3502 if (strchr (fmt_str, target_percent) == NULL)
3504 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3505 && arg)
3506 return false;
3508 /* If the format specifier was "", fprintf does nothing. */
3509 if (fmt_str[0] == '\0')
3511 replace_call_with_value (gsi, NULL_TREE);
3512 return true;
3515 /* When "string" doesn't contain %, replace all cases of
3516 fprintf (fp, string) with fputs (string, fp). The fputs
3517 builtin will take care of special cases like length == 1. */
3518 if (fn_fputs)
3520 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3521 replace_call_with_call_and_fold (gsi, repl);
3522 return true;
3526 /* The other optimizations can be done only on the non-va_list variants. */
3527 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3528 return false;
3530 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3531 else if (strcmp (fmt_str, target_percent_s) == 0)
3533 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3534 return false;
3535 if (fn_fputs)
3537 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3538 replace_call_with_call_and_fold (gsi, repl);
3539 return true;
3543 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3544 else if (strcmp (fmt_str, target_percent_c) == 0)
3546 if (!arg
3547 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3548 return false;
3549 if (fn_fputc)
3551 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3552 replace_call_with_call_and_fold (gsi, repl);
3553 return true;
3557 return false;
3560 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3561 FMT and ARG are the arguments to the call; we don't fold cases with
3562 more than 2 arguments, and ARG may be null if this is a 1-argument case.
   Return true if the call was simplified in place, false if a normal
   call should be emitted instead.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */
3568 static bool
3569 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3570 tree arg, enum built_in_function fcode)
3572 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3573 tree fn_putchar, fn_puts, newarg;
3574 const char *fmt_str = NULL;
3576 /* If the return value is used, don't do the transformation. */
3577 if (gimple_call_lhs (stmt) != NULL_TREE)
3578 return false;
3580 /* Check whether the format is a literal string constant. */
3581 fmt_str = c_getstr (fmt);
3582 if (fmt_str == NULL)
3583 return false;
3585 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3587 /* If we're using an unlocked function, assume the other
3588 unlocked functions exist explicitly. */
3589 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3590 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3592 else
3594 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3595 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3598 if (!init_target_chars ())
3599 return false;
/* Handle the two cases that can be turned into puts/putchar: the
   format is exactly "%s" (with a constant string argument), or the
   format contains no % at all.  */
3601 if (strcmp (fmt_str, target_percent_s) == 0
3602 || strchr (fmt_str, target_percent) == NULL)
3604 const char *str;
3606 if (strcmp (fmt_str, target_percent_s) == 0)
3608 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3609 return false;
3611 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3612 return false;
3614 str = c_getstr (arg);
3615 if (str == NULL)
3616 return false;
3618 else
3620 /* The format specifier doesn't contain any '%' characters. */
3621 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3622 && arg)
3623 return false;
3624 str = fmt_str;
3627 /* If the string was "", printf does nothing. */
3628 if (str[0] == '\0')
3630 replace_call_with_value (gsi, NULL_TREE);
3631 return true;
3634 /* If the string has length of 1, call putchar. */
3635 if (str[1] == '\0')
3637 /* Given printf("c"), (where c is any one character,)
3638 convert "c"[0] to an int and pass that to the replacement
3639 function. */
3640 newarg = build_int_cst (integer_type_node, str[0]);
3641 if (fn_putchar)
3643 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3644 replace_call_with_call_and_fold (gsi, repl);
3645 return true;
3648 else
3650 /* If the string was "string\n", call puts("string"). */
3651 size_t len = strlen (str);
3652 if ((unsigned char)str[len - 1] == target_newline
3653 && (size_t) (int) len == len
3654 && (int) len > 0)
3656 char *newstr;
3658 /* Create a NUL-terminated string that's one char shorter
3659 than the original, stripping off the trailing '\n'. */
3660 newstr = xstrdup (str);
3661 newstr[len - 1] = '\0';
3662 newarg = build_string_literal (len, newstr);
3663 free (newstr);
3664 if (fn_puts)
3666 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3667 replace_call_with_call_and_fold (gsi, repl);
3668 return true;
3671 else
3672 /* We'd like to arrange to call fputs(string,stdout) here,
3673 but we need stdout and don't have a way to get it yet. */
3674 return false;
3678 /* The other optimizations can be done only on the non-va_list variants. */
3679 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3680 return false;
3682 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3683 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3685 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3686 return false;
3687 if (fn_puts)
3689 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3690 replace_call_with_call_and_fold (gsi, repl);
3691 return true;
3695 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3696 else if (strcmp (fmt_str, target_percent_c) == 0)
3698 if (!arg || ! useless_type_conversion_p (integer_type_node,
3699 TREE_TYPE (arg)))
3700 return false;
3701 if (fn_putchar)
3703 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3704 replace_call_with_call_and_fold (gsi, repl);
3705 return true;
3709 return false;
/* Fold a call to __builtin_strlen at *GSI.  If the length of the
   argument collapses to a single known value, replace the call with
   that constant and return true.  Otherwise record the computed
   [0, MAXLEN] range on the call's lhs and return false.  */
3716 static bool
3717 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3719 gimple *stmt = gsi_stmt (*gsi);
3720 tree arg = gimple_call_arg (stmt, 0);
3722 wide_int minlen;
3723 wide_int maxlen;
3725 c_strlen_data lendata = { };
3726 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3727 && !lendata.decl
3728 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3729 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3731 /* The range of lengths refers to either a single constant
3732 string or to the longest and shortest constant string
3733 referenced by the argument of the strlen() call, or to
3734 the strings that can possibly be stored in the arrays
3735 the argument refers to. */
3736 minlen = wi::to_wide (lendata.minlen);
3737 maxlen = wi::to_wide (lendata.maxlen);
3739 else
/* No usable range: fall back to [0, max_object_size () - 2].  */
3741 unsigned prec = TYPE_PRECISION (sizetype);
3743 minlen = wi::shwi (0, prec);
3744 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3747 if (minlen == maxlen)
3749 /* Fold the strlen call to a constant. */
3750 tree type = TREE_TYPE (lendata.minlen);
3751 tree len = force_gimple_operand_gsi (gsi,
3752 wide_int_to_tree (type, minlen),
3753 true, NULL, true, GSI_SAME_STMT);
3754 replace_call_with_value (gsi, len);
3755 return true;
3758 /* Set the strlen() range to [0, MAXLEN]. */
3759 if (tree lhs = gimple_call_lhs (stmt))
3760 set_strlen_range (lhs, maxlen);
3762 return false;
3765 /* Fold a call to __builtin_acc_on_device. */
3767 static bool
3768 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3770 /* Defer folding until we know which compiler we're in. */
3771 if (symtab->state != EXPANSION)
3772 return false;
3774 unsigned val_host = GOMP_DEVICE_HOST;
3775 unsigned val_dev = GOMP_DEVICE_NONE;
3777 #ifdef ACCEL_COMPILER
3778 val_host = GOMP_DEVICE_NOT_HOST;
3779 val_dev = ACCEL_COMPILER_acc_device;
3780 #endif
3782 location_t loc = gimple_location (gsi_stmt (*gsi));
3784 tree host_eq = make_ssa_name (boolean_type_node);
3785 gimple *host_ass = gimple_build_assign
3786 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3787 gimple_set_location (host_ass, loc);
3788 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3790 tree dev_eq = make_ssa_name (boolean_type_node);
3791 gimple *dev_ass = gimple_build_assign
3792 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3793 gimple_set_location (dev_ass, loc);
3794 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3796 tree result = make_ssa_name (boolean_type_node);
3797 gimple *result_ass = gimple_build_assign
3798 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3799 gimple_set_location (result_ass, loc);
3800 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3802 replace_call_with_value (gsi, result);
3804 return true;
3807 /* Fold realloc (0, n) -> malloc (n). */
3809 static bool
3810 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3812 gimple *stmt = gsi_stmt (*gsi);
3813 tree arg = gimple_call_arg (stmt, 0);
3814 tree size = gimple_call_arg (stmt, 1);
3816 if (operand_equal_p (arg, null_pointer_node, 0))
3818 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3819 if (fn_malloc)
3821 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3822 replace_call_with_call_and_fold (gsi, repl);
3823 return true;
3826 return false;
3829 /* Fold the non-target builtin at *GSI and return whether any simplification
3830 was made. */
3832 static bool
3833 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3835 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3836 tree callee = gimple_call_fndecl (stmt);
3838 /* Give up for always_inline inline builtins until they are
3839 inlined. */
3840 if (avoid_folding_inline_builtin (callee))
3841 return false;
/* Number of actual call arguments; the printf-family cases below
   dispatch on it to find the optional trailing argument.  */
3843 unsigned n = gimple_call_num_args (stmt);
3844 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
/* Dispatch to the per-builtin folder.  Each helper returns true iff it
   replaced or simplified the statement at *GSI.  */
3845 switch (fcode)
3847 case BUILT_IN_BCMP:
3848 return gimple_fold_builtin_bcmp (gsi);
3849 case BUILT_IN_BCOPY:
3850 return gimple_fold_builtin_bcopy (gsi);
3851 case BUILT_IN_BZERO:
3852 return gimple_fold_builtin_bzero (gsi);
3854 case BUILT_IN_MEMSET:
3855 return gimple_fold_builtin_memset (gsi,
3856 gimple_call_arg (stmt, 1),
3857 gimple_call_arg (stmt, 2));
3858 case BUILT_IN_MEMCPY:
3859 case BUILT_IN_MEMPCPY:
3860 case BUILT_IN_MEMMOVE:
3861 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3862 gimple_call_arg (stmt, 1), fcode);
3863 case BUILT_IN_SPRINTF_CHK:
3864 case BUILT_IN_VSPRINTF_CHK:
3865 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3866 case BUILT_IN_STRCAT_CHK:
3867 return gimple_fold_builtin_strcat_chk (gsi);
3868 case BUILT_IN_STRNCAT_CHK:
3869 return gimple_fold_builtin_strncat_chk (gsi);
3870 case BUILT_IN_STRLEN:
3871 return gimple_fold_builtin_strlen (gsi);
3872 case BUILT_IN_STRCPY:
3873 return gimple_fold_builtin_strcpy (gsi,
3874 gimple_call_arg (stmt, 0),
3875 gimple_call_arg (stmt, 1));
3876 case BUILT_IN_STRNCPY:
3877 return gimple_fold_builtin_strncpy (gsi,
3878 gimple_call_arg (stmt, 0),
3879 gimple_call_arg (stmt, 1),
3880 gimple_call_arg (stmt, 2));
3881 case BUILT_IN_STRCAT:
3882 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3883 gimple_call_arg (stmt, 1));
3884 case BUILT_IN_STRNCAT:
3885 return gimple_fold_builtin_strncat (gsi);
3886 case BUILT_IN_INDEX:
3887 case BUILT_IN_STRCHR:
3888 return gimple_fold_builtin_strchr (gsi, false);
3889 case BUILT_IN_RINDEX:
3890 case BUILT_IN_STRRCHR:
3891 return gimple_fold_builtin_strchr (gsi, true);
3892 case BUILT_IN_STRSTR:
3893 return gimple_fold_builtin_strstr (gsi);
3894 case BUILT_IN_STRCMP:
3895 case BUILT_IN_STRCMP_EQ:
3896 case BUILT_IN_STRCASECMP:
3897 case BUILT_IN_STRNCMP:
3898 case BUILT_IN_STRNCMP_EQ:
3899 case BUILT_IN_STRNCASECMP:
3900 return gimple_fold_builtin_string_compare (gsi);
3901 case BUILT_IN_MEMCHR:
3902 return gimple_fold_builtin_memchr (gsi);
3903 case BUILT_IN_FPUTS:
3904 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3905 gimple_call_arg (stmt, 1), false);
3906 case BUILT_IN_FPUTS_UNLOCKED:
3907 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3908 gimple_call_arg (stmt, 1), true);
3909 case BUILT_IN_MEMCPY_CHK:
3910 case BUILT_IN_MEMPCPY_CHK:
3911 case BUILT_IN_MEMMOVE_CHK:
3912 case BUILT_IN_MEMSET_CHK:
3913 return gimple_fold_builtin_memory_chk (gsi,
3914 gimple_call_arg (stmt, 0),
3915 gimple_call_arg (stmt, 1),
3916 gimple_call_arg (stmt, 2),
3917 gimple_call_arg (stmt, 3),
3918 fcode);
3919 case BUILT_IN_STPCPY:
3920 return gimple_fold_builtin_stpcpy (gsi);
3921 case BUILT_IN_STRCPY_CHK:
3922 case BUILT_IN_STPCPY_CHK:
3923 return gimple_fold_builtin_stxcpy_chk (gsi,
3924 gimple_call_arg (stmt, 0),
3925 gimple_call_arg (stmt, 1),
3926 gimple_call_arg (stmt, 2),
3927 fcode);
3928 case BUILT_IN_STRNCPY_CHK:
3929 case BUILT_IN_STPNCPY_CHK:
3930 return gimple_fold_builtin_stxncpy_chk (gsi,
3931 gimple_call_arg (stmt, 0),
3932 gimple_call_arg (stmt, 1),
3933 gimple_call_arg (stmt, 2),
3934 gimple_call_arg (stmt, 3),
3935 fcode);
3936 case BUILT_IN_SNPRINTF_CHK:
3937 case BUILT_IN_VSNPRINTF_CHK:
3938 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
/* The *_CHK printf variants carry extra flag/objsize arguments in
   front of the format, hence the shifted argument indices below.  */
3940 case BUILT_IN_FPRINTF:
3941 case BUILT_IN_FPRINTF_UNLOCKED:
3942 case BUILT_IN_VFPRINTF:
3943 if (n == 2 || n == 3)
3944 return gimple_fold_builtin_fprintf (gsi,
3945 gimple_call_arg (stmt, 0),
3946 gimple_call_arg (stmt, 1),
3947 n == 3
3948 ? gimple_call_arg (stmt, 2)
3949 : NULL_TREE,
3950 fcode);
3951 break;
3952 case BUILT_IN_FPRINTF_CHK:
3953 case BUILT_IN_VFPRINTF_CHK:
3954 if (n == 3 || n == 4)
3955 return gimple_fold_builtin_fprintf (gsi,
3956 gimple_call_arg (stmt, 0),
3957 gimple_call_arg (stmt, 2),
3958 n == 4
3959 ? gimple_call_arg (stmt, 3)
3960 : NULL_TREE,
3961 fcode);
3962 break;
3963 case BUILT_IN_PRINTF:
3964 case BUILT_IN_PRINTF_UNLOCKED:
3965 case BUILT_IN_VPRINTF:
3966 if (n == 1 || n == 2)
3967 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3968 n == 2
3969 ? gimple_call_arg (stmt, 1)
3970 : NULL_TREE, fcode);
3971 break;
3972 case BUILT_IN_PRINTF_CHK:
3973 case BUILT_IN_VPRINTF_CHK:
3974 if (n == 2 || n == 3)
3975 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3976 n == 3
3977 ? gimple_call_arg (stmt, 2)
3978 : NULL_TREE, fcode);
3979 break;
3980 case BUILT_IN_ACC_ON_DEVICE:
3981 return gimple_fold_builtin_acc_on_device (gsi,
3982 gimple_call_arg (stmt, 0));
3983 case BUILT_IN_REALLOC:
3984 return gimple_fold_builtin_realloc (gsi);
3986 default:;
3989 /* Try the generic builtin folder. */
3990 bool ignore = (gimple_call_lhs (stmt) == NULL);
3991 tree result = fold_call_stmt (stmt, ignore);
3992 if (result)
3994 if (ignore)
3995 STRIP_NOPS (result);
3996 else
3997 result = fold_convert (gimple_call_return_type (stmt), result);
3998 if (!update_call_from_tree (gsi, result))
3999 gimplify_and_update_call_from_tree (gsi, result);
4000 return true;
4003 return false;
4006 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4007 function calls to constants, where possible. */
4009 static tree
4010 fold_internal_goacc_dim (const gimple *call)
4012 int axis = oacc_get_ifn_dim_arg (call);
4013 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4014 tree result = NULL_TREE;
4015 tree type = TREE_TYPE (gimple_call_lhs (call));
4017 switch (gimple_call_internal_fn (call))
4019 case IFN_GOACC_DIM_POS:
4020 /* If the size is 1, we know the answer. */
4021 if (size == 1)
4022 result = build_int_cst (type, 0);
4023 break;
4024 case IFN_GOACC_DIM_SIZE:
4025 /* If the size is not dynamic, we know the answer. */
4026 if (size)
4027 result = build_int_cst (type, size);
4028 break;
4029 default:
4030 break;
4033 return result;
4036 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4037 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4038 &var where var is only addressable because of such calls. */
4040 bool
4041 optimize_atomic_compare_exchange_p (gimple *stmt)
/* Basic shape checks: the optimization is off for -O0, when atomics
   are not being inlined, or under tsan/asan, and the call must be a
   normal builtin with virtual operands.  */
4043 if (gimple_call_num_args (stmt) != 6
4044 || !flag_inline_atomics
4045 || !optimize
4046 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4047 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4048 || !gimple_vdef (stmt)
4049 || !gimple_vuse (stmt))
4050 return false;
4052 tree fndecl = gimple_call_fndecl (stmt);
4053 switch (DECL_FUNCTION_CODE (fndecl))
4055 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4056 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4057 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4058 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4059 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4060 break;
4061 default:
4062 return false;
/* The "expected" argument must be the address of a local variable.  */
4065 tree expected = gimple_call_arg (stmt, 1);
4066 if (TREE_CODE (expected) != ADDR_EXPR
4067 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4068 return false;
4070 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4071 if (!is_gimple_reg_type (etype)
4072 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4073 || TREE_THIS_VOLATILE (etype)
4074 || VECTOR_TYPE_P (etype)
4075 || TREE_CODE (etype) == COMPLEX_TYPE
4076 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4077 might not preserve all the bits. See PR71716. */
4078 || SCALAR_FLOAT_TYPE_P (etype)
4079 || maybe_ne (TYPE_PRECISION (etype),
4080 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4081 return false;
/* The weak flag must be a literal 0 or 1 so it can be encoded into the
   internal-fn flag argument later.  */
4083 tree weak = gimple_call_arg (stmt, 3);
4084 if (!integer_zerop (weak) && !integer_onep (weak))
4085 return false;
4087 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4088 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4089 machine_mode mode = TYPE_MODE (itype);
/* The target must actually provide a compare-and-swap pattern for
   this mode.  */
4091 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4092 == CODE_FOR_nothing
4093 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4094 return false;
4096 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4097 return false;
4099 return true;
4102 /* Fold
4103 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4104 into
4105 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4106 i = IMAGPART_EXPR <t>;
4107 r = (_Bool) i;
4108 e = REALPART_EXPR <t>; */
4110 void
4111 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4113 gimple *stmt = gsi_stmt (*gsi);
4114 tree fndecl = gimple_call_fndecl (stmt);
4115 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4116 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4117 tree ctype = build_complex_type (itype);
4118 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4119 bool throws = false;
4120 edge e = NULL;
/* Load the current value of the "expected" variable so it can be
   passed by value to the internal function.  */
4121 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4122 expected);
4123 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4124 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4125 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4127 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4128 build1 (VIEW_CONVERT_EXPR, itype,
4129 gimple_assign_lhs (g)));
4130 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode the weak flag and the access size into one flag operand:
   w * 256 + N, matching the comment above.  */
4132 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4133 + int_size_in_bytes (itype);
4134 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4135 gimple_call_arg (stmt, 0),
4136 gimple_assign_lhs (g),
4137 gimple_call_arg (stmt, 2),
4138 build_int_cst (integer_type_node, flag),
4139 gimple_call_arg (stmt, 4),
4140 gimple_call_arg (stmt, 5));
4141 tree lhs = make_ssa_name (ctype);
4142 gimple_call_set_lhs (g, lhs);
/* Transfer the virtual operands from the original call.  */
4143 gimple_set_vdef (g, gimple_vdef (stmt));
4144 gimple_set_vuse (g, gimple_vuse (stmt));
4145 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4146 tree oldlhs = gimple_call_lhs (stmt);
4147 if (stmt_can_throw_internal (cfun, stmt))
/* If the original call could throw, the follow-up statements must be
   placed on the fallthru edge rather than after the call.  */
4149 throws = true;
4150 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4152 gimple_call_set_nothrow (as_a <gcall *> (g),
4153 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4154 gimple_call_set_lhs (stmt, NULL_TREE);
4155 gsi_replace (gsi, g, true);
4156 if (oldlhs)
/* r = (_Bool) IMAGPART_EXPR <t>;  */
4158 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4159 build1 (IMAGPART_EXPR, itype, lhs));
4160 if (throws)
4162 gsi_insert_on_edge_immediate (e, g);
4163 *gsi = gsi_for_stmt (g);
4165 else
4166 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4167 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4168 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>;  */
4170 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4171 build1 (REALPART_EXPR, itype, lhs));
4172 if (throws && oldlhs == NULL_TREE)
4174 gsi_insert_on_edge_immediate (e, g);
4175 *gsi = gsi_for_stmt (g);
4177 else
4178 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4179 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4181 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4182 VIEW_CONVERT_EXPR,
4183 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4184 gimple_assign_lhs (g)));
4185 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4187 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4188 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave *GSI pointing at the load of EXPECTED inserted first.  */
4189 *gsi = gsiret;
4192 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4193 doesn't fit into TYPE. The test for overflow should be regardless of
4194 -fwrapv, and even for unsigned types. */
4196 bool
4197 arith_overflowed_p (enum tree_code code, const_tree type,
4198 const_tree arg0, const_tree arg1)
4200 widest2_int warg0 = widest2_int_cst (arg0);
4201 widest2_int warg1 = widest2_int_cst (arg1);
4202 widest2_int wres;
4203 switch (code)
4205 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4206 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4207 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4208 default: gcc_unreachable ();
4210 signop sign = TYPE_SIGN (type);
4211 if (sign == UNSIGNED && wi::neg_p (wres))
4212 return true;
4213 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true the statement itself must be reused; only
   argument/chain folding and devirtualization to a direct call are
   performed, no builtin or internal-function folding.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined: give it a default
			     definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets: the call can never be executed,
		     replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise fold *& in the chain expression itself.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE/CPLX_RESULT describe how to constant-fold the
	 arithmetic internal functions below; RESULT/OVERFLOW collect
	 the folded value.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		/* Check statically known to pass: drop it.  */
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the *_OVERFLOW ifns the result type is the element
		 type of the complex lhs.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Pack value and overflow flag into the complex result.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4545 /* Return true whether NAME has a use on STMT. */
4547 static bool
4548 has_use_on_stmt (tree name, gimple *stmt)
4550 imm_use_iterator iter;
4551 use_operand_p use_p;
4552 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4553 if (USE_STMT (use_p) == stmt)
4554 return true;
4555 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result described by RES_OP
   (code and operands) and the associated statements in *SEQ.  Does the
   replacement according to INPLACE and returns true if the operation
   succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Likewise for SSA names inside a comparison first operand.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result into a temporary and compare it
	     against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* Reuse the assignment when the new RHS does not need more
	 operand slots than the statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same function, possibly simplified arguments: update in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; returns true if *T
   was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the element and verify the
		     access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references to get at the base address.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used by gimple_simplify to look
   through SSA definitions; overflow warnings are deferred across the
   whole folding and only emitted if something changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* FALLTHRU to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells gimple_simplify not to look through the
     SSA definition.  */
  return NULL_TREE;
}
5152 /* Valueization callback that ends up following single-use SSA edges only. */
5154 tree
5155 follow_single_use_edges (tree val)
5157 if (TREE_CODE (val) == SSA_NAME
5158 && !has_single_use (val))
5159 return NULL_TREE;
5160 return val;
/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  /* Identity: every SSA definition may be looked through.  */
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Overload that folds using the caller-supplied VALUEIZE callback to
   look through SSA definitions.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
5190 /* Perform the minimal folding on statement *GSI. Only operations like
5191 *&x created by constant propagation are handled. The statement cannot
5192 be replaced with a new one. Return true if the statement was
5193 changed, false otherwise.
5194 The statement *GSI should be in valid gimple form but may
5195 be in unfolded state as resulting from for example constant propagation
5196 which can produce *&x = 0. */
5198 bool
5199 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5201 gimple *stmt = gsi_stmt (*gsi);
5202 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5203 gcc_assert (gsi_stmt (*gsi) == stmt);
5204 return changed;
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  Handles the
   constant, SSA_NAME and comparison cases; everything else gives up.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	/* !name is canonicalized as name == 0.  */
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	/* name is canonicalized as name != 0.  */
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same polarity, recurse with C.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): inverted polarity, recurse with
	     the inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5312 /* Check to see if two boolean expressions OP1 and OP2 are logically
5313 equivalent. */
5315 static bool
5316 same_bool_result_p (const_tree op1, const_tree op2)
5318 /* Simple cases first. */
5319 if (operand_equal_p (op1, op2, 0))
5320 return true;
5322 /* Check the cases where at least one of the operands is a comparison.
5323 These are a bit smarter than operand_equal_p in that they apply some
5324 identifies on SSA_NAMEs. */
5325 if (COMPARISON_CLASS_P (op2)
5326 && same_bool_comparison_p (op1, TREE_CODE (op2),
5327 TREE_OPERAND (op2, 0),
5328 TREE_OPERAND (op2, 1)))
5329 return true;
5330 if (COMPARISON_CLASS_P (op1)
5331 && same_bool_comparison_p (op2, TREE_CODE (op1),
5332 TREE_OPERAND (op1, 0),
5333 TREE_OPERAND (op1, 1)))
5334 return true;
5336 /* Default case. */
5337 return false;
5340 /* Forward declarations for some mutually recursive functions. */
5342 static tree
5343 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5344 enum tree_code code2, tree op2a, tree op2b);
5345 static tree
5346 and_var_with_comparison (tree var, bool invert,
5347 enum tree_code code2, tree op2a, tree op2b);
5348 static tree
5349 and_var_with_comparison_1 (gimple *stmt,
5350 enum tree_code code2, tree op2a, tree op2b);
5351 static tree
5352 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5353 enum tree_code code2, tree op2a, tree op2b);
5354 static tree
5355 or_var_with_comparison (tree var, bool invert,
5356 enum tree_code code2, tree op2a, tree op2b);
5357 static tree
5358 or_var_with_comparison_1 (gimple *stmt,
5359 enum tree_code code2, tree op2a, tree op2b);
5361 /* Helper function for and_comparisons_1: try to simplify the AND of the
5362 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5363 If INVERT is true, invert the value of the VAR before doing the AND.
5364 Return NULL_EXPR if we can't simplify this to a single expression. */
5366 static tree
5367 and_var_with_comparison (tree var, bool invert,
5368 enum tree_code code2, tree op2a, tree op2b)
5370 tree t;
5371 gimple *stmt = SSA_NAME_DEF_STMT (var);
5373 /* We can only deal with variables whose definitions are assignments. */
5374 if (!is_gimple_assign (stmt))
5375 return NULL_TREE;
5377 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5378 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5379 Then we only have to consider the simpler non-inverted cases. */
5380 if (invert)
5381 t = or_var_with_comparison_1 (stmt,
5382 invert_tree_comparison (code2, false),
5383 op2a, op2b);
5384 else
5385 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5386 return canonicalize_bool (t, invert);
5389 /* Try to simplify the AND of the ssa variable defined by the assignment
5390 STMT with the comparison specified by (OP2A CODE2 OP2B).
5391 Return NULL_EXPR if we can't simplify this to a single expression. */
5393 static tree
5394 and_var_with_comparison_1 (gimple *stmt,
5395 enum tree_code code2, tree op2a, tree op2b)
5397 tree var = gimple_assign_lhs (stmt);
5398 tree true_test_var = NULL_TREE;
5399 tree false_test_var = NULL_TREE;
5400 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5402 /* Check for identities like (var AND (var == 0)) => false. */
5403 if (TREE_CODE (op2a) == SSA_NAME
5404 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5406 if ((code2 == NE_EXPR && integer_zerop (op2b))
5407 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5409 true_test_var = op2a;
5410 if (var == true_test_var)
5411 return var;
5413 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5414 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5416 false_test_var = op2a;
5417 if (var == false_test_var)
5418 return boolean_false_node;
5422 /* If the definition is a comparison, recurse on it. */
5423 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5425 tree t = and_comparisons_1 (innercode,
5426 gimple_assign_rhs1 (stmt),
5427 gimple_assign_rhs2 (stmt),
5428 code2,
5429 op2a,
5430 op2b);
5431 if (t)
5432 return t;
5435 /* If the definition is an AND or OR expression, we may be able to
5436 simplify by reassociating. */
5437 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5438 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5440 tree inner1 = gimple_assign_rhs1 (stmt);
5441 tree inner2 = gimple_assign_rhs2 (stmt);
5442 gimple *s;
5443 tree t;
5444 tree partial = NULL_TREE;
5445 bool is_and = (innercode == BIT_AND_EXPR);
5447 /* Check for boolean identities that don't require recursive examination
5448 of inner1/inner2:
5449 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5450 inner1 AND (inner1 OR inner2) => inner1
5451 !inner1 AND (inner1 AND inner2) => false
5452 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5453 Likewise for similar cases involving inner2. */
5454 if (inner1 == true_test_var)
5455 return (is_and ? var : inner1);
5456 else if (inner2 == true_test_var)
5457 return (is_and ? var : inner2);
5458 else if (inner1 == false_test_var)
5459 return (is_and
5460 ? boolean_false_node
5461 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5462 else if (inner2 == false_test_var)
5463 return (is_and
5464 ? boolean_false_node
5465 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5467 /* Next, redistribute/reassociate the AND across the inner tests.
5468 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5469 if (TREE_CODE (inner1) == SSA_NAME
5470 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5471 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5472 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5473 gimple_assign_rhs1 (s),
5474 gimple_assign_rhs2 (s),
5475 code2, op2a, op2b)))
5477 /* Handle the AND case, where we are reassociating:
5478 (inner1 AND inner2) AND (op2a code2 op2b)
5479 => (t AND inner2)
5480 If the partial result t is a constant, we win. Otherwise
5481 continue on to try reassociating with the other inner test. */
5482 if (is_and)
5484 if (integer_onep (t))
5485 return inner2;
5486 else if (integer_zerop (t))
5487 return boolean_false_node;
5490 /* Handle the OR case, where we are redistributing:
5491 (inner1 OR inner2) AND (op2a code2 op2b)
5492 => (t OR (inner2 AND (op2a code2 op2b))) */
5493 else if (integer_onep (t))
5494 return boolean_true_node;
5496 /* Save partial result for later. */
5497 partial = t;
5500 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5501 if (TREE_CODE (inner2) == SSA_NAME
5502 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5503 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5504 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5505 gimple_assign_rhs1 (s),
5506 gimple_assign_rhs2 (s),
5507 code2, op2a, op2b)))
5509 /* Handle the AND case, where we are reassociating:
5510 (inner1 AND inner2) AND (op2a code2 op2b)
5511 => (inner1 AND t) */
5512 if (is_and)
5514 if (integer_onep (t))
5515 return inner1;
5516 else if (integer_zerop (t))
5517 return boolean_false_node;
5518 /* If both are the same, we can apply the identity
5519 (x AND x) == x. */
5520 else if (partial && same_bool_result_p (t, partial))
5521 return t;
5524 /* Handle the OR case. where we are redistributing:
5525 (inner1 OR inner2) AND (op2a code2 op2b)
5526 => (t OR (inner1 AND (op2a code2 op2b)))
5527 => (t OR partial) */
5528 else
5530 if (integer_onep (t))
5531 return boolean_true_node;
5532 else if (partial)
5534 /* We already got a simplification for the other
5535 operand to the redistributed OR expression. The
5536 interesting case is when at least one is false.
5537 Or, if both are the same, we can apply the identity
5538 (x OR x) == x. */
5539 if (integer_zerop (partial))
5540 return t;
5541 else if (integer_zerop (t))
5542 return partial;
5543 else if (same_bool_result_p (t, partial))
5544 return t;
5549 return NULL_TREE;
5552 /* Try to simplify the AND of two comparisons defined by
5553 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5554 If this can be done without constructing an intermediate value,
5555 return the resulting tree; otherwise NULL_TREE is returned.
5556 This function is deliberately asymmetric as it recurses on SSA_DEFs
5557 in the first comparison but not the second. */
5559 static tree
5560 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5561 enum tree_code code2, tree op2a, tree op2b)
5563 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5565 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5566 if (operand_equal_p (op1a, op2a, 0)
5567 && operand_equal_p (op1b, op2b, 0))
5569 /* Result will be either NULL_TREE, or a combined comparison. */
5570 tree t = combine_comparisons (UNKNOWN_LOCATION,
5571 TRUTH_ANDIF_EXPR, code1, code2,
5572 truth_type, op1a, op1b);
5573 if (t)
5574 return t;
5577 /* Likewise the swapped case of the above. */
5578 if (operand_equal_p (op1a, op2b, 0)
5579 && operand_equal_p (op1b, op2a, 0))
5581 /* Result will be either NULL_TREE, or a combined comparison. */
5582 tree t = combine_comparisons (UNKNOWN_LOCATION,
5583 TRUTH_ANDIF_EXPR, code1,
5584 swap_tree_comparison (code2),
5585 truth_type, op1a, op1b);
5586 if (t)
5587 return t;
5590 /* If both comparisons are of the same value against constants, we might
5591 be able to merge them. */
5592 if (operand_equal_p (op1a, op2a, 0)
5593 && TREE_CODE (op1b) == INTEGER_CST
5594 && TREE_CODE (op2b) == INTEGER_CST)
5596 int cmp = tree_int_cst_compare (op1b, op2b);
5598 /* If we have (op1a == op1b), we should either be able to
5599 return that or FALSE, depending on whether the constant op1b
5600 also satisfies the other comparison against op2b. */
5601 if (code1 == EQ_EXPR)
5603 bool done = true;
5604 bool val;
5605 switch (code2)
5607 case EQ_EXPR: val = (cmp == 0); break;
5608 case NE_EXPR: val = (cmp != 0); break;
5609 case LT_EXPR: val = (cmp < 0); break;
5610 case GT_EXPR: val = (cmp > 0); break;
5611 case LE_EXPR: val = (cmp <= 0); break;
5612 case GE_EXPR: val = (cmp >= 0); break;
5613 default: done = false;
5615 if (done)
5617 if (val)
5618 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5619 else
5620 return boolean_false_node;
5623 /* Likewise if the second comparison is an == comparison. */
5624 else if (code2 == EQ_EXPR)
5626 bool done = true;
5627 bool val;
5628 switch (code1)
5630 case EQ_EXPR: val = (cmp == 0); break;
5631 case NE_EXPR: val = (cmp != 0); break;
5632 case LT_EXPR: val = (cmp > 0); break;
5633 case GT_EXPR: val = (cmp < 0); break;
5634 case LE_EXPR: val = (cmp >= 0); break;
5635 case GE_EXPR: val = (cmp <= 0); break;
5636 default: done = false;
5638 if (done)
5640 if (val)
5641 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5642 else
5643 return boolean_false_node;
5647 /* Same business with inequality tests. */
5648 else if (code1 == NE_EXPR)
5650 bool val;
5651 switch (code2)
5653 case EQ_EXPR: val = (cmp != 0); break;
5654 case NE_EXPR: val = (cmp == 0); break;
5655 case LT_EXPR: val = (cmp >= 0); break;
5656 case GT_EXPR: val = (cmp <= 0); break;
5657 case LE_EXPR: val = (cmp > 0); break;
5658 case GE_EXPR: val = (cmp < 0); break;
5659 default:
5660 val = false;
5662 if (val)
5663 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5665 else if (code2 == NE_EXPR)
5667 bool val;
5668 switch (code1)
5670 case EQ_EXPR: val = (cmp == 0); break;
5671 case NE_EXPR: val = (cmp != 0); break;
5672 case LT_EXPR: val = (cmp <= 0); break;
5673 case GT_EXPR: val = (cmp >= 0); break;
5674 case LE_EXPR: val = (cmp < 0); break;
5675 case GE_EXPR: val = (cmp > 0); break;
5676 default:
5677 val = false;
5679 if (val)
5680 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5683 /* Chose the more restrictive of two < or <= comparisons. */
5684 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5685 && (code2 == LT_EXPR || code2 == LE_EXPR))
5687 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5688 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5689 else
5690 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5693 /* Likewise chose the more restrictive of two > or >= comparisons. */
5694 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5695 && (code2 == GT_EXPR || code2 == GE_EXPR))
5697 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5698 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5699 else
5700 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5703 /* Check for singleton ranges. */
5704 else if (cmp == 0
5705 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5706 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5707 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5709 /* Check for disjoint ranges. */
5710 else if (cmp <= 0
5711 && (code1 == LT_EXPR || code1 == LE_EXPR)
5712 && (code2 == GT_EXPR || code2 == GE_EXPR))
5713 return boolean_false_node;
5714 else if (cmp >= 0
5715 && (code1 == GT_EXPR || code1 == GE_EXPR)
5716 && (code2 == LT_EXPR || code2 == LE_EXPR))
5717 return boolean_false_node;
5720 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5721 NAME's definition is a truth value. See if there are any simplifications
5722 that can be done against the NAME's definition. */
5723 if (TREE_CODE (op1a) == SSA_NAME
5724 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5725 && (integer_zerop (op1b) || integer_onep (op1b)))
5727 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5728 || (code1 == NE_EXPR && integer_onep (op1b)));
5729 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5730 switch (gimple_code (stmt))
5732 case GIMPLE_ASSIGN:
5733 /* Try to simplify by copy-propagating the definition. */
5734 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5736 case GIMPLE_PHI:
5737 /* If every argument to the PHI produces the same result when
5738 ANDed with the second comparison, we win.
5739 Do not do this unless the type is bool since we need a bool
5740 result here anyway. */
5741 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5743 tree result = NULL_TREE;
5744 unsigned i;
5745 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5747 tree arg = gimple_phi_arg_def (stmt, i);
5749 /* If this PHI has itself as an argument, ignore it.
5750 If all the other args produce the same result,
5751 we're still OK. */
5752 if (arg == gimple_phi_result (stmt))
5753 continue;
5754 else if (TREE_CODE (arg) == INTEGER_CST)
5756 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5758 if (!result)
5759 result = boolean_false_node;
5760 else if (!integer_zerop (result))
5761 return NULL_TREE;
5763 else if (!result)
5764 result = fold_build2 (code2, boolean_type_node,
5765 op2a, op2b);
5766 else if (!same_bool_comparison_p (result,
5767 code2, op2a, op2b))
5768 return NULL_TREE;
5770 else if (TREE_CODE (arg) == SSA_NAME
5771 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5773 tree temp;
5774 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5775 /* In simple cases we can look through PHI nodes,
5776 but we have to be careful with loops.
5777 See PR49073. */
5778 if (! dom_info_available_p (CDI_DOMINATORS)
5779 || gimple_bb (def_stmt) == gimple_bb (stmt)
5780 || dominated_by_p (CDI_DOMINATORS,
5781 gimple_bb (def_stmt),
5782 gimple_bb (stmt)))
5783 return NULL_TREE;
5784 temp = and_var_with_comparison (arg, invert, code2,
5785 op2a, op2b);
5786 if (!temp)
5787 return NULL_TREE;
5788 else if (!result)
5789 result = temp;
5790 else if (!same_bool_result_p (result, temp))
5791 return NULL_TREE;
5793 else
5794 return NULL_TREE;
5796 return result;
5799 default:
5800 break;
5803 return NULL_TREE;
5806 /* Try to simplify the AND of two comparisons, specified by
5807 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5808 If this can be simplified to a single expression (without requiring
5809 introducing more SSA variables to hold intermediate values),
5810 return the resulting tree. Otherwise return NULL_TREE.
5811 If the result expression is non-null, it has boolean type. */
5813 tree
5814 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5815 enum tree_code code2, tree op2a, tree op2b)
5817 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5818 if (t)
5819 return t;
5820 else
5821 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5824 /* Helper function for or_comparisons_1: try to simplify the OR of the
5825 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5826 If INVERT is true, invert the value of VAR before doing the OR.
5827 Return NULL_EXPR if we can't simplify this to a single expression. */
5829 static tree
5830 or_var_with_comparison (tree var, bool invert,
5831 enum tree_code code2, tree op2a, tree op2b)
5833 tree t;
5834 gimple *stmt = SSA_NAME_DEF_STMT (var);
5836 /* We can only deal with variables whose definitions are assignments. */
5837 if (!is_gimple_assign (stmt))
5838 return NULL_TREE;
5840 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5841 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5842 Then we only have to consider the simpler non-inverted cases. */
5843 if (invert)
5844 t = and_var_with_comparison_1 (stmt,
5845 invert_tree_comparison (code2, false),
5846 op2a, op2b);
5847 else
5848 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5849 return canonicalize_bool (t, invert);
5852 /* Try to simplify the OR of the ssa variable defined by the assignment
5853 STMT with the comparison specified by (OP2A CODE2 OP2B).
5854 Return NULL_EXPR if we can't simplify this to a single expression. */
5856 static tree
5857 or_var_with_comparison_1 (gimple *stmt,
5858 enum tree_code code2, tree op2a, tree op2b)
5860 tree var = gimple_assign_lhs (stmt);
5861 tree true_test_var = NULL_TREE;
5862 tree false_test_var = NULL_TREE;
5863 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5865 /* Check for identities like (var OR (var != 0)) => true . */
5866 if (TREE_CODE (op2a) == SSA_NAME
5867 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5869 if ((code2 == NE_EXPR && integer_zerop (op2b))
5870 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5872 true_test_var = op2a;
5873 if (var == true_test_var)
5874 return var;
5876 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5877 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5879 false_test_var = op2a;
5880 if (var == false_test_var)
5881 return boolean_true_node;
5885 /* If the definition is a comparison, recurse on it. */
5886 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5888 tree t = or_comparisons_1 (innercode,
5889 gimple_assign_rhs1 (stmt),
5890 gimple_assign_rhs2 (stmt),
5891 code2,
5892 op2a,
5893 op2b);
5894 if (t)
5895 return t;
5898 /* If the definition is an AND or OR expression, we may be able to
5899 simplify by reassociating. */
5900 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5901 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5903 tree inner1 = gimple_assign_rhs1 (stmt);
5904 tree inner2 = gimple_assign_rhs2 (stmt);
5905 gimple *s;
5906 tree t;
5907 tree partial = NULL_TREE;
5908 bool is_or = (innercode == BIT_IOR_EXPR);
5910 /* Check for boolean identities that don't require recursive examination
5911 of inner1/inner2:
5912 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5913 inner1 OR (inner1 AND inner2) => inner1
5914 !inner1 OR (inner1 OR inner2) => true
5915 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5917 if (inner1 == true_test_var)
5918 return (is_or ? var : inner1);
5919 else if (inner2 == true_test_var)
5920 return (is_or ? var : inner2);
5921 else if (inner1 == false_test_var)
5922 return (is_or
5923 ? boolean_true_node
5924 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5925 else if (inner2 == false_test_var)
5926 return (is_or
5927 ? boolean_true_node
5928 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5930 /* Next, redistribute/reassociate the OR across the inner tests.
5931 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5932 if (TREE_CODE (inner1) == SSA_NAME
5933 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5934 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5935 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5936 gimple_assign_rhs1 (s),
5937 gimple_assign_rhs2 (s),
5938 code2, op2a, op2b)))
5940 /* Handle the OR case, where we are reassociating:
5941 (inner1 OR inner2) OR (op2a code2 op2b)
5942 => (t OR inner2)
5943 If the partial result t is a constant, we win. Otherwise
5944 continue on to try reassociating with the other inner test. */
5945 if (is_or)
5947 if (integer_onep (t))
5948 return boolean_true_node;
5949 else if (integer_zerop (t))
5950 return inner2;
5953 /* Handle the AND case, where we are redistributing:
5954 (inner1 AND inner2) OR (op2a code2 op2b)
5955 => (t AND (inner2 OR (op2a code op2b))) */
5956 else if (integer_zerop (t))
5957 return boolean_false_node;
5959 /* Save partial result for later. */
5960 partial = t;
5963 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5964 if (TREE_CODE (inner2) == SSA_NAME
5965 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5966 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5967 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5968 gimple_assign_rhs1 (s),
5969 gimple_assign_rhs2 (s),
5970 code2, op2a, op2b)))
5972 /* Handle the OR case, where we are reassociating:
5973 (inner1 OR inner2) OR (op2a code2 op2b)
5974 => (inner1 OR t)
5975 => (t OR partial) */
5976 if (is_or)
5978 if (integer_zerop (t))
5979 return inner1;
5980 else if (integer_onep (t))
5981 return boolean_true_node;
5982 /* If both are the same, we can apply the identity
5983 (x OR x) == x. */
5984 else if (partial && same_bool_result_p (t, partial))
5985 return t;
5988 /* Handle the AND case, where we are redistributing:
5989 (inner1 AND inner2) OR (op2a code2 op2b)
5990 => (t AND (inner1 OR (op2a code2 op2b)))
5991 => (t AND partial) */
5992 else
5994 if (integer_zerop (t))
5995 return boolean_false_node;
5996 else if (partial)
5998 /* We already got a simplification for the other
5999 operand to the redistributed AND expression. The
6000 interesting case is when at least one is true.
6001 Or, if both are the same, we can apply the identity
6002 (x AND x) == x. */
6003 if (integer_onep (partial))
6004 return t;
6005 else if (integer_onep (t))
6006 return partial;
6007 else if (same_bool_result_p (t, partial))
6008 return t;
6013 return NULL_TREE;
6016 /* Try to simplify the OR of two comparisons defined by
6017 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6018 If this can be done without constructing an intermediate value,
6019 return the resulting tree; otherwise NULL_TREE is returned.
6020 This function is deliberately asymmetric as it recurses on SSA_DEFs
6021 in the first comparison but not the second. */
6023 static tree
6024 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
6025 enum tree_code code2, tree op2a, tree op2b)
6027 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6029 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6030 if (operand_equal_p (op1a, op2a, 0)
6031 && operand_equal_p (op1b, op2b, 0))
6033 /* Result will be either NULL_TREE, or a combined comparison. */
6034 tree t = combine_comparisons (UNKNOWN_LOCATION,
6035 TRUTH_ORIF_EXPR, code1, code2,
6036 truth_type, op1a, op1b);
6037 if (t)
6038 return t;
6041 /* Likewise the swapped case of the above. */
6042 if (operand_equal_p (op1a, op2b, 0)
6043 && operand_equal_p (op1b, op2a, 0))
6045 /* Result will be either NULL_TREE, or a combined comparison. */
6046 tree t = combine_comparisons (UNKNOWN_LOCATION,
6047 TRUTH_ORIF_EXPR, code1,
6048 swap_tree_comparison (code2),
6049 truth_type, op1a, op1b);
6050 if (t)
6051 return t;
6054 /* If both comparisons are of the same value against constants, we might
6055 be able to merge them. */
6056 if (operand_equal_p (op1a, op2a, 0)
6057 && TREE_CODE (op1b) == INTEGER_CST
6058 && TREE_CODE (op2b) == INTEGER_CST)
6060 int cmp = tree_int_cst_compare (op1b, op2b);
6062 /* If we have (op1a != op1b), we should either be able to
6063 return that or TRUE, depending on whether the constant op1b
6064 also satisfies the other comparison against op2b. */
6065 if (code1 == NE_EXPR)
6067 bool done = true;
6068 bool val;
6069 switch (code2)
6071 case EQ_EXPR: val = (cmp == 0); break;
6072 case NE_EXPR: val = (cmp != 0); break;
6073 case LT_EXPR: val = (cmp < 0); break;
6074 case GT_EXPR: val = (cmp > 0); break;
6075 case LE_EXPR: val = (cmp <= 0); break;
6076 case GE_EXPR: val = (cmp >= 0); break;
6077 default: done = false;
6079 if (done)
6081 if (val)
6082 return boolean_true_node;
6083 else
6084 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6087 /* Likewise if the second comparison is a != comparison. */
6088 else if (code2 == NE_EXPR)
6090 bool done = true;
6091 bool val;
6092 switch (code1)
6094 case EQ_EXPR: val = (cmp == 0); break;
6095 case NE_EXPR: val = (cmp != 0); break;
6096 case LT_EXPR: val = (cmp > 0); break;
6097 case GT_EXPR: val = (cmp < 0); break;
6098 case LE_EXPR: val = (cmp >= 0); break;
6099 case GE_EXPR: val = (cmp <= 0); break;
6100 default: done = false;
6102 if (done)
6104 if (val)
6105 return boolean_true_node;
6106 else
6107 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6111 /* See if an equality test is redundant with the other comparison. */
6112 else if (code1 == EQ_EXPR)
6114 bool val;
6115 switch (code2)
6117 case EQ_EXPR: val = (cmp == 0); break;
6118 case NE_EXPR: val = (cmp != 0); break;
6119 case LT_EXPR: val = (cmp < 0); break;
6120 case GT_EXPR: val = (cmp > 0); break;
6121 case LE_EXPR: val = (cmp <= 0); break;
6122 case GE_EXPR: val = (cmp >= 0); break;
6123 default:
6124 val = false;
6126 if (val)
6127 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6129 else if (code2 == EQ_EXPR)
6131 bool val;
6132 switch (code1)
6134 case EQ_EXPR: val = (cmp == 0); break;
6135 case NE_EXPR: val = (cmp != 0); break;
6136 case LT_EXPR: val = (cmp > 0); break;
6137 case GT_EXPR: val = (cmp < 0); break;
6138 case LE_EXPR: val = (cmp >= 0); break;
6139 case GE_EXPR: val = (cmp <= 0); break;
6140 default:
6141 val = false;
6143 if (val)
6144 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6147 /* Chose the less restrictive of two < or <= comparisons. */
6148 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6149 && (code2 == LT_EXPR || code2 == LE_EXPR))
6151 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6152 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6153 else
6154 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6157 /* Likewise chose the less restrictive of two > or >= comparisons. */
6158 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6159 && (code2 == GT_EXPR || code2 == GE_EXPR))
6161 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6162 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6163 else
6164 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6167 /* Check for singleton ranges. */
6168 else if (cmp == 0
6169 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6170 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6171 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6173 /* Check for less/greater pairs that don't restrict the range at all. */
6174 else if (cmp >= 0
6175 && (code1 == LT_EXPR || code1 == LE_EXPR)
6176 && (code2 == GT_EXPR || code2 == GE_EXPR))
6177 return boolean_true_node;
6178 else if (cmp <= 0
6179 && (code1 == GT_EXPR || code1 == GE_EXPR)
6180 && (code2 == LT_EXPR || code2 == LE_EXPR))
6181 return boolean_true_node;
6184 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6185 NAME's definition is a truth value. See if there are any simplifications
6186 that can be done against the NAME's definition. */
6187 if (TREE_CODE (op1a) == SSA_NAME
6188 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6189 && (integer_zerop (op1b) || integer_onep (op1b)))
6191 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6192 || (code1 == NE_EXPR && integer_onep (op1b)));
6193 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6194 switch (gimple_code (stmt))
6196 case GIMPLE_ASSIGN:
6197 /* Try to simplify by copy-propagating the definition. */
6198 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6200 case GIMPLE_PHI:
6201 /* If every argument to the PHI produces the same result when
6202 ORed with the second comparison, we win.
6203 Do not do this unless the type is bool since we need a bool
6204 result here anyway. */
6205 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6207 tree result = NULL_TREE;
6208 unsigned i;
6209 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6211 tree arg = gimple_phi_arg_def (stmt, i);
6213 /* If this PHI has itself as an argument, ignore it.
6214 If all the other args produce the same result,
6215 we're still OK. */
6216 if (arg == gimple_phi_result (stmt))
6217 continue;
6218 else if (TREE_CODE (arg) == INTEGER_CST)
6220 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6222 if (!result)
6223 result = boolean_true_node;
6224 else if (!integer_onep (result))
6225 return NULL_TREE;
6227 else if (!result)
6228 result = fold_build2 (code2, boolean_type_node,
6229 op2a, op2b);
6230 else if (!same_bool_comparison_p (result,
6231 code2, op2a, op2b))
6232 return NULL_TREE;
6234 else if (TREE_CODE (arg) == SSA_NAME
6235 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6237 tree temp;
6238 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6239 /* In simple cases we can look through PHI nodes,
6240 but we have to be careful with loops.
6241 See PR49073. */
6242 if (! dom_info_available_p (CDI_DOMINATORS)
6243 || gimple_bb (def_stmt) == gimple_bb (stmt)
6244 || dominated_by_p (CDI_DOMINATORS,
6245 gimple_bb (def_stmt),
6246 gimple_bb (stmt)))
6247 return NULL_TREE;
6248 temp = or_var_with_comparison (arg, invert, code2,
6249 op2a, op2b);
6250 if (!temp)
6251 return NULL_TREE;
6252 else if (!result)
6253 result = temp;
6254 else if (!same_bool_result_p (result, temp))
6255 return NULL_TREE;
6257 else
6258 return NULL_TREE;
6260 return result;
6263 default:
6264 break;
6267 return NULL_TREE;
6270 /* Try to simplify the OR of two comparisons, specified by
6271 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6272 If this can be simplified to a single expression (without requiring
6273 introducing more SSA variables to hold intermediate values),
6274 return the resulting tree. Otherwise return NULL_TREE.
6275 If the result expression is non-null, it has boolean type. */
6277 tree
6278 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6279 enum tree_code code2, tree op2a, tree op2b)
6281 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6282 if (t)
6283 return t;
6284 else
6285 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6289 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6291 Either NULL_TREE, a simplified but non-constant or a constant
6292 is returned.
6294 ??? This should go into a gimple-fold-inline.h file to be eventually
6295 privatized with the single valueize function used in the various TUs
6296 to avoid the indirect function call overhead. */
6298 tree
6299 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6300 tree (*gvalueize) (tree))
/* NOTE(review): GVALUEIZE is forwarded to gimple_simplify below, while
   VALUEIZE is used for the direct operand lookups in the fallback code;
   presumably callers pass the same callback for both -- confirm at the
   call sites.  */
6302 gimple_match_op res_op;
6303 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6304 edges if there are intermediate VARYING defs. For this reason
6305 do not follow SSA edges here even though SCCVN can technically
6306 just deal fine with that. */
6307 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6309 tree res = NULL_TREE;
6310 if (gimple_simplified_result_is_gimple_val (&res_op))
6311 res = res_op.ops[0];
6312 else if (mprts_hook)
6313 res = mprts_hook (&res_op);
6314 if (res)
6316 if (dump_file && dump_flags & TDF_DETAILS)
6318 fprintf (dump_file, "Match-and-simplified ");
6319 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6320 fprintf (dump_file, " to ");
6321 print_generic_expr (dump_file, res);
6322 fprintf (dump_file, "\n");
6324 return res;
/* Match-and-simplify produced no usable result; fall back to
   statement-kind-specific constant folding below.  */
6328 location_t loc = gimple_location (stmt);
6329 switch (gimple_code (stmt))
6331 case GIMPLE_ASSIGN:
6333 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6335 switch (get_gimple_rhs_class (subcode))
6337 case GIMPLE_SINGLE_RHS:
6339 tree rhs = gimple_assign_rhs1 (stmt);
6340 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6342 if (TREE_CODE (rhs) == SSA_NAME)
6344 /* If the RHS is an SSA_NAME, return its known constant value,
6345 if any. */
6346 return (*valueize) (rhs);
6348 /* Handle propagating invariant addresses into address
6349 operations. */
6350 else if (TREE_CODE (rhs) == ADDR_EXPR
6351 && !is_gimple_min_invariant (rhs))
6353 poly_int64 offset = 0;
6354 tree base;
6355 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6356 &offset,
6357 valueize);
6358 if (base
6359 && (CONSTANT_CLASS_P (base)
6360 || decl_address_invariant_p (base)))
6361 return build_invariant_address (TREE_TYPE (rhs),
6362 base, offset);
6364 else if (TREE_CODE (rhs) == CONSTRUCTOR
6365 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6366 && known_eq (CONSTRUCTOR_NELTS (rhs),
6367 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6369 unsigned i, nelts;
6370 tree val;
6372 nelts = CONSTRUCTOR_NELTS (rhs);
6373 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6374 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6376 val = (*valueize) (val);
6377 if (TREE_CODE (val) == INTEGER_CST
6378 || TREE_CODE (val) == REAL_CST
6379 || TREE_CODE (val) == FIXED_CST)
6380 vec.quick_push (val);
6381 else
6382 return NULL_TREE;
6385 return vec.build ();
6387 if (subcode == OBJ_TYPE_REF)
6389 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6390 /* If callee is constant, we can fold away the wrapper. */
6391 if (is_gimple_min_invariant (val))
6392 return val;
6395 if (kind == tcc_reference)
6397 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6398 || TREE_CODE (rhs) == REALPART_EXPR
6399 || TREE_CODE (rhs) == IMAGPART_EXPR)
6400 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6402 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6403 return fold_unary_loc (EXPR_LOCATION (rhs),
6404 TREE_CODE (rhs),
6405 TREE_TYPE (rhs), val);
6407 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6408 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6410 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6411 return fold_ternary_loc (EXPR_LOCATION (rhs),
6412 TREE_CODE (rhs),
6413 TREE_TYPE (rhs), val,
6414 TREE_OPERAND (rhs, 1),
6415 TREE_OPERAND (rhs, 2));
6417 else if (TREE_CODE (rhs) == MEM_REF
6418 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6420 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6421 if (TREE_CODE (val) == ADDR_EXPR
6422 && is_gimple_min_invariant (val))
6424 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6425 unshare_expr (val),
6426 TREE_OPERAND (rhs, 1));
6427 if (tem)
6428 rhs = tem;
6431 return fold_const_aggregate_ref_1 (rhs, valueize);
6433 else if (kind == tcc_declaration)
6434 return get_symbol_constant_value (rhs);
6435 return rhs;
6438 case GIMPLE_UNARY_RHS:
6439 return NULL_TREE;
6441 case GIMPLE_BINARY_RHS:
6442 /* Translate &x + CST into an invariant form suitable for
6443 further propagation. */
6444 if (subcode == POINTER_PLUS_EXPR)
6446 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6447 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6448 if (TREE_CODE (op0) == ADDR_EXPR
6449 && TREE_CODE (op1) == INTEGER_CST)
6451 tree off = fold_convert (ptr_type_node, op1);
6452 return build_fold_addr_expr_loc
6453 (loc,
6454 fold_build2 (MEM_REF,
6455 TREE_TYPE (TREE_TYPE (op0)),
6456 unshare_expr (op0), off));
6459 /* Canonicalize bool != 0 and bool == 0 appearing after
6460 valueization. While gimple_simplify handles this
6461 it can get confused by the ~X == 1 -> X == 0 transform
6462 which we can't reduce to a SSA name or a constant
6463 (and we have no way to tell gimple_simplify to not
6464 consider those transforms in the first place). */
6465 else if (subcode == EQ_EXPR
6466 || subcode == NE_EXPR)
6468 tree lhs = gimple_assign_lhs (stmt);
6469 tree op0 = gimple_assign_rhs1 (stmt);
6470 if (useless_type_conversion_p (TREE_TYPE (lhs),
6471 TREE_TYPE (op0)))
6473 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6474 op0 = (*valueize) (op0);
6475 if (TREE_CODE (op0) == INTEGER_CST)
6476 std::swap (op0, op1);
6477 if (TREE_CODE (op1) == INTEGER_CST
6478 && ((subcode == NE_EXPR && integer_zerop (op1))
6479 || (subcode == EQ_EXPR && integer_onep (op1))))
6480 return op0;
6483 return NULL_TREE;
6485 case GIMPLE_TERNARY_RHS:
6487 /* Handle ternary operators that can appear in GIMPLE form. */
6488 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6489 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6490 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6491 return fold_ternary_loc (loc, subcode,
6492 gimple_expr_type (stmt), op0, op1, op2);
6495 default:
6496 gcc_unreachable ();
/* For calls: fold internal UBSAN checks and BUILTIN_EXPECT with constant
   operands, otherwise valueize the callee and try the generic builtin
   folders on the valueized arguments.  */
6500 case GIMPLE_CALL:
6502 tree fn;
6503 gcall *call_stmt = as_a <gcall *> (stmt);
6505 if (gimple_call_internal_p (stmt))
6507 enum tree_code subcode = ERROR_MARK;
6508 switch (gimple_call_internal_fn (stmt))
6510 case IFN_UBSAN_CHECK_ADD:
6511 subcode = PLUS_EXPR;
6512 break;
6513 case IFN_UBSAN_CHECK_SUB:
6514 subcode = MINUS_EXPR;
6515 break;
6516 case IFN_UBSAN_CHECK_MUL:
6517 subcode = MULT_EXPR;
6518 break;
6519 case IFN_BUILTIN_EXPECT:
6521 tree arg0 = gimple_call_arg (stmt, 0);
6522 tree op0 = (*valueize) (arg0);
6523 if (TREE_CODE (op0) == INTEGER_CST)
6524 return op0;
6525 return NULL_TREE;
6527 default:
6528 return NULL_TREE;
6530 tree arg0 = gimple_call_arg (stmt, 0);
6531 tree arg1 = gimple_call_arg (stmt, 1);
6532 tree op0 = (*valueize) (arg0);
6533 tree op1 = (*valueize) (arg1);
6535 if (TREE_CODE (op0) != INTEGER_CST
6536 || TREE_CODE (op1) != INTEGER_CST)
6538 switch (subcode)
6540 case MULT_EXPR:
6541 /* x * 0 = 0 * x = 0 without overflow. */
6542 if (integer_zerop (op0) || integer_zerop (op1))
6543 return build_zero_cst (TREE_TYPE (arg0));
6544 break;
6545 case MINUS_EXPR:
6546 /* y - y = 0 without overflow. */
6547 if (operand_equal_p (op0, op1, 0))
6548 return build_zero_cst (TREE_TYPE (arg0));
6549 break;
6550 default:
6551 break;
6554 tree res
6555 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6556 if (res
6557 && TREE_CODE (res) == INTEGER_CST
6558 && !TREE_OVERFLOW (res))
6559 return res;
6560 return NULL_TREE;
6563 fn = (*valueize) (gimple_call_fn (stmt));
6564 if (TREE_CODE (fn) == ADDR_EXPR
6565 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6566 && gimple_builtin_call_types_compatible_p (stmt,
6567 TREE_OPERAND (fn, 0)))
6569 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6570 tree retval;
6571 unsigned i;
6572 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6573 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6574 retval = fold_builtin_call_array (loc,
6575 gimple_call_return_type (call_stmt),
6576 fn, gimple_call_num_args (stmt), args);
6577 if (retval)
6579 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6580 STRIP_NOPS (retval);
6581 retval = fold_convert (gimple_call_return_type (call_stmt),
6582 retval);
6584 return retval;
6586 return NULL_TREE;
6589 default:
6590 return NULL_TREE;
6594 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6595 Returns NULL_TREE if folding to a constant is not possible, otherwise
6596 returns a constant according to is_gimple_min_invariant. */
6598 tree
6599 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6601 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6602 if (res && is_gimple_min_invariant (res))
6603 return res;
6604 return NULL_TREE;
6608 /* The following set of functions are supposed to fold references using
6609 their constant initializers. */
6611 /* See if we can find constructor defining value of BASE.
6612 When we know the consructor with constant offset (such as
6613 base is array[40] and we do know constructor of array), then
6614 BIT_OFFSET is adjusted accordingly.
6616 As a special case, return error_mark_node when constructor
6617 is not explicitly available, but it is known to be zero
6618 such as 'static const int a;'. */
6619 static tree
6620 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6621 tree (*valueize)(tree))
6623 poly_int64 bit_offset2, size, max_size;
6624 bool reverse;
6626 if (TREE_CODE (base) == MEM_REF)
6628 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6629 if (!boff.to_shwi (bit_offset))
6630 return NULL_TREE;
6632 if (valueize
6633 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6634 base = valueize (TREE_OPERAND (base, 0));
6635 if (!base || TREE_CODE (base) != ADDR_EXPR)
6636 return NULL_TREE;
6637 base = TREE_OPERAND (base, 0);
6639 else if (valueize
6640 && TREE_CODE (base) == SSA_NAME)
6641 base = valueize (base);
6643 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6644 DECL_INITIAL. If BASE is a nested reference into another
6645 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6646 the inner reference. */
6647 switch (TREE_CODE (base))
6649 case VAR_DECL:
6650 case CONST_DECL:
6652 tree init = ctor_for_folding (base);
6654 /* Our semantic is exact opposite of ctor_for_folding;
6655 NULL means unknown, while error_mark_node is 0. */
6656 if (init == error_mark_node)
6657 return NULL_TREE;
6658 if (!init)
6659 return error_mark_node;
6660 return init;
6663 case VIEW_CONVERT_EXPR:
6664 return get_base_constructor (TREE_OPERAND (base, 0),
6665 bit_offset, valueize);
6667 case ARRAY_REF:
6668 case COMPONENT_REF:
6669 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6670 &reverse);
6671 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6672 return NULL_TREE;
6673 *bit_offset += bit_offset2;
6674 return get_base_constructor (base, bit_offset, valueize);
6676 case CONSTRUCTOR:
6677 return base;
6679 default:
6680 if (CONSTANT_CLASS_P (base))
6681 return base;
6683 return NULL_TREE;
6687 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6688 to the memory at bit OFFSET. When non-null, TYPE is the expected
6689 type of the reference; otherwise the type of the referenced element
6690 is used instead. When SIZE is zero, attempt to fold a reference to
6691 the entire element which OFFSET refers to. Increment *SUBOFF by
6692 the bit offset of the accessed element. */
6694 static tree
6695 fold_array_ctor_reference (tree type, tree ctor,
6696 unsigned HOST_WIDE_INT offset,
6697 unsigned HOST_WIDE_INT size,
6698 tree from_decl,
6699 unsigned HOST_WIDE_INT *suboff)
/* NOTE(review): CTOR is expected to be an array or vector CONSTRUCTOR;
   the dispatch in fold_ctor_reference guarantees this.  */
6701 offset_int low_bound;
6702 offset_int elt_size;
6703 offset_int access_index;
6704 tree domain_type = NULL_TREE;
6705 HOST_WIDE_INT inner_offset;
6707 /* Compute low bound and elt size. */
6708 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6709 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6710 if (domain_type && TYPE_MIN_VALUE (domain_type))
6712 /* Static constructors for variably sized objects make no sense. */
6713 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6714 return NULL_TREE;
6715 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6717 else
6718 low_bound = 0;
6719 /* Static constructors for variably sized objects make no sense. */
6720 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6721 return NULL_TREE;
6722 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6724 /* When TYPE is non-null, verify that it specifies a constant-sized
6725 accessed not larger than size of array element. Avoid division
6726 by zero below when ELT_SIZE is zero, such as with the result of
6727 an initializer for a zero-length array or an empty struct. */
6728 if (elt_size == 0
6729 || (type
6730 && (!TYPE_SIZE_UNIT (type)
6731 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6732 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type)))))
6733 return NULL_TREE;
6735 /* Compute the array index we look for. */
6736 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6737 elt_size);
6738 access_index += low_bound;
6740 /* And offset within the access. */
6741 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6743 /* See if the array field is large enough to span whole access. We do not
6744 care to fold accesses spanning multiple array indexes. */
6745 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6746 return NULL_TREE;
6747 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6749 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6751 /* For the final reference to the entire accessed element
6752 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6753 may be null) in favor of the type of the element, and set
6754 SIZE to the size of the accessed element. */
6755 inner_offset = 0;
6756 type = TREE_TYPE (val);
6757 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6760 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6761 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6762 suboff);
6765 /* Memory not explicitly mentioned in constructor is 0 (or
6766 the reference is out of range). */
6767 return type ? build_zero_cst (type) : NULL_TREE;
6770 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6771 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6772 is the expected type of the reference; otherwise the type of
6773 the referenced member is used instead. When SIZE is zero,
6774 attempt to fold a reference to the entire member which OFFSET
6775 refers to. In this case increment *SUBOFF by the bit offset
6776 of the accessed member. */
6778 static tree
6779 fold_nonarray_ctor_reference (tree type, tree ctor,
6780 unsigned HOST_WIDE_INT offset,
6781 unsigned HOST_WIDE_INT size,
6782 tree from_decl,
6783 unsigned HOST_WIDE_INT *suboff)
6785 unsigned HOST_WIDE_INT cnt;
6786 tree cfield, cval;
/* Scan the initialized fields for one overlapping the requested
   bit range [OFFSET, OFFSET+SIZE).  */
6788 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6789 cval)
6791 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6792 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6793 tree field_size = DECL_SIZE (cfield);
6795 if (!field_size)
6797 /* Determine the size of the flexible array member from
6798 the size of the initializer provided for it. */
6799 field_size = TYPE_SIZE (TREE_TYPE (cval));
6802 /* Variable sized objects in static constructors makes no sense,
6803 but field_size can be NULL for flexible array members. */
6804 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6805 && TREE_CODE (byte_offset) == INTEGER_CST
6806 && (field_size != NULL_TREE
6807 ? TREE_CODE (field_size) == INTEGER_CST
6808 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6810 /* Compute bit offset of the field. */
6811 offset_int bitoffset
6812 = (wi::to_offset (field_offset)
6813 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6814 /* Compute bit offset where the field ends. */
6815 offset_int bitoffset_end;
6816 if (field_size != NULL_TREE)
6817 bitoffset_end = bitoffset + wi::to_offset (field_size);
6818 else
6819 bitoffset_end = 0;
6821 /* Compute the bit offset of the end of the desired access.
6822 As a special case, if the size of the desired access is
6823 zero, assume the access is to the entire field (and let
6824 the caller make any necessary adjustments by storing
6825 the actual bounds of the field in FIELDBOUNDS). */
6826 offset_int access_end = offset_int (offset);
6827 if (size)
6828 access_end += size;
6829 else
6830 access_end = bitoffset_end;
6832 /* Is there any overlap between the desired access at
6833 [OFFSET, OFFSET+SIZE) and the offset of the field within
6834 the object at [BITOFFSET, BITOFFSET_END)? */
6835 if (wi::cmps (access_end, bitoffset) > 0
6836 && (field_size == NULL_TREE
6837 || wi::lts_p (offset, bitoffset_end)))
6839 *suboff += bitoffset.to_uhwi ();
6841 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6843 /* For the final reference to the entire accessed member
6844 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6845 be null) in favor of the type of the member, and set
6846 SIZE to the size of the accessed member. */
6847 offset = bitoffset.to_uhwi ();
6848 type = TREE_TYPE (cval);
6849 size = (bitoffset_end - bitoffset).to_uhwi ();
6852 /* We do have overlap. Now see if the field is large enough
6853 to cover the access. Give up for accesses that extend
6854 beyond the end of the object or that span multiple fields. */
6855 if (wi::cmps (access_end, bitoffset_end) > 0)
6856 return NULL_TREE;
6857 if (offset < bitoffset)
6858 return NULL_TREE;
6860 offset_int inner_offset = offset_int (offset) - bitoffset;
6861 return fold_ctor_reference (type, cval,
6862 inner_offset.to_uhwi (), size,
6863 from_decl, suboff);
6866 /* Memory not explicitly mentioned in constructor is 0. */
6867 return type ? build_zero_cst (type) : NULL_TREE;
6870 /* CTOR is value initializing memory. Fold a reference of TYPE and
6871 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6872 is zero, attempt to fold a reference to the entire subobject
6873 which OFFSET refers to. This is used when folding accesses to
6874 string members of aggregates. When non-null, set *SUBOFF to
6875 the bit offset of the accessed subobject. */
6877 tree
6878 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6879 const poly_uint64 &poly_size, tree from_decl,
6880 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6882 tree ret;
6884 /* We found the field with exact match. */
6885 if (type
6886 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6887 && known_eq (poly_offset, 0U))
6888 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6890 /* The remaining optimizations need a constant size and offset. */
6891 unsigned HOST_WIDE_INT size, offset;
6892 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6893 return NULL_TREE;
6895 /* We are at the end of walk, see if we can view convert the
6896 result. */
6897 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6898 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6899 && !compare_tree_int (TYPE_SIZE (type), size)
6900 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6902 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6903 if (ret)
6905 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6906 if (ret)
6907 STRIP_USELESS_TYPE_CONVERSION (ret);
6909 return ret;
6911 /* For constants and byte-aligned/sized reads try to go through
6912 native_encode/interpret. */
6913 if (CONSTANT_CLASS_P (ctor)
6914 && BITS_PER_UNIT == 8
6915 && offset % BITS_PER_UNIT == 0
6916 && size % BITS_PER_UNIT == 0
6917 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6919 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6920 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6921 offset / BITS_PER_UNIT);
6922 if (len > 0)
6923 return native_interpret_expr (type, buf, len);
/* Recurse into aggregate constructors, dispatching on array/vector
   vs. record form; use a dummy sub-offset when the caller did not
   request one.  */
6925 if (TREE_CODE (ctor) == CONSTRUCTOR)
6927 unsigned HOST_WIDE_INT dummy = 0;
6928 if (!suboff)
6929 suboff = &dummy;
6931 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6932 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6933 return fold_array_ctor_reference (type, ctor, offset, size,
6934 from_decl, suboff);
6936 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6937 from_decl, suboff);
6940 return NULL_TREE;
6943 /* Return the tree representing the element referenced by T if T is an
6944 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6945 names using VALUEIZE. Return NULL_TREE otherwise. */
6947 tree
6948 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6950 tree ctor, idx, base;
6951 poly_int64 offset, size, max_size;
6952 tree tem;
6953 bool reverse;
/* Volatile accesses must be preserved, never folded to a constant.  */
6955 if (TREE_THIS_VOLATILE (t))
6956 return NULL_TREE;
6958 if (DECL_P (t))
6959 return get_symbol_constant_value (t);
6961 tem = fold_read_from_constant_string (t);
6962 if (tem)
6963 return tem;
6965 switch (TREE_CODE (t))
6967 case ARRAY_REF:
6968 case ARRAY_RANGE_REF:
6969 /* Constant indexes are handled well by get_base_constructor.
6970 Only special case variable offsets.
6971 FIXME: This code can't handle nested references with variable indexes
6972 (they will be handled only by iteration of ccp). Perhaps we can bring
6973 get_ref_base_and_extent here and make it use a valueize callback. */
6974 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6975 && valueize
6976 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6977 && poly_int_tree_p (idx))
6979 tree low_bound, unit_size;
6981 /* If the resulting bit-offset is constant, track it. */
6982 if ((low_bound = array_ref_low_bound (t),
6983 poly_int_tree_p (low_bound))
6984 && (unit_size = array_ref_element_size (t),
6985 tree_fits_uhwi_p (unit_size)))
6987 poly_offset_int woffset
6988 = wi::sext (wi::to_poly_offset (idx)
6989 - wi::to_poly_offset (low_bound),
6990 TYPE_PRECISION (TREE_TYPE (idx)));
6991 woffset *= tree_to_uhwi (unit_size);
6992 woffset *= BITS_PER_UNIT;
6993 if (woffset.to_shwi (&offset))
6995 base = TREE_OPERAND (t, 0);
6996 ctor = get_base_constructor (base, &offset, valueize);
6997 /* Empty constructor. Always fold to 0. */
6998 if (ctor == error_mark_node)
6999 return build_zero_cst (TREE_TYPE (t));
7000 /* Out of bound array access. Value is undefined,
7001 but don't fold. */
7002 if (maybe_lt (offset, 0))
7003 return NULL_TREE;
7004 /* We cannot determine ctor. */
7005 if (!ctor)
7006 return NULL_TREE;
7007 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7008 tree_to_uhwi (unit_size)
7009 * BITS_PER_UNIT,
7010 base);
7014 /* Fallthru. */
7016 case COMPONENT_REF:
7017 case BIT_FIELD_REF:
7018 case TARGET_MEM_REF:
7019 case MEM_REF:
7020 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7021 ctor = get_base_constructor (base, &offset, valueize);
7023 /* Empty constructor. Always fold to 0. */
7024 if (ctor == error_mark_node)
7025 return build_zero_cst (TREE_TYPE (t));
7026 /* We do not know precise address. */
7027 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7028 return NULL_TREE;
7029 /* We cannot determine ctor. */
7030 if (!ctor)
7031 return NULL_TREE;
7033 /* Out of bound array access. Value is undefined, but don't fold. */
7034 if (maybe_lt (offset, 0))
7035 return NULL_TREE;
7037 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7038 base);
7040 case REALPART_EXPR:
7041 case IMAGPART_EXPR:
7043 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7044 if (c && TREE_CODE (c) == COMPLEX_CST)
7045 return fold_build1_loc (EXPR_LOCATION (t),
7046 TREE_CODE (t), TREE_TYPE (t), c);
7047 break;
7050 default:
7051 break;
7054 return NULL_TREE;
/* Fold a reference T into constant aggregate initializers without
   SSA name valueization (wrapper around fold_const_aggregate_ref_1).
   Return NULL_TREE if nothing can be folded.  */
7057 tree
7058 fold_const_aggregate_ref (tree t)
7060 return fold_const_aggregate_ref_1 (t, NULL);
7063 /* Lookup virtual method with index TOKEN in a virtual table V
7064 at OFFSET.
7065 Set CAN_REFER if non-NULL to false if method
7066 is not referable or if the virtual table is ill-formed (such as rewritten
7067 by non-C++ produced symbol). Otherwise just return NULL in that case. */
7069 tree
7070 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7071 tree v,
7072 unsigned HOST_WIDE_INT offset,
7073 bool *can_refer)
7075 tree vtable = v, init, fn;
7076 unsigned HOST_WIDE_INT size;
7077 unsigned HOST_WIDE_INT elt_size, access_index;
7078 tree domain_type;
7080 if (can_refer)
7081 *can_refer = true;
7083 /* First of all double check we have virtual table. */
7084 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7086 /* Pass down that we lost track of the target. */
7087 if (can_refer)
7088 *can_refer = false;
7089 return NULL_TREE;
7092 init = ctor_for_folding (v);
7094 /* The virtual tables should always be born with constructors
7095 and we always should assume that they are available for
7096 folding. At the moment we do not stream them in all cases,
7097 but it should never happen that ctor seem unreachable. */
7098 gcc_assert (init);
7099 if (init == error_mark_node)
7101 /* Pass down that we lost track of the target. */
7102 if (can_refer)
7103 *can_refer = false;
7104 return NULL_TREE;
7106 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7107 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7108 offset *= BITS_PER_UNIT;
7109 offset += token * size;
7111 /* Lookup the value in the constructor that is assumed to be array.
7112 This is equivalent to
7113 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7114 offset, size, NULL);
7115 but in a constant time. We expect that frontend produced a simple
7116 array without indexed initializers. */
7118 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7119 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7120 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7121 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
/* Index of the vtable slot, assuming a dense zero-based array.  */
7123 access_index = offset / BITS_PER_UNIT / elt_size;
7124 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7126 /* The C++ FE can now produce indexed fields, and we check if the indexes
7127 match. */
7128 if (access_index < CONSTRUCTOR_NELTS (init))
7130 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7131 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7132 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7133 STRIP_NOPS (fn);
7135 else
7136 fn = NULL;
7138 /* For type inconsistent program we may end up looking up virtual method
7139 in virtual table that does not contain TOKEN entries. We may overrun
7140 the virtual table and pick up a constant or RTTI info pointer.
7141 In any case the call is undefined. */
7142 if (!fn
7143 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7144 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7145 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7146 else
7148 fn = TREE_OPERAND (fn, 0);
7150 /* When cgraph node is missing and function is not public, we cannot
7151 devirtualize. This can happen in WHOPR when the actual method
7152 ends up in other partition, because we found devirtualization
7153 possibility too late. */
7154 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7156 if (can_refer)
7158 *can_refer = false;
7159 return fn;
7161 return NULL_TREE;
7165 /* Make sure we create a cgraph node for functions we'll reference.
7166 They can be non-existent if the reference comes from an entry
7167 of an external vtable for example. */
7168 cgraph_node::get_create (fn);
7170 return fn;
7173 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7174 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7175 KNOWN_BINFO carries the binfo describing the true type of
7176 OBJ_TYPE_REF_OBJECT(REF).
7177 Set CAN_REFER if non-NULL to false if method
7178 is not referable or if the virtual table is ill-formed (such as rewriten
7179 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7181 tree
7182 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7183 bool *can_refer)
7185 unsigned HOST_WIDE_INT offset;
7186 tree v;
7188 v = BINFO_VTABLE (known_binfo);
7189 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7190 if (!v)
7191 return NULL_TREE;
7193 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7195 if (can_refer)
7196 *can_refer = false;
7197 return NULL_TREE;
7199 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7202 /* Given a pointer value T, return a simplified version of an
7203 indirection through T, or NULL_TREE if no simplification is
7204 possible. Note that the resulting type may be different from
7205 the type pointed to in the sense that it is still compatible
7206 from the langhooks point of view. */
7208 tree
7209 gimple_fold_indirect_ref (tree t)
7211 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7212 tree sub = t;
7213 tree subtype;
7215 STRIP_NOPS (sub);
7216 subtype = TREE_TYPE (sub);
/* Only fold plain pointer dereferences; ref-all pointers may alias
   anything and must be left alone.  */
7217 if (!POINTER_TYPE_P (subtype)
7218 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7219 return NULL_TREE;
7221 if (TREE_CODE (sub) == ADDR_EXPR)
7223 tree op = TREE_OPERAND (sub, 0);
7224 tree optype = TREE_TYPE (op);
7225 /* *&p => p */
7226 if (useless_type_conversion_p (type, optype))
7227 return op;
7229 /* *(foo *)&fooarray => fooarray[0] */
7230 if (TREE_CODE (optype) == ARRAY_TYPE
7231 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7232 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7234 tree type_domain = TYPE_DOMAIN (optype);
7235 tree min_val = size_zero_node;
7236 if (type_domain && TYPE_MIN_VALUE (type_domain))
7237 min_val = TYPE_MIN_VALUE (type_domain);
7238 if (TREE_CODE (min_val) == INTEGER_CST)
7239 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7241 /* *(foo *)&complexfoo => __real__ complexfoo */
7242 else if (TREE_CODE (optype) == COMPLEX_TYPE
7243 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7244 return fold_build1 (REALPART_EXPR, type, op);
7245 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7246 else if (TREE_CODE (optype) == VECTOR_TYPE
7247 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7249 tree part_width = TYPE_SIZE (type);
7250 tree index = bitsize_int (0);
7251 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7255 /* *(p + CST) -> ... */
7256 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7257 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7259 tree addr = TREE_OPERAND (sub, 0);
7260 tree off = TREE_OPERAND (sub, 1);
7261 tree addrtype;
7263 STRIP_NOPS (addr);
7264 addrtype = TREE_TYPE (addr);
7266 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7267 if (TREE_CODE (addr) == ADDR_EXPR
7268 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7269 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7270 && tree_fits_uhwi_p (off))
7272 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7273 tree part_width = TYPE_SIZE (type);
7274 unsigned HOST_WIDE_INT part_widthi
7275 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7276 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7277 tree index = bitsize_int (indexi);
7278 if (known_lt (offset / part_widthi,
7279 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7280 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7281 part_width, index);
7284 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7285 if (TREE_CODE (addr) == ADDR_EXPR
7286 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7287 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7289 tree size = TYPE_SIZE_UNIT (type);
7290 if (tree_int_cst_equal (size, off))
7291 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7294 /* *(p + CST) -> MEM_REF <p, CST>. */
7295 if (TREE_CODE (addr) != ADDR_EXPR
7296 || DECL_P (TREE_OPERAND (addr, 0)))
7297 return fold_build2 (MEM_REF, type,
7298 addr,
7299 wide_int_to_tree (ptype, wi::to_wide (off)));
7302 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7303 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7304 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7305 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7307 tree type_domain;
7308 tree min_val = size_zero_node;
7309 tree osub = sub;
/* Fold the inner dereference; if that fails, build an explicit
   INDIRECT_REF so the ARRAY_REF below still has an operand.  */
7310 sub = gimple_fold_indirect_ref (sub);
7311 if (! sub)
7312 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7313 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7314 if (type_domain && TYPE_MIN_VALUE (type_domain))
7315 min_val = TYPE_MIN_VALUE (type_domain);
7316 if (TREE_CODE (min_val) == INTEGER_CST)
7317 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7320 return NULL_TREE;
7323 /* Return true if CODE is an operation that when operating on signed
7324 integer types involves undefined behavior on overflow and the
7325 operation can be expressed with unsigned arithmetic. */
7327 bool
7328 arith_code_with_undefined_signed_overflow (tree_code code)
7330 switch (code)
7332 case PLUS_EXPR:
7333 case MINUS_EXPR:
7334 case MULT_EXPR:
7335 case NEGATE_EXPR:
7336 case POINTER_PLUS_EXPR:
7337 return true;
7338 default:
7339 return false;
7343 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7344 operation that can be transformed to unsigned arithmetic by converting
7345 its operand, carrying out the operation in the corresponding unsigned
7346 type and converting the result back to the original type.
7348 Returns a sequence of statements that replace STMT and also contain
7349 a modified form of STMT itself. */
7351 gimple_seq
7352 rewrite_to_defined_overflow (gimple *stmt)
7354 if (dump_file && (dump_flags & TDF_DETAILS))
7356 fprintf (dump_file, "rewriting stmt with undefined signed "
7357 "overflow ");
7358 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7361 tree lhs = gimple_assign_lhs (stmt);
7362 tree type = unsigned_type_for (TREE_TYPE (lhs));
7363 gimple_seq stmts = NULL;
7364 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7366 tree op = gimple_op (stmt, i);
7367 op = gimple_convert (&stmts, type, op);
7368 gimple_set_op (stmt, i, op);
7370 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7371 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7372 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7373 gimple_seq_add_stmt (&stmts, stmt);
7374 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7375 gimple_seq_add_stmt (&stmts, cvt);
7377 return stmts;
7381 /* The valueization hook we use for the gimple_build API simplification.
7382 This makes us match fold_buildN behavior by only combining with
7383 statements in the sequence(s) we are currently building. */
7385 static tree
7386 gimple_build_valueize (tree op)
7388 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7389 return op;
7390 return NULL_TREE;
7393 /* Build the expression CODE OP0 of type TYPE with location LOC,
7394 simplifying it first if possible. Returns the built
7395 expression value and appends statements possibly defining it
7396 to SEQ. */
7398 tree
7399 gimple_build (gimple_seq *seq, location_t loc,
7400 enum tree_code code, tree type, tree op0)
7402 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7403 if (!res)
7405 res = create_tmp_reg_or_ssa_name (type);
7406 gimple *stmt;
7407 if (code == REALPART_EXPR
7408 || code == IMAGPART_EXPR
7409 || code == VIEW_CONVERT_EXPR)
7410 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7411 else
7412 stmt = gimple_build_assign (res, code, op0);
7413 gimple_set_location (stmt, loc);
7414 gimple_seq_add_stmt_without_update (seq, stmt);
7416 return res;
7419 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7420 simplifying it first if possible. Returns the built
7421 expression value and appends statements possibly defining it
7422 to SEQ. */
7424 tree
7425 gimple_build (gimple_seq *seq, location_t loc,
7426 enum tree_code code, tree type, tree op0, tree op1)
7428 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7429 if (!res)
7431 res = create_tmp_reg_or_ssa_name (type);
7432 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7433 gimple_set_location (stmt, loc);
7434 gimple_seq_add_stmt_without_update (seq, stmt);
7436 return res;
7439 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7440 simplifying it first if possible. Returns the built
7441 expression value and appends statements possibly defining it
7442 to SEQ. */
7444 tree
7445 gimple_build (gimple_seq *seq, location_t loc,
7446 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7448 tree res = gimple_simplify (code, type, op0, op1, op2,
7449 seq, gimple_build_valueize);
7450 if (!res)
7452 res = create_tmp_reg_or_ssa_name (type);
7453 gimple *stmt;
7454 if (code == BIT_FIELD_REF)
7455 stmt = gimple_build_assign (res, code,
7456 build3 (code, type, op0, op1, op2));
7457 else
7458 stmt = gimple_build_assign (res, code, op0, op1, op2);
7459 gimple_set_location (stmt, loc);
7460 gimple_seq_add_stmt_without_update (seq, stmt);
7462 return res;
7465 /* Build the call FN (ARG0) with a result of type TYPE
7466 (or no result if TYPE is void) with location LOC,
7467 simplifying it first if possible. Returns the built
7468 expression value (or NULL_TREE if TYPE is void) and appends
7469 statements possibly defining it to SEQ. */
7471 tree
7472 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7473 tree type, tree arg0)
7475 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7476 if (!res)
7478 gcall *stmt;
7479 if (internal_fn_p (fn))
7480 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7481 else
7483 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7484 stmt = gimple_build_call (decl, 1, arg0);
7486 if (!VOID_TYPE_P (type))
7488 res = create_tmp_reg_or_ssa_name (type);
7489 gimple_call_set_lhs (stmt, res);
7491 gimple_set_location (stmt, loc);
7492 gimple_seq_add_stmt_without_update (seq, stmt);
7494 return res;
7497 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7498 (or no result if TYPE is void) with location LOC,
7499 simplifying it first if possible. Returns the built
7500 expression value (or NULL_TREE if TYPE is void) and appends
7501 statements possibly defining it to SEQ. */
7503 tree
7504 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7505 tree type, tree arg0, tree arg1)
7507 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7508 if (!res)
7510 gcall *stmt;
7511 if (internal_fn_p (fn))
7512 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7513 else
7515 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7516 stmt = gimple_build_call (decl, 2, arg0, arg1);
7518 if (!VOID_TYPE_P (type))
7520 res = create_tmp_reg_or_ssa_name (type);
7521 gimple_call_set_lhs (stmt, res);
7523 gimple_set_location (stmt, loc);
7524 gimple_seq_add_stmt_without_update (seq, stmt);
7526 return res;
7529 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7530 (or no result if TYPE is void) with location LOC,
7531 simplifying it first if possible. Returns the built
7532 expression value (or NULL_TREE if TYPE is void) and appends
7533 statements possibly defining it to SEQ. */
7535 tree
7536 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7537 tree type, tree arg0, tree arg1, tree arg2)
7539 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7540 seq, gimple_build_valueize);
7541 if (!res)
7543 gcall *stmt;
7544 if (internal_fn_p (fn))
7545 stmt = gimple_build_call_internal (as_internal_fn (fn),
7546 3, arg0, arg1, arg2);
7547 else
7549 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7550 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7552 if (!VOID_TYPE_P (type))
7554 res = create_tmp_reg_or_ssa_name (type);
7555 gimple_call_set_lhs (stmt, res);
7557 gimple_set_location (stmt, loc);
7558 gimple_seq_add_stmt_without_update (seq, stmt);
7560 return res;
7563 /* Build the conversion (TYPE) OP with a result of type TYPE
7564 with location LOC if such conversion is neccesary in GIMPLE,
7565 simplifying it first.
7566 Returns the built expression value and appends
7567 statements possibly defining it to SEQ. */
7569 tree
7570 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7572 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7573 return op;
7574 return gimple_build (seq, loc, NOP_EXPR, type, op);
7577 /* Build the conversion (ptrofftype) OP with a result of a type
7578 compatible with ptrofftype with location LOC if such conversion
7579 is neccesary in GIMPLE, simplifying it first.
7580 Returns the built expression value and appends
7581 statements possibly defining it to SEQ. */
7583 tree
7584 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7586 if (ptrofftype_p (TREE_TYPE (op)))
7587 return op;
7588 return gimple_convert (seq, loc, sizetype, op);
7591 /* Build a vector of type TYPE in which each element has the value OP.
7592 Return a gimple value for the result, appending any new statements
7593 to SEQ. */
7595 tree
7596 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7597 tree op)
7599 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7600 && !CONSTANT_CLASS_P (op))
7601 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7603 tree res, vec = build_vector_from_val (type, op);
7604 if (is_gimple_val (vec))
7605 return vec;
7606 if (gimple_in_ssa_p (cfun))
7607 res = make_ssa_name (type);
7608 else
7609 res = create_tmp_reg (type);
7610 gimple *stmt = gimple_build_assign (res, vec);
7611 gimple_set_location (stmt, loc);
7612 gimple_seq_add_stmt_without_update (seq, stmt);
7613 return res;
7616 /* Build a vector from BUILDER, handling the case in which some elements
7617 are non-constant. Return a gimple value for the result, appending any
7618 new instructions to SEQ.
7620 BUILDER must not have a stepped encoding on entry. This is because
7621 the function is not geared up to handle the arithmetic that would
7622 be needed in the variable case, and any code building a vector that
7623 is known to be constant should use BUILDER->build () directly. */
7625 tree
7626 gimple_build_vector (gimple_seq *seq, location_t loc,
7627 tree_vector_builder *builder)
7629 gcc_assert (builder->nelts_per_pattern () <= 2);
7630 unsigned int encoded_nelts = builder->encoded_nelts ();
7631 for (unsigned int i = 0; i < encoded_nelts; ++i)
7632 if (!TREE_CONSTANT ((*builder)[i]))
7634 tree type = builder->type ();
7635 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7636 vec<constructor_elt, va_gc> *v;
7637 vec_alloc (v, nelts);
7638 for (i = 0; i < nelts; ++i)
7639 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7641 tree res;
7642 if (gimple_in_ssa_p (cfun))
7643 res = make_ssa_name (type);
7644 else
7645 res = create_tmp_reg (type);
7646 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7647 gimple_set_location (stmt, loc);
7648 gimple_seq_add_stmt_without_update (seq, stmt);
7649 return res;
7651 return builder->build ();
7654 /* Return true if the result of assignment STMT is known to be non-negative.
7655 If the return value is based on the assumption that signed overflow is
7656 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7657 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7659 static bool
7660 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7661 int depth)
7663 enum tree_code code = gimple_assign_rhs_code (stmt);
7664 switch (get_gimple_rhs_class (code))
7666 case GIMPLE_UNARY_RHS:
7667 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7668 gimple_expr_type (stmt),
7669 gimple_assign_rhs1 (stmt),
7670 strict_overflow_p, depth);
7671 case GIMPLE_BINARY_RHS:
7672 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7673 gimple_expr_type (stmt),
7674 gimple_assign_rhs1 (stmt),
7675 gimple_assign_rhs2 (stmt),
7676 strict_overflow_p, depth);
7677 case GIMPLE_TERNARY_RHS:
7678 return false;
7679 case GIMPLE_SINGLE_RHS:
7680 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7681 strict_overflow_p, depth);
7682 case GIMPLE_INVALID_RHS:
7683 break;
7685 gcc_unreachable ();
7688 /* Return true if return value of call STMT is known to be non-negative.
7689 If the return value is based on the assumption that signed overflow is
7690 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7691 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7693 static bool
7694 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7695 int depth)
7697 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7698 gimple_call_arg (stmt, 0) : NULL_TREE;
7699 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7700 gimple_call_arg (stmt, 1) : NULL_TREE;
7702 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7703 gimple_call_combined_fn (stmt),
7704 arg0,
7705 arg1,
7706 strict_overflow_p, depth);
7709 /* Return true if return value of call STMT is known to be non-negative.
7710 If the return value is based on the assumption that signed overflow is
7711 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7712 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7714 static bool
7715 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7716 int depth)
7718 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7720 tree arg = gimple_phi_arg_def (stmt, i);
7721 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7722 return false;
7724 return true;
7727 /* Return true if STMT is known to compute a non-negative value.
7728 If the return value is based on the assumption that signed overflow is
7729 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7730 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7732 bool
7733 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7734 int depth)
7736 switch (gimple_code (stmt))
7738 case GIMPLE_ASSIGN:
7739 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7740 depth);
7741 case GIMPLE_CALL:
7742 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7743 depth);
7744 case GIMPLE_PHI:
7745 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7746 depth);
7747 default:
7748 return false;
7752 /* Return true if the floating-point value computed by assignment STMT
7753 is known to have an integer value. We also allow +Inf, -Inf and NaN
7754 to be considered integer values. Return false for signaling NaN.
7756 DEPTH is the current nesting depth of the query. */
7758 static bool
7759 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7761 enum tree_code code = gimple_assign_rhs_code (stmt);
7762 switch (get_gimple_rhs_class (code))
7764 case GIMPLE_UNARY_RHS:
7765 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7766 gimple_assign_rhs1 (stmt), depth);
7767 case GIMPLE_BINARY_RHS:
7768 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7769 gimple_assign_rhs1 (stmt),
7770 gimple_assign_rhs2 (stmt), depth);
7771 case GIMPLE_TERNARY_RHS:
7772 return false;
7773 case GIMPLE_SINGLE_RHS:
7774 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7775 case GIMPLE_INVALID_RHS:
7776 break;
7778 gcc_unreachable ();
7781 /* Return true if the floating-point value computed by call STMT is known
7782 to have an integer value. We also allow +Inf, -Inf and NaN to be
7783 considered integer values. Return false for signaling NaN.
7785 DEPTH is the current nesting depth of the query. */
7787 static bool
7788 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7790 tree arg0 = (gimple_call_num_args (stmt) > 0
7791 ? gimple_call_arg (stmt, 0)
7792 : NULL_TREE);
7793 tree arg1 = (gimple_call_num_args (stmt) > 1
7794 ? gimple_call_arg (stmt, 1)
7795 : NULL_TREE);
7796 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7797 arg0, arg1, depth);
7800 /* Return true if the floating-point result of phi STMT is known to have
7801 an integer value. We also allow +Inf, -Inf and NaN to be considered
7802 integer values. Return false for signaling NaN.
7804 DEPTH is the current nesting depth of the query. */
7806 static bool
7807 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7809 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7811 tree arg = gimple_phi_arg_def (stmt, i);
7812 if (!integer_valued_real_single_p (arg, depth + 1))
7813 return false;
7815 return true;
7818 /* Return true if the floating-point value computed by STMT is known
7819 to have an integer value. We also allow +Inf, -Inf and NaN to be
7820 considered integer values. Return false for signaling NaN.
7822 DEPTH is the current nesting depth of the query. */
7824 bool
7825 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7827 switch (gimple_code (stmt))
7829 case GIMPLE_ASSIGN:
7830 return gimple_assign_integer_valued_real_p (stmt, depth);
7831 case GIMPLE_CALL:
7832 return gimple_call_integer_valued_real_p (stmt, depth);
7833 case GIMPLE_PHI:
7834 return gimple_phi_integer_valued_real_p (stmt, depth);
7835 default:
7836 return false;