[Ada] Add special bypass for obsolete code pattern
[official-gcc.git] / gcc / gimple-fold.c
blobfc57fb45e3ac080cf2e36b471ce48227dc8a91f7
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2019 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
/* Kinds of string-length queries get_range_strlen can answer.  */
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87 /* Return true when DECL can be referenced from current unit.
88 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
89 We can get declarations that are not possible to reference for various
90 reasons:
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
94 when they are keyed to other compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to reference
99 to method that was partitioned elsehwere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
102 declaring the body.
103 3) COMDAT functions referred by external vtables that
104 we devirtualize only during final compilation stage.
105 At this time we already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
109 static bool
110 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
112 varpool_node *vnode;
113 struct cgraph_node *node;
114 symtab_node *snode;
116 if (DECL_ABSTRACT_P (decl))
117 return false;
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
121 || !VAR_OR_FUNCTION_DECL_P (decl))
122 return true;
124 /* Static objects can be referred only if they are defined and not optimized
125 out yet. */
126 if (!TREE_PUBLIC (decl))
128 if (DECL_EXTERNAL (decl))
129 return false;
130 /* Before we start optimizing unreachable code we can be sure all
131 static objects are defined. */
132 if (symtab->function_flags_ready)
133 return true;
134 snode = symtab_node::get (decl);
135 if (!snode || !snode->definition)
136 return false;
137 node = dyn_cast <cgraph_node *> (snode);
138 return !node || !node->global.inlined_to;
141 /* We will later output the initializer, so we can refer to it.
142 So we are concerned only when DECL comes from initializer of
143 external var or var that has been optimized out. */
144 if (!from_decl
145 || !VAR_P (from_decl)
146 || (!DECL_EXTERNAL (from_decl)
147 && (vnode = varpool_node::get (from_decl)) != NULL
148 && vnode->definition)
149 || (flag_ltrans
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->in_other_partition))
152 return true;
153 /* We are folding reference from external vtable. The vtable may reffer
154 to a symbol keyed to other compilation unit. The other compilation
155 unit may be in separate DSO and the symbol may be hidden. */
156 if (DECL_VISIBILITY_SPECIFIED (decl)
157 && DECL_EXTERNAL (decl)
158 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
159 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
160 return false;
161 /* When function is public, we always can introduce new reference.
162 Exception are the COMDAT functions where introducing a direct
163 reference imply need to include function body in the curren tunit. */
164 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
165 return true;
166 /* We have COMDAT. We are going to check if we still have definition
167 or if the definition is going to be output in other partition.
168 Bypass this when gimplifying; all needed functions will be produced.
170 As observed in PR20991 for already optimized out comdat virtual functions
171 it may be tempting to not necessarily give up because the copy will be
172 output elsewhere when corresponding vtable is output.
173 This is however not possible - ABI specify that COMDATs are output in
174 units where they are used and when the other unit was compiled with LTO
175 it is possible that vtable was kept public while the function itself
176 was privatized. */
177 if (!symtab->function_flags_ready)
178 return true;
180 snode = symtab_node::get (decl);
181 if (!snode
182 || ((!snode->definition || DECL_EXTERNAL (decl))
183 && (!snode->in_other_partition
184 || (!snode->forced_by_abi && !snode->force_output))))
185 return false;
186 node = dyn_cast <cgraph_node *> (snode);
187 return !node || !node->global.inlined_to;
190 /* Create a temporary for TYPE for a statement STMT. If the current function
191 is in SSA form, a SSA name is created. Otherwise a temporary register
192 is made. */
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 if (gimple_in_ssa_p (cfun))
198 return make_ssa_name (type, stmt);
199 else
200 return create_tmp_reg (type);
203 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
204 acceptable form for is_gimple_min_invariant.
205 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
207 tree
208 canonicalize_constructor_val (tree cval, tree from_decl)
210 if (CONSTANT_CLASS_P (cval))
211 return cval;
213 tree orig_cval = cval;
214 STRIP_NOPS (cval);
215 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
216 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
218 tree ptr = TREE_OPERAND (cval, 0);
219 if (is_gimple_min_invariant (ptr))
220 cval = build1_loc (EXPR_LOCATION (cval),
221 ADDR_EXPR, TREE_TYPE (ptr),
222 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
223 ptr,
224 fold_convert (ptr_type_node,
225 TREE_OPERAND (cval, 1))));
227 if (TREE_CODE (cval) == ADDR_EXPR)
229 tree base = NULL_TREE;
230 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
232 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
233 if (base)
234 TREE_OPERAND (cval, 0) = base;
236 else
237 base = get_base_address (TREE_OPERAND (cval, 0));
238 if (!base)
239 return NULL_TREE;
241 if (VAR_OR_FUNCTION_DECL_P (base)
242 && !can_refer_decl_in_current_unit_p (base, from_decl))
243 return NULL_TREE;
244 if (TREE_TYPE (base) == error_mark_node)
245 return NULL_TREE;
246 if (VAR_P (base))
247 TREE_ADDRESSABLE (base) = 1;
248 else if (TREE_CODE (base) == FUNCTION_DECL)
250 /* Make sure we create a cgraph node for functions we'll reference.
251 They can be non-existent if the reference comes from an entry
252 of an external vtable for example. */
253 cgraph_node::get_create (base);
255 /* Fixup types in global initializers. */
256 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
257 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
259 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
260 cval = fold_convert (TREE_TYPE (orig_cval), cval);
261 return cval;
263 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
264 if (TREE_CODE (cval) == INTEGER_CST)
266 if (TREE_OVERFLOW_P (cval))
267 cval = drop_tree_overflow (cval);
268 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
269 cval = fold_convert (TREE_TYPE (orig_cval), cval);
270 return cval;
272 return orig_cval;
275 /* If SYM is a constant variable with known value, return the value.
276 NULL_TREE is returned otherwise. */
278 tree
279 get_symbol_constant_value (tree sym)
281 tree val = ctor_for_folding (sym);
282 if (val != error_mark_node)
284 if (val)
286 val = canonicalize_constructor_val (unshare_expr (val), sym);
287 if (val && is_gimple_min_invariant (val))
288 return val;
289 else
290 return NULL_TREE;
292 /* Variables declared 'const' without an initializer
293 have zero as the initializer if they may not be
294 overridden at link or run time. */
295 if (!val
296 && is_gimple_reg_type (TREE_TYPE (sym)))
297 return build_zero_cst (TREE_TYPE (sym));
300 return NULL_TREE;
305 /* Subroutine of fold_stmt. We perform several simplifications of the
306 memory reference tree EXPR and make sure to re-gimplify them properly
307 after propagation of constant addresses. IS_LHS is true if the
308 reference is supposed to be an lvalue. */
310 static tree
311 maybe_fold_reference (tree expr, bool is_lhs)
313 tree result;
315 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
316 || TREE_CODE (expr) == REALPART_EXPR
317 || TREE_CODE (expr) == IMAGPART_EXPR)
318 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 return fold_unary_loc (EXPR_LOCATION (expr),
320 TREE_CODE (expr),
321 TREE_TYPE (expr),
322 TREE_OPERAND (expr, 0));
323 else if (TREE_CODE (expr) == BIT_FIELD_REF
324 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
325 return fold_ternary_loc (EXPR_LOCATION (expr),
326 TREE_CODE (expr),
327 TREE_TYPE (expr),
328 TREE_OPERAND (expr, 0),
329 TREE_OPERAND (expr, 1),
330 TREE_OPERAND (expr, 2));
332 if (!is_lhs
333 && (result = fold_const_aggregate_ref (expr))
334 && is_gimple_min_invariant (result))
335 return result;
337 return NULL_TREE;
341 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
342 replacement rhs for the statement or NULL_TREE if no simplification
343 could be made. It is assumed that the operands have been previously
344 folded. */
346 static tree
347 fold_gimple_assign (gimple_stmt_iterator *si)
349 gimple *stmt = gsi_stmt (*si);
350 enum tree_code subcode = gimple_assign_rhs_code (stmt);
351 location_t loc = gimple_location (stmt);
353 tree result = NULL_TREE;
355 switch (get_gimple_rhs_class (subcode))
357 case GIMPLE_SINGLE_RHS:
359 tree rhs = gimple_assign_rhs1 (stmt);
361 if (TREE_CLOBBER_P (rhs))
362 return NULL_TREE;
364 if (REFERENCE_CLASS_P (rhs))
365 return maybe_fold_reference (rhs, false);
367 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
369 tree val = OBJ_TYPE_REF_EXPR (rhs);
370 if (is_gimple_min_invariant (val))
371 return val;
372 else if (flag_devirtualize && virtual_method_call_p (rhs))
374 bool final;
375 vec <cgraph_node *>targets
376 = possible_polymorphic_call_targets (rhs, stmt, &final);
377 if (final && targets.length () <= 1 && dbg_cnt (devirt))
379 if (dump_enabled_p ())
381 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
382 "resolving virtual function address "
383 "reference to function %s\n",
384 targets.length () == 1
385 ? targets[0]->name ()
386 : "NULL");
388 if (targets.length () == 1)
390 val = fold_convert (TREE_TYPE (val),
391 build_fold_addr_expr_loc
392 (loc, targets[0]->decl));
393 STRIP_USELESS_TYPE_CONVERSION (val);
395 else
396 /* We cannot use __builtin_unreachable here because it
397 cannot have address taken. */
398 val = build_int_cst (TREE_TYPE (val), 0);
399 return val;
404 else if (TREE_CODE (rhs) == ADDR_EXPR)
406 tree ref = TREE_OPERAND (rhs, 0);
407 tree tem = maybe_fold_reference (ref, true);
408 if (tem
409 && TREE_CODE (tem) == MEM_REF
410 && integer_zerop (TREE_OPERAND (tem, 1)))
411 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
412 else if (tem)
413 result = fold_convert (TREE_TYPE (rhs),
414 build_fold_addr_expr_loc (loc, tem));
415 else if (TREE_CODE (ref) == MEM_REF
416 && integer_zerop (TREE_OPERAND (ref, 1)))
417 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
419 if (result)
421 /* Strip away useless type conversions. Both the
422 NON_LVALUE_EXPR that may have been added by fold, and
423 "useless" type conversions that might now be apparent
424 due to propagation. */
425 STRIP_USELESS_TYPE_CONVERSION (result);
427 if (result != rhs && valid_gimple_rhs_p (result))
428 return result;
432 else if (TREE_CODE (rhs) == CONSTRUCTOR
433 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
435 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
436 unsigned i;
437 tree val;
439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
440 if (! CONSTANT_CLASS_P (val))
441 return NULL_TREE;
443 return build_vector_from_ctor (TREE_TYPE (rhs),
444 CONSTRUCTOR_ELTS (rhs));
447 else if (DECL_P (rhs))
448 return get_symbol_constant_value (rhs);
450 break;
452 case GIMPLE_UNARY_RHS:
453 break;
455 case GIMPLE_BINARY_RHS:
456 break;
458 case GIMPLE_TERNARY_RHS:
459 result = fold_ternary_loc (loc, subcode,
460 TREE_TYPE (gimple_assign_lhs (stmt)),
461 gimple_assign_rhs1 (stmt),
462 gimple_assign_rhs2 (stmt),
463 gimple_assign_rhs3 (stmt));
465 if (result)
467 STRIP_USELESS_TYPE_CONVERSION (result);
468 if (valid_gimple_rhs_p (result))
469 return result;
471 break;
473 case GIMPLE_INVALID_RHS:
474 gcc_unreachable ();
477 return NULL_TREE;
481 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
482 adjusting the replacement stmts location and virtual operands.
483 If the statement has a lhs the last stmt in the sequence is expected
484 to assign to that lhs. */
486 static void
487 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
489 gimple *stmt = gsi_stmt (*si_p);
491 if (gimple_has_location (stmt))
492 annotate_all_with_location (stmts, gimple_location (stmt));
494 /* First iterate over the replacement statements backward, assigning
495 virtual operands to their defining statements. */
496 gimple *laststore = NULL;
497 for (gimple_stmt_iterator i = gsi_last (stmts);
498 !gsi_end_p (i); gsi_prev (&i))
500 gimple *new_stmt = gsi_stmt (i);
501 if ((gimple_assign_single_p (new_stmt)
502 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
503 || (is_gimple_call (new_stmt)
504 && (gimple_call_flags (new_stmt)
505 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
507 tree vdef;
508 if (!laststore)
509 vdef = gimple_vdef (stmt);
510 else
511 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
512 gimple_set_vdef (new_stmt, vdef);
513 if (vdef && TREE_CODE (vdef) == SSA_NAME)
514 SSA_NAME_DEF_STMT (vdef) = new_stmt;
515 laststore = new_stmt;
519 /* Second iterate over the statements forward, assigning virtual
520 operands to their uses. */
521 tree reaching_vuse = gimple_vuse (stmt);
522 for (gimple_stmt_iterator i = gsi_start (stmts);
523 !gsi_end_p (i); gsi_next (&i))
525 gimple *new_stmt = gsi_stmt (i);
526 /* If the new statement possibly has a VUSE, update it with exact SSA
527 name we know will reach this one. */
528 if (gimple_has_mem_ops (new_stmt))
529 gimple_set_vuse (new_stmt, reaching_vuse);
530 gimple_set_modified (new_stmt, true);
531 if (gimple_vdef (new_stmt))
532 reaching_vuse = gimple_vdef (new_stmt);
535 /* If the new sequence does not do a store release the virtual
536 definition of the original statement. */
537 if (reaching_vuse
538 && reaching_vuse == gimple_vuse (stmt))
540 tree vdef = gimple_vdef (stmt);
541 if (vdef
542 && TREE_CODE (vdef) == SSA_NAME)
544 unlink_stmt_vdef (stmt);
545 release_ssa_name (vdef);
549 /* Finally replace the original statement with the sequence. */
550 gsi_replace_with_seq (si_p, stmts, false);
553 /* Convert EXPR into a GIMPLE value suitable for substitution on the
554 RHS of an assignment. Insert the necessary statements before
555 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
556 is replaced. If the call is expected to produces a result, then it
557 is replaced by an assignment of the new RHS to the result variable.
558 If the result is to be ignored, then the call is replaced by a
559 GIMPLE_NOP. A proper VDEF chain is retained by making the first
560 VUSE and the last VDEF of the whole sequence be the same as the replaced
561 statement and using new SSA names for stores in between. */
563 void
564 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
566 tree lhs;
567 gimple *stmt, *new_stmt;
568 gimple_stmt_iterator i;
569 gimple_seq stmts = NULL;
571 stmt = gsi_stmt (*si_p);
573 gcc_assert (is_gimple_call (stmt));
575 push_gimplify_context (gimple_in_ssa_p (cfun));
577 lhs = gimple_call_lhs (stmt);
578 if (lhs == NULL_TREE)
580 gimplify_and_add (expr, &stmts);
581 /* We can end up with folding a memcpy of an empty class assignment
582 which gets optimized away by C++ gimplification. */
583 if (gimple_seq_empty_p (stmts))
585 pop_gimplify_context (NULL);
586 if (gimple_in_ssa_p (cfun))
588 unlink_stmt_vdef (stmt);
589 release_defs (stmt);
591 gsi_replace (si_p, gimple_build_nop (), false);
592 return;
595 else
597 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
598 new_stmt = gimple_build_assign (lhs, tmp);
599 i = gsi_last (stmts);
600 gsi_insert_after_without_update (&i, new_stmt,
601 GSI_CONTINUE_LINKING);
604 pop_gimplify_context (NULL);
606 gsi_replace_with_seq_vops (si_p, stmts);
610 /* Replace the call at *GSI with the gimple value VAL. */
612 void
613 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
615 gimple *stmt = gsi_stmt (*gsi);
616 tree lhs = gimple_call_lhs (stmt);
617 gimple *repl;
618 if (lhs)
620 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
621 val = fold_convert (TREE_TYPE (lhs), val);
622 repl = gimple_build_assign (lhs, val);
624 else
625 repl = gimple_build_nop ();
626 tree vdef = gimple_vdef (stmt);
627 if (vdef && TREE_CODE (vdef) == SSA_NAME)
629 unlink_stmt_vdef (stmt);
630 release_ssa_name (vdef);
632 gsi_replace (gsi, repl, false);
635 /* Replace the call at *GSI with the new call REPL and fold that
636 again. */
638 static void
639 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
641 gimple *stmt = gsi_stmt (*gsi);
642 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
643 gimple_set_location (repl, gimple_location (stmt));
644 gimple_move_vops (repl, stmt);
645 gsi_replace (gsi, repl, false);
646 fold_stmt (gsi);
649 /* Return true if VAR is a VAR_DECL or a component thereof. */
651 static bool
652 var_decl_component_p (tree var)
654 tree inner = var;
655 while (handled_component_p (inner))
656 inner = TREE_OPERAND (inner, 0);
657 return (DECL_P (inner)
658 || (TREE_CODE (inner) == MEM_REF
659 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
662 /* Return TRUE if the SIZE argument, representing the size of an
663 object, is in a range of values of which exactly zero is valid. */
665 static bool
666 size_must_be_zero_p (tree size)
668 if (integer_zerop (size))
669 return true;
671 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
672 return false;
674 tree type = TREE_TYPE (size);
675 int prec = TYPE_PRECISION (type);
677 /* Compute the value of SSIZE_MAX, the largest positive value that
678 can be stored in ssize_t, the signed counterpart of size_t. */
679 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
680 value_range_base valid_range (VR_RANGE,
681 build_int_cst (type, 0),
682 wide_int_to_tree (type, ssize_max));
683 value_range_base vr;
684 get_range_info (size, vr);
685 vr.intersect (&valid_range);
686 return vr.zero_p ();
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. Call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
696 static bool
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
698 tree dest, tree src, enum built_in_function code)
700 gimple *stmt = gsi_stmt (*gsi);
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
703 tree destvar, srcvar;
704 location_t loc = gimple_location (stmt);
706 /* If the LEN parameter is a constant zero or in range where
707 the only valid value is zero, return DEST. */
708 if (size_must_be_zero_p (len))
710 gimple *repl;
711 if (gimple_call_lhs (stmt))
712 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
713 else
714 repl = gimple_build_nop ();
715 tree vdef = gimple_vdef (stmt);
716 if (vdef && TREE_CODE (vdef) == SSA_NAME)
718 unlink_stmt_vdef (stmt);
719 release_ssa_name (vdef);
721 gsi_replace (gsi, repl, false);
722 return true;
725 /* If SRC and DEST are the same (and not volatile), return
726 DEST{,+LEN,+LEN-1}. */
727 if (operand_equal_p (src, dest, 0))
729 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
730 It's safe and may even be emitted by GCC itself (see bug
731 32667). */
732 unlink_stmt_vdef (stmt);
733 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
734 release_ssa_name (gimple_vdef (stmt));
735 if (!lhs)
737 gsi_replace (gsi, gimple_build_nop (), false);
738 return true;
740 goto done;
742 else
744 tree srctype, desttype;
745 unsigned int src_align, dest_align;
746 tree off0;
747 const char *tmp_str;
748 unsigned HOST_WIDE_INT tmp_len;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2)
767 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
768 && memchr (tmp_str, 0, tmp_len) == NULL))
770 unsigned ilen = tree_to_uhwi (len);
771 if (pow2p_hwi (ilen))
773 /* Detect out-of-bounds accesses without issuing warnings.
774 Avoid folding out-of-bounds copies but to avoid false
775 positives for unreachable code defer warning until after
776 DCE has worked its magic.
777 -Wrestrict is still diagnosed. */
778 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
779 dest, src, len, len,
780 false, false))
781 if (warning != OPT_Wrestrict)
782 return false;
784 scalar_int_mode mode;
785 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
786 if (type
787 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
788 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
789 /* If the destination pointer is not aligned we must be able
790 to emit an unaligned store. */
791 && (dest_align >= GET_MODE_ALIGNMENT (mode)
792 || !targetm.slow_unaligned_access (mode, dest_align)
793 || (optab_handler (movmisalign_optab, mode)
794 != CODE_FOR_nothing)))
796 tree srctype = type;
797 tree desttype = type;
798 if (src_align < GET_MODE_ALIGNMENT (mode))
799 srctype = build_aligned_type (type, src_align);
800 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
801 tree tem = fold_const_aggregate_ref (srcmem);
802 if (tem)
803 srcmem = tem;
804 else if (src_align < GET_MODE_ALIGNMENT (mode)
805 && targetm.slow_unaligned_access (mode, src_align)
806 && (optab_handler (movmisalign_optab, mode)
807 == CODE_FOR_nothing))
808 srcmem = NULL_TREE;
809 if (srcmem)
811 gimple *new_stmt;
812 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
814 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
815 srcmem
816 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
817 new_stmt);
818 gimple_assign_set_lhs (new_stmt, srcmem);
819 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
820 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
822 if (dest_align < GET_MODE_ALIGNMENT (mode))
823 desttype = build_aligned_type (type, dest_align);
824 new_stmt
825 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
826 dest, off0),
827 srcmem);
828 gimple_move_vops (new_stmt, stmt);
829 if (!lhs)
831 gsi_replace (gsi, new_stmt, false);
832 return true;
834 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
835 goto done;
841 if (code == BUILT_IN_MEMMOVE)
843 /* Both DEST and SRC must be pointer types.
844 ??? This is what old code did. Is the testing for pointer types
845 really mandatory?
847 If either SRC is readonly or length is 1, we can use memcpy. */
848 if (!dest_align || !src_align)
849 return false;
850 if (readonly_data_expr (src)
851 || (tree_fits_uhwi_p (len)
852 && (MIN (src_align, dest_align) / BITS_PER_UNIT
853 >= tree_to_uhwi (len))))
855 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
856 if (!fn)
857 return false;
858 gimple_call_set_fndecl (stmt, fn);
859 gimple_call_set_arg (stmt, 0, dest);
860 gimple_call_set_arg (stmt, 1, src);
861 fold_stmt (gsi);
862 return true;
865 /* If *src and *dest can't overlap, optimize into memcpy as well. */
866 if (TREE_CODE (src) == ADDR_EXPR
867 && TREE_CODE (dest) == ADDR_EXPR)
869 tree src_base, dest_base, fn;
870 poly_int64 src_offset = 0, dest_offset = 0;
871 poly_uint64 maxsize;
873 srcvar = TREE_OPERAND (src, 0);
874 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
875 if (src_base == NULL)
876 src_base = srcvar;
877 destvar = TREE_OPERAND (dest, 0);
878 dest_base = get_addr_base_and_unit_offset (destvar,
879 &dest_offset);
880 if (dest_base == NULL)
881 dest_base = destvar;
882 if (!poly_int_tree_p (len, &maxsize))
883 maxsize = -1;
884 if (SSA_VAR_P (src_base)
885 && SSA_VAR_P (dest_base))
887 if (operand_equal_p (src_base, dest_base, 0)
888 && ranges_maybe_overlap_p (src_offset, maxsize,
889 dest_offset, maxsize))
890 return false;
892 else if (TREE_CODE (src_base) == MEM_REF
893 && TREE_CODE (dest_base) == MEM_REF)
895 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
896 TREE_OPERAND (dest_base, 0), 0))
897 return false;
898 poly_offset_int full_src_offset
899 = mem_ref_offset (src_base) + src_offset;
900 poly_offset_int full_dest_offset
901 = mem_ref_offset (dest_base) + dest_offset;
902 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
903 full_dest_offset, maxsize))
904 return false;
906 else
907 return false;
909 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
919 /* If the destination and source do not alias optimize into
920 memcpy as well. */
921 if ((is_gimple_min_invariant (dest)
922 || TREE_CODE (dest) == SSA_NAME)
923 && (is_gimple_min_invariant (src)
924 || TREE_CODE (src) == SSA_NAME))
926 ao_ref destr, srcr;
927 ao_ref_init_from_ptr_and_size (&destr, dest, len);
928 ao_ref_init_from_ptr_and_size (&srcr, src, len);
929 if (!refs_may_alias_p_1 (&destr, &srcr, false))
931 tree fn;
932 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
933 if (!fn)
934 return false;
935 gimple_call_set_fndecl (stmt, fn);
936 gimple_call_set_arg (stmt, 0, dest);
937 gimple_call_set_arg (stmt, 1, src);
938 fold_stmt (gsi);
939 return true;
943 return false;
946 if (!tree_fits_shwi_p (len))
947 return false;
948 if (!POINTER_TYPE_P (TREE_TYPE (src))
949 || !POINTER_TYPE_P (TREE_TYPE (dest)))
950 return false;
951 /* In the following try to find a type that is most natural to be
952 used for the memcpy source and destination and that allows
953 the most optimization when memcpy is turned into a plain assignment
954 using that type. In theory we could always use a char[len] type
955 but that only gains us that the destination and source possibly
956 no longer will have their address taken. */
957 srctype = TREE_TYPE (TREE_TYPE (src));
958 if (TREE_CODE (srctype) == ARRAY_TYPE
959 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
960 srctype = TREE_TYPE (srctype);
961 desttype = TREE_TYPE (TREE_TYPE (dest));
962 if (TREE_CODE (desttype) == ARRAY_TYPE
963 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
964 desttype = TREE_TYPE (desttype);
965 if (TREE_ADDRESSABLE (srctype)
966 || TREE_ADDRESSABLE (desttype))
967 return false;
969 /* Make sure we are not copying using a floating-point mode or
970 a type whose size possibly does not match its precision. */
971 if (FLOAT_MODE_P (TYPE_MODE (desttype))
972 || TREE_CODE (desttype) == BOOLEAN_TYPE
973 || TREE_CODE (desttype) == ENUMERAL_TYPE)
974 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
975 if (FLOAT_MODE_P (TYPE_MODE (srctype))
976 || TREE_CODE (srctype) == BOOLEAN_TYPE
977 || TREE_CODE (srctype) == ENUMERAL_TYPE)
978 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
979 if (!srctype)
980 srctype = desttype;
981 if (!desttype)
982 desttype = srctype;
983 if (!srctype)
984 return false;
986 src_align = get_pointer_alignment (src);
987 dest_align = get_pointer_alignment (dest);
988 if (dest_align < TYPE_ALIGN (desttype)
989 || src_align < TYPE_ALIGN (srctype))
990 return false;
992 destvar = NULL_TREE;
993 if (TREE_CODE (dest) == ADDR_EXPR
994 && var_decl_component_p (TREE_OPERAND (dest, 0))
995 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
996 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
998 srcvar = NULL_TREE;
999 if (TREE_CODE (src) == ADDR_EXPR
1000 && var_decl_component_p (TREE_OPERAND (src, 0))
1001 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1003 if (!destvar
1004 || src_align >= TYPE_ALIGN (desttype))
1005 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1006 src, off0);
1007 else if (!STRICT_ALIGNMENT)
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1015 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1016 return false;
1018 if (srcvar == NULL_TREE)
1020 if (src_align >= TYPE_ALIGN (desttype))
1021 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1022 else
1024 if (STRICT_ALIGNMENT)
1025 return false;
1026 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1027 src_align);
1028 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1031 else if (destvar == NULL_TREE)
1033 if (dest_align >= TYPE_ALIGN (srctype))
1034 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1035 else
1037 if (STRICT_ALIGNMENT)
1038 return false;
1039 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1040 dest_align);
1041 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1045 /* Same as above, detect out-of-bounds accesses without issuing
1046 warnings. Avoid folding out-of-bounds copies but to avoid
1047 false positives for unreachable code defer warning until
1048 after DCE has worked its magic.
1049 -Wrestrict is still diagnosed. */
1050 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1051 dest, src, len, len,
1052 false, false))
1053 if (warning != OPT_Wrestrict)
1054 return false;
1056 gimple *new_stmt;
1057 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1059 tree tem = fold_const_aggregate_ref (srcvar);
1060 if (tem)
1061 srcvar = tem;
1062 if (! is_gimple_min_invariant (srcvar))
1064 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1065 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1066 new_stmt);
1067 gimple_assign_set_lhs (new_stmt, srcvar);
1068 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1069 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1071 new_stmt = gimple_build_assign (destvar, srcvar);
1072 goto set_vop_and_replace;
1075 /* We get an aggregate copy. Use an unsigned char[] type to
1076 perform the copying to preserve padding and to avoid any issues
1077 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1078 desttype = build_array_type_nelts (unsigned_char_type_node,
1079 tree_to_uhwi (len));
1080 srctype = desttype;
1081 if (src_align > TYPE_ALIGN (srctype))
1082 srctype = build_aligned_type (srctype, src_align);
1083 if (dest_align > TYPE_ALIGN (desttype))
1084 desttype = build_aligned_type (desttype, dest_align);
1085 new_stmt
1086 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1087 fold_build2 (MEM_REF, srctype, src, off0));
1088 set_vop_and_replace:
1089 gimple_move_vops (new_stmt, stmt);
1090 if (!lhs)
1092 gsi_replace (gsi, new_stmt, false);
1093 return true;
1095 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 done:
1099 gimple_seq stmts = NULL;
1100 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1101 len = NULL_TREE;
1102 else if (code == BUILT_IN_MEMPCPY)
1104 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1105 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1106 TREE_TYPE (dest), dest, len);
1108 else
1109 gcc_unreachable ();
1111 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1112 gimple *repl = gimple_build_assign (lhs, dest);
1113 gsi_replace (gsi, repl, false);
1114 return true;
1117 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1118 to built-in memcmp (a, b, len). */
1120 static bool
1121 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1123 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1125 if (!fn)
1126 return false;
1128 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1130 gimple *stmt = gsi_stmt (*gsi);
1131 tree a = gimple_call_arg (stmt, 0);
1132 tree b = gimple_call_arg (stmt, 1);
1133 tree len = gimple_call_arg (stmt, 2);
1135 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1136 replace_call_with_call_and_fold (gsi, repl);
1138 return true;
1141 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1142 to built-in memmove (dest, src, len). */
1144 static bool
1145 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1147 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1149 if (!fn)
1150 return false;
1152 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1153 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1154 len) into memmove (dest, src, len). */
1156 gimple *stmt = gsi_stmt (*gsi);
1157 tree src = gimple_call_arg (stmt, 0);
1158 tree dest = gimple_call_arg (stmt, 1);
1159 tree len = gimple_call_arg (stmt, 2);
1161 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1162 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1163 replace_call_with_call_and_fold (gsi, repl);
1165 return true;
1168 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1169 to built-in memset (dest, 0, len). */
1171 static bool
1172 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1174 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1176 if (!fn)
1177 return false;
1179 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1181 gimple *stmt = gsi_stmt (*gsi);
1182 tree dest = gimple_call_arg (stmt, 0);
1183 tree len = gimple_call_arg (stmt, 1);
1185 gimple_seq seq = NULL;
1186 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1187 gimple_seq_add_stmt_without_update (&seq, repl);
1188 gsi_replace_with_seq_vops (gsi, seq);
1189 fold_stmt (gsi);
1191 return true;
1194 /* Fold function call to builtin memset or bzero at *GSI setting the
1195 memory of size LEN to VAL. Return whether a simplification was made. */
1197 static bool
1198 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1200 gimple *stmt = gsi_stmt (*gsi);
1201 tree etype;
1202 unsigned HOST_WIDE_INT length, cval;
1204 /* If the LEN parameter is zero, return DEST. */
1205 if (integer_zerop (len))
1207 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1208 return true;
1211 if (! tree_fits_uhwi_p (len))
1212 return false;
1214 if (TREE_CODE (c) != INTEGER_CST)
1215 return false;
1217 tree dest = gimple_call_arg (stmt, 0);
1218 tree var = dest;
1219 if (TREE_CODE (var) != ADDR_EXPR)
1220 return false;
1222 var = TREE_OPERAND (var, 0);
1223 if (TREE_THIS_VOLATILE (var))
1224 return false;
1226 etype = TREE_TYPE (var);
1227 if (TREE_CODE (etype) == ARRAY_TYPE)
1228 etype = TREE_TYPE (etype);
1230 if (!INTEGRAL_TYPE_P (etype)
1231 && !POINTER_TYPE_P (etype))
1232 return NULL_TREE;
1234 if (! var_decl_component_p (var))
1235 return NULL_TREE;
1237 length = tree_to_uhwi (len);
1238 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1239 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1240 return NULL_TREE;
1242 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1243 return NULL_TREE;
1245 if (integer_zerop (c))
1246 cval = 0;
1247 else
1249 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1250 return NULL_TREE;
1252 cval = TREE_INT_CST_LOW (c);
1253 cval &= 0xff;
1254 cval |= cval << 8;
1255 cval |= cval << 16;
1256 cval |= (cval << 31) << 1;
1259 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1260 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1261 gimple_move_vops (store, stmt);
1262 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1263 if (gimple_call_lhs (stmt))
1265 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1266 gsi_replace (gsi, asgn, false);
1268 else
1270 gimple_stmt_iterator gsi2 = *gsi;
1271 gsi_prev (gsi);
1272 gsi_remove (&gsi2, true);
1275 return true;
1278 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
/* Determines the length (or range of lengths) of the string ARG refers
   to and records it in *PDATA; RKIND and ELTSIZE are as documented on
   get_range_strlen.  Returns true when *PDATA was updated.  */
1280 static bool
1281 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1282 c_strlen_data *pdata, unsigned eltsize)
1284 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1286 /* The length computed by this invocation of the function. */
1287 tree val = NULL_TREE;
1289 /* True if VAL is an optimistic (tight) bound determined from
1290 the size of the character array in which the string may be
1291 stored. In that case, the computed VAL is used to set
1292 PDATA->MAXBOUND. */
1293 bool tight_bound = false;
1295 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1296 if (TREE_CODE (arg) == ADDR_EXPR
1297 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1299 tree op = TREE_OPERAND (arg, 0)
1300 if (integer_zerop (TREE_OPERAND (op, 1)))
/* &array[0]: recurse on the underlying pointer when the array is
   reached through an INDIRECT_REF of an SSA name.  */
1302 tree aop0 = TREE_OPERAND (op, 0);
1303 if (TREE_CODE (aop0) == INDIRECT_REF
1304 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1305 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1306 pdata, eltsize);
1308 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1309 && rkind == SRK_LENRANGE)
1311 /* Fail if an array is the last member of a struct object
1312 since it could be treated as a (fake) flexible array
1313 member. */
1314 tree idx = TREE_OPERAND (op, 1);
1316 arg = TREE_OPERAND (op, 0);
1317 tree optype = TREE_TYPE (arg);
1318 if (tree dom = TYPE_DOMAIN (optype))
1319 if (tree bound = TYPE_MAX_VALUE (dom))
1320 if (TREE_CODE (bound) == INTEGER_CST
1321 && TREE_CODE (idx) == INTEGER_CST
1322 && tree_int_cst_lt (bound, idx))
1323 return false;
1327 if (rkind == SRK_INT_VALUE)
1329 /* We are computing the maximum value (not string length). */
1330 val = arg;
1331 if (TREE_CODE (val) != INTEGER_CST
1332 || tree_int_cst_sgn (val) < 0)
1333 return false;
1335 else
1337 c_strlen_data lendata = { };
1338 val = c_strlen (arg, 1, &lendata, eltsize);
1340 if (!val && lendata.decl)
1342 /* ARG refers to an unterminated const character array.
1343 DATA.DECL with size DATA.LEN. */
1344 val = lendata.minlen;
1345 pdata->decl = lendata.decl;
/* c_strlen failed: for SRK_LENRANGE fall back to bounding the length
   by the size of the referenced array or object.  */
1349 if (!val && rkind == SRK_LENRANGE)
1351 if (TREE_CODE (arg) == ADDR_EXPR)
1352 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1353 pdata, eltsize);
1355 if (TREE_CODE (arg) == ARRAY_REF)
1357 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1359 /* Determine the "innermost" array type. */
1360 while (TREE_CODE (optype) == ARRAY_TYPE
1361 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1362 optype = TREE_TYPE (optype);
1364 /* Avoid arrays of pointers. */
1365 tree eltype = TREE_TYPE (optype);
1366 if (TREE_CODE (optype) != ARRAY_TYPE
1367 || !INTEGRAL_TYPE_P (eltype))
1368 return false;
1370 /* Fail when the array bound is unknown or zero. */
1371 val = TYPE_SIZE_UNIT (optype);
1372 if (!val || integer_zerop (val))
1373 return false;
1375 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1376 integer_one_node);
1378 /* Set the minimum size to zero since the string in
1379 the array could have zero length. */
1380 pdata->minlen = ssize_int (0);
1382 tight_bound = true;
1384 else if (TREE_CODE (arg) == COMPONENT_REF
1385 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1386 == ARRAY_TYPE))
1388 /* Use the type of the member array to determine the upper
1389 bound on the length of the array. This may be overly
1390 optimistic if the array itself isn't NUL-terminated and
1391 the caller relies on the subsequent member to contain
1392 the NUL but that would only be considered valid if
1393 the array were the last member of a struct. */
1395 tree fld = TREE_OPERAND (arg, 1);
1397 tree optype = TREE_TYPE (fld);
1399 /* Determine the "innermost" array type. */
1400 while (TREE_CODE (optype) == ARRAY_TYPE
1401 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1402 optype = TREE_TYPE (optype);
1404 /* Fail when the array bound is unknown or zero. */
1405 val = TYPE_SIZE_UNIT (optype);
1406 if (!val || integer_zerop (val))
1407 return false;
1408 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1409 integer_one_node);
1411 /* Set the minimum size to zero since the string in
1412 the array could have zero length. */
1413 pdata->minlen = ssize_int (0);
1415 /* The array size determined above is an optimistic bound
1416 on the length. If the array isn't nul-terminated the
1417 length computed by the library function would be greater.
1418 Even though using strlen to cross the subobject boundary
1419 is undefined, avoid drawing conclusions from the member
1420 type about the length here. */
1421 tight_bound = true;
1423 else if (VAR_P (arg))
1425 /* Avoid handling pointers to arrays. GCC might misuse
1426 a pointer to an array of one bound to point to an array
1427 object of a greater bound. */
1428 tree argtype = TREE_TYPE (arg);
1429 if (TREE_CODE (argtype) == ARRAY_TYPE)
1431 val = TYPE_SIZE_UNIT (argtype);
1432 if (!val
1433 || TREE_CODE (val) != INTEGER_CST
1434 || integer_zerop (val))
1435 return false;
1436 val = wide_int_to_tree (TREE_TYPE (val),
1437 wi::sub (wi::to_wide (val), 1));
1439 /* Set the minimum size to zero since the string in
1440 the array could have zero length. */
1441 pdata->minlen = ssize_int (0);
/* None of the strategies above produced a length: give up.  */
1446 if (!val)
1447 return false;
1449 /* Adjust the lower bound on the string length as necessary. */
1450 if (!pdata->minlen
1451 || (rkind != SRK_STRLEN
1452 && TREE_CODE (pdata->minlen) == INTEGER_CST
1453 && TREE_CODE (val) == INTEGER_CST
1454 && tree_int_cst_lt (val, pdata->minlen)))
1455 pdata->minlen = val;
1457 if (pdata->maxbound)
1459 /* Adjust the tighter (more optimistic) string length bound
1460 if necessary and proceed to adjust the more conservative
1461 bound. */
1462 if (TREE_CODE (val) == INTEGER_CST)
1464 if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
1466 if (tree_int_cst_lt (pdata->maxbound, val))
1467 pdata->maxbound = val;
1469 else
1470 pdata->maxbound = build_all_ones_cst (size_type_node);
1472 else
1473 pdata->maxbound = val;
1475 else
1476 pdata->maxbound = val;
1478 if (tight_bound)
1480 /* VAL computed above represents an optimistically tight bound
1481 on the length of the string based on the referenced object's
1482 or subobject's type. Determine the conservative upper bound
1483 based on the enclosing object's size if possible. */
1484 if (rkind == SRK_LENRANGE)
1486 poly_int64 offset;
1487 tree base = get_addr_base_and_unit_offset (arg, &offset);
1488 if (!base)
1490 /* When the call above fails due to a non-constant offset
1491 assume the offset is zero and use the size of the whole
1492 enclosing object instead. */
1493 base = get_base_address (arg);
1494 offset = 0;
1496 /* If the base object is a pointer no upper bound on the length
1497 can be determined. Otherwise the maximum length is equal to
1498 the size of the enclosing object minus the offset of
1499 the referenced subobject minus 1 (for the terminating nul). */
1500 tree type = TREE_TYPE (base);
1501 if (TREE_CODE (type) == POINTER_TYPE
1502 || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
1503 val = build_all_ones_cst (size_type_node);
1504 else
1506 val = DECL_SIZE_UNIT (base);
1507 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1508 size_int (offset + 1));
1511 else
1512 return false;
1515 if (pdata->maxlen)
1517 /* Adjust the more conservative bound if possible/necessary
1518 and fail otherwise. */
1519 if (rkind != SRK_STRLEN)
1521 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1522 || TREE_CODE (val) != INTEGER_CST)
1523 return false;
1525 if (tree_int_cst_lt (pdata->maxlen, val))
1526 pdata->maxlen = val;
1527 return true;
1529 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1531 /* Fail if the length of this ARG is different from that
1532 previously determined from another ARG. */
1533 return false;
/* First length determined: record it.  A MAXLEN of all-ones means
   "unbounded", which only SRK_LENRANGE callers accept.  */
1537 pdata->maxlen = val;
1538 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1541 /* For an ARG referencing one or more strings, try to obtain the range
1542 of their lengths, or the size of the largest array ARG referes to if
1543 the range of lengths cannot be determined, and store all in *PDATA.
1544 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1545 the maximum constant value.
1546 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1547 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1548 length or if we are unable to determine the length, return false.
1549 VISITED is a bitmap of visited variables.
1550 RKIND determines the kind of value or range to obtain (see
1551 strlen_range_kind).
1552 Set PDATA->DECL if ARG refers to an unterminated constant array.
1553 On input, set ELTSIZE to 1 for normal single byte character strings,
1554 and either 2 or 4 for wide characer strings (the size of wchar_t).
1555 Return true if *PDATA was successfully populated and false otherwise. */
1557 static bool
1558 get_range_strlen (tree arg, bitmap *visited,
1559 strlen_range_kind rkind,
1560 c_strlen_data *pdata, unsigned eltsize)
1563 if (TREE_CODE (arg) != SSA_NAME)
1564 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1566 /* If ARG is registered for SSA update we cannot look at its defining
1567 statement. */
1568 if (name_registered_for_update_p (arg))
1569 return false;
1571 /* If we were already here, break the infinite cycle. */
1572 if (!*visited)
1573 *visited = BITMAP_ALLOC (NULL);
1574 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1575 return true;
1577 tree var = arg;
1578 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1580 switch (gimple_code (def_stmt))
1582 case GIMPLE_ASSIGN:
1583 /* The RHS of the statement defining VAR must either have a
1584 constant length or come from another SSA_NAME with a constant
1585 length. */
1586 if (gimple_assign_single_p (def_stmt)
1587 || gimple_assign_unary_nop_p (def_stmt))
1589 tree rhs = gimple_assign_rhs1 (def_stmt);
1590 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1592 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1594 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1595 gimple_assign_rhs3 (def_stmt) };
1597 for (unsigned int i = 0; i < 2; i++)
1598 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1600 if (rkind != SRK_LENRANGE)
1601 return false;
1602 /* Set the upper bound to the maximum to prevent
1603 it from being adjusted in the next iteration but
1604 leave MINLEN and the more conservative MAXBOUND
1605 determined so far alone (or leave them null if
1606 they haven't been set yet). That the MINLEN is
1607 in fact zero can be determined from MAXLEN being
1608 unbounded but the discovered minimum is used for
1609 diagnostics. */
1610 pdata->maxlen = build_all_ones_cst (size_type_node);
1612 return true;
1614 return false;
1616 case GIMPLE_PHI:
1617 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1618 must have a constant length. */
1619 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1621 tree arg = gimple_phi_arg (def_stmt, i)->def;
1623 /* If this PHI has itself as an argument, we cannot
1624 determine the string length of this argument. However,
1625 if we can find a constant string length for the other
1626 PHI args then we can still be sure that this is a
1627 constant string length. So be optimistic and just
1628 continue with the next argument. */
1629 if (arg == gimple_phi_result (def_stmt))
1630 continue;
1632 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1634 if (rkind != SRK_LENRANGE)
1635 return false;
1636 /* Set the upper bound to the maximum to prevent
1637 it from being adjusted in the next iteration but
1638 leave MINLEN and the more conservative MAXBOUND
1639 determined so far alone (or leave them null if
1640 they haven't been set yet). That the MINLEN is
1641 in fact zero can be determined from MAXLEN being
1642 unbounded but the discovered minimum is used for
1643 diagnostics. */
1644 pdata->maxlen = build_all_ones_cst (size_type_node);
1647 return true;
1649 default:
1650 return false;
1654 /* Try to obtain the range of the lengths of the string(s) referenced
1655 by ARG, or the size of the largest array ARG refers to if the range
1656 of lengths cannot be determined, and store all in *PDATA. ELTSIZE
1657 is the expected size of the string element in bytes: 1 for char and
1658 some power of 2 for wide characters.
1659 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1660 for optimization. Returning false means that a nonzero PDATA->MINLEN
1661 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1662 is -1 (in that case, the actual range is indeterminate, i.e.,
1663 [0, PTRDIFF_MAX - 2]. */
1665 bool
1666 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1668 bitmap visited = NULL;
1670 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1672 /* On failure extend the length range to an impossible maximum
1673 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1674 members can stay unchanged regardless. */
1675 pdata->minlen = ssize_int (0);
1676 pdata->maxlen = build_all_ones_cst (size_type_node);
1678 else if (!pdata->minlen)
1679 pdata->minlen = ssize_int (0);
1681 /* Unless its null, leave the more conservative MAXBOUND unchanged. */
1682 if (!pdata->maxbound)
1683 pdata->maxbound = pdata->maxlen;
1685 if (visited)
1686 BITMAP_FREE (visited);
1688 return !integer_all_onesp (pdata->maxlen);
1691 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1692 For ARG of pointer types, NONSTR indicates if the caller is prepared
1693 to handle unterminated strings. For integer ARG and when RKIND ==
1694 SRK_INT_VALUE, NONSTR must be null.
1696 If an unterminated array is discovered and our caller handles
1697 unterminated arrays, then bubble up the offending DECL and
1698 return the maximum size. Otherwise return NULL. */
1700 static tree
1701 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1703 /* A non-null NONSTR is meaningless when determining the maximum
1704 value of an integer ARG. */
1705 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1706 /* ARG must have an integral type when RKIND says so. */
1707 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1709 bitmap visited = NULL;
1711 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1712 is unbounded. */
1713 c_strlen_data lendata = { };
1714 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1715 lendata.maxlen = NULL_TREE;
1716 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1717 lendata.maxlen = NULL_TREE;
1719 if (visited)
1720 BITMAP_FREE (visited);
1722 if (nonstr)
1724 /* For callers prepared to handle unterminated arrays set
1725 *NONSTR to point to the declaration of the array and return
1726 the maximum length/size. */
1727 *nonstr = lendata.decl;
1728 return lendata.maxlen;
1731 /* Fail if the constant array isn't nul-terminated. */
1732 return lendata.decl ? NULL_TREE : lendata.maxlen;
1736 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1737 If LEN is not NULL, it represents the length of the string to be
1738 copied. Return NULL_TREE if no simplification can be made. */
1740 static bool
1741 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1742 tree dest, tree src)
1744 gimple *stmt = gsi_stmt (*gsi);
1745 location_t loc = gimple_location (stmt);
1746 tree fn;
1748 /* If SRC and DEST are the same (and not volatile), return DEST. */
1749 if (operand_equal_p (src, dest, 0))
1751 /* Issue -Wrestrict unless the pointers are null (those do
1752 not point to objects and so do not indicate an overlap;
1753 such calls could be the result of sanitization and jump
1754 threading). */
1755 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1757 tree func = gimple_call_fndecl (stmt);
1759 warning_at (loc, OPT_Wrestrict,
1760 "%qD source argument is the same as destination",
1761 func);
1764 replace_call_with_value (gsi, dest);
1765 return true;
1768 if (optimize_function_for_size_p (cfun))
1769 return false;
1771 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1772 if (!fn)
1773 return false;
1775 /* Set to non-null if ARG refers to an unterminated array. */
1776 tree nonstr = NULL;
1777 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1779 if (nonstr)
1781 /* Avoid folding calls with unterminated arrays. */
1782 if (!gimple_no_warning_p (stmt))
1783 warn_string_no_nul (loc, "strcpy", src, nonstr);
1784 gimple_set_no_warning (stmt, true);
1785 return false;
1788 if (!len)
1789 return false;
1791 len = fold_convert_loc (loc, size_type_node, len);
1792 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1793 len = force_gimple_operand_gsi (gsi, len, true,
1794 NULL_TREE, true, GSI_SAME_STMT);
1795 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1796 replace_call_with_call_and_fold (gsi, repl);
1797 return true;
1800 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1801 If SLEN is not NULL, it represents the length of the source string.
1802 Return true if a simplification was made, false otherwise. */
1804 static bool
1805 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1806 tree dest, tree src, tree len)
1808 gimple *stmt = gsi_stmt (*gsi);
1809 location_t loc = gimple_location (stmt);
1810 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1812 /* If the LEN parameter is zero, return DEST. */
1813 if (integer_zerop (len))
1815 /* Avoid warning if the destination refers to an array/pointer
1816 declared with attribute nonstring. */
1817 if (!nonstring)
1819 tree fndecl = gimple_call_fndecl (stmt);
1821 /* Warn about the lack of nul termination: the result is not
1822 a (nul-terminated) string. */
1823 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1824 if (slen && !integer_zerop (slen))
1825 warning_at (loc, OPT_Wstringop_truncation,
1826 "%G%qD destination unchanged after copying no bytes "
1827 "from a string of length %E",
1828 stmt, fndecl, slen);
1829 else
1830 warning_at (loc, OPT_Wstringop_truncation,
1831 "%G%qD destination unchanged after copying no bytes",
1832 stmt, fndecl);
1835 replace_call_with_value (gsi, dest);
1836 return true;
1839 /* We can't compare slen with len as constants below if len is not a
1840 constant. */
1841 if (TREE_CODE (len) != INTEGER_CST)
1842 return false;
1844 /* Now, we must be passed a constant src ptr parameter. */
1845 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1846 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1847 return false;
1849 /* The size of the source string including the terminating nul. */
1850 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1852 /* We do not support simplification of the case where LEN exceeds the
1853 whole source (strncpy would then zero-fill the remainder), though we
1854 do support it when expanding trees into RTL. */
1855 /* FIXME: generate a call to __builtin_memset. */
1856 if (tree_int_cst_lt (ssize, len))
1857 return false;
1858 /* Diagnose truncation that leaves the copy unterminated. */
1859 maybe_diag_stxncpy_trunc (*gsi, src, len);
1861 /* OK transform into builtin memcpy: LEN >= SSIZE here, so the copy
1862 of LEN bytes covers the nul and no zero padding is needed. */
1863 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1864 if (!fn)
1865 return false;
1866 len = fold_convert_loc (loc, size_type_node, len);
1867 len = force_gimple_operand_gsi (gsi, len, true,
1868 NULL_TREE, true, GSI_SAME_STMT);
1869 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1870 replace_call_with_call_and_fold (gsi, repl);
1872 return true;
1875 /* Fold function call to builtin strchr or strrchr.
1876 If both arguments are constant, evaluate and fold the result,
1877 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1878 In general strlen is significantly faster than strchr
1879 due to being a simpler operation. */
1880 static bool
1881 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1883 gimple *stmt = gsi_stmt (*gsi);
1884 tree str = gimple_call_arg (stmt, 0);
1885 tree c = gimple_call_arg (stmt, 1);
1886 location_t loc = gimple_location (stmt);
1887 const char *p;
1888 char ch;
1890 if (!gimple_call_lhs (stmt))
1891 return false;
1893 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1895 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1897 if (p1 == NULL)
1899 replace_call_with_value (gsi, integer_zero_node);
1900 return true;
1903 tree len = build_int_cst (size_type_node, p1 - p);
1904 gimple_seq stmts = NULL;
1905 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1906 POINTER_PLUS_EXPR, str, len);
1907 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1908 gsi_replace_with_seq_vops (gsi, stmts);
1909 return true;
1912 if (!integer_zerop (c))
1913 return false;
1915 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1916 if (is_strrchr && optimize_function_for_size_p (cfun))
1918 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1920 if (strchr_fn)
1922 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1923 replace_call_with_call_and_fold (gsi, repl);
1924 return true;
1927 return false;
1930 tree len;
1931 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1933 if (!strlen_fn)
1934 return false;
1936 /* Create newstr = strlen (str). */
1937 gimple_seq stmts = NULL;
1938 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1939 gimple_set_location (new_stmt, loc);
1940 len = create_tmp_reg_or_ssa_name (size_type_node);
1941 gimple_call_set_lhs (new_stmt, len);
1942 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1944 /* Create (str p+ strlen (str)). */
1945 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1946 POINTER_PLUS_EXPR, str, len);
1947 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1948 gsi_replace_with_seq_vops (gsi, stmts);
1949 /* gsi now points at the assignment to the lhs, get a
1950 stmt iterator to the strlen.
1951 ??? We can't use gsi_for_stmt as that doesn't work when the
1952 CFG isn't built yet. */
1953 gimple_stmt_iterator gsi2 = *gsi;
1954 gsi_prev (&gsi2);
1955 fold_stmt (&gsi2);
1956 return true;
1959 /* Fold function call to builtin strstr.
1960 If both arguments are constant, evaluate and fold the result,
1961 additionally fold strstr (x, "") into x and strstr (x, "c")
1962 into strchr (x, 'c'). */
1963 static bool
1964 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1966 gimple *stmt = gsi_stmt (*gsi);
1967 tree haystack = gimple_call_arg (stmt, 0);
1968 tree needle = gimple_call_arg (stmt, 1);
1969 const char *p, *q;
1971 if (!gimple_call_lhs (stmt))
1972 return false;
1974 q = c_getstr (needle);
1975 if (q == NULL)
1976 return false;
1978 if ((p = c_getstr (haystack)))
1980 const char *r = strstr (p, q);
1982 if (r == NULL)
1984 replace_call_with_value (gsi, integer_zero_node);
1985 return true;
1988 tree len = build_int_cst (size_type_node, r - p);
1989 gimple_seq stmts = NULL;
1990 gimple *new_stmt
1991 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1992 haystack, len);
1993 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1994 gsi_replace_with_seq_vops (gsi, stmts);
1995 return true;
1998 /* For strstr (x, "") return x. */
1999 if (q[0] == '\0')
2001 replace_call_with_value (gsi, haystack);
2002 return true;
2005 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2006 if (q[1] == '\0')
2008 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2009 if (strchr_fn)
2011 tree c = build_int_cst (integer_type_node, q[0]);
2012 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2013 replace_call_with_call_and_fold (gsi, repl);
2014 return true;
2018 return false;
2021 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2022 to the call.
2024 Return NULL_TREE if no simplification was possible, otherwise return the
2025 simplified form of the call as a tree.
2027 The simplified form may be a constant or other expression which
2028 computes the same value, but in a more efficient manner (including
2029 calls to other builtin functions).
2031 The call may contain arguments which need to be evaluated, but
2032 which are not useful to determine the result of the call. In
2033 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2034 COMPOUND_EXPR will be an argument which must be evaluated.
2035 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2036 COMPOUND_EXPR in the chain will contain the tree for the simplified
2037 form of the builtin function call. */
2039 static bool
2040 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2042   gimple *stmt = gsi_stmt (*gsi);
2043   location_t loc = gimple_location (stmt);
2045   const char *p = c_getstr (src);
2047   /* If the string length is zero, return the dst parameter.  */
2048   if (p && *p == '\0')
2050       replace_call_with_value (gsi, dst);
2051       return true;
     /* The strlen + memcpy expansion below trades code size for speed,
	so only perform it when this block is optimized for speed.  */
2054   if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2055     return false;
2057   /* See if we can store by pieces into (dst + strlen(dst)).  */
2058   tree newdst;
2059   tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2060   tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2062   if (!strlen_fn || !memcpy_fn)
2063     return false;
2065   /* If the length of the source string isn't computable don't
2066      split strcat into strlen and memcpy.  */
2067   tree len = get_maxval_strlen (src, SRK_STRLEN);
2068   if (! len)
2069     return false;
2071   /* Create strlen (dst).  */
2072   gimple_seq stmts = NULL, stmts2;
2073   gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2074   gimple_set_location (repl, loc);
2075   newdst = create_tmp_reg_or_ssa_name (size_type_node);
2076   gimple_call_set_lhs (repl, newdst);
2077   gimple_seq_add_stmt_without_update (&stmts, repl);
2079   /* Create (dst p+ strlen (dst)).  */
2080   newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2081   newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2082   gimple_seq_add_seq_without_update (&stmts, stmts2);
     /* Copy LEN + 1 bytes so the terminating NUL of SRC is included.  */
2084   len = fold_convert_loc (loc, size_type_node, len);
2085   len = size_binop_loc (loc, PLUS_EXPR, len,
2086 			build_int_cst (size_type_node, 1));
2087   len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2088   gimple_seq_add_seq_without_update (&stmts, stmts2);
2090   repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2091   gimple_seq_add_stmt_without_update (&stmts, repl);
2092   if (gimple_call_lhs (stmt))
     /* The original call's result is DST; materialize it as an assignment.  */
2094       repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2095       gimple_seq_add_stmt_without_update (&stmts, repl);
2096       gsi_replace_with_seq_vops (gsi, stmts);
2097       /* gsi now points at the assignment to the lhs, get a
2098 	 stmt iterator to the memcpy call.
2099 	 ???  We can't use gsi_for_stmt as that doesn't work when the
2100 	 CFG isn't built yet.  */
2101       gimple_stmt_iterator gsi2 = *gsi;
2102       gsi_prev (&gsi2);
2103       fold_stmt (&gsi2);
2105   else
2107       gsi_replace_with_seq_vops (gsi, stmts);
2108       fold_stmt (gsi);
2110   return true;
2113 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2114 are the arguments to the call. */
2116 static bool
2117 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2119   gimple *stmt = gsi_stmt (*gsi);
2120   tree dest = gimple_call_arg (stmt, 0);
2121   tree src = gimple_call_arg (stmt, 1);
2122   tree size = gimple_call_arg (stmt, 2);
2123   tree fn;
2124   const char *p;
2127   p = c_getstr (src);
2128   /* If the SRC parameter is "", return DEST.  */
2129   if (p && *p == '\0')
2131       replace_call_with_value (gsi, dest);
2132       return true;
     /* Only drop the check when SIZE is the all-ones constant, i.e. the
	destination object size could not be determined.  */
2135   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2136     return false;
2138   /* If __builtin_strcat_chk is used, assume strcat is available.  */
2139   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2140   if (!fn)
2141     return false;
2143   gimple *repl = gimple_build_call (fn, 2, dest, src);
2144   replace_call_with_call_and_fold (gsi, repl);
2145   return true;
2148 /* Simplify a call to the strncat builtin. */
2150 static bool
2151 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2153   gimple *stmt = gsi_stmt (*gsi);
2154   tree dst = gimple_call_arg (stmt, 0);
2155   tree src = gimple_call_arg (stmt, 1);
2156   tree len = gimple_call_arg (stmt, 2);
2158   const char *p = c_getstr (src);
2160   /* If the requested length is zero, or the src parameter string
2161      length is zero, return the dst parameter.  */
2162   if (integer_zerop (len) || (p && *p == '\0'))
2164       replace_call_with_value (gsi, dst);
2165       return true;
     /* All remaining folds need a constant bound and a known source string.  */
2168   if (TREE_CODE (len) != INTEGER_CST || !p)
2169     return false;
2171   unsigned srclen = strlen (p);
2173   int cmpsrc = compare_tree_int (len, srclen);
2175   /* Return early if the requested len is less than the string length.
2176      Warnings will be issued elsewhere later.  */
2177   if (cmpsrc < 0)
2178     return false;
2180   unsigned HOST_WIDE_INT dstsize;
2182   bool nowarn = gimple_no_warning_p (stmt);
2184   if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2186       int cmpdst = compare_tree_int (len, dstsize);
2188       if (cmpdst >= 0)
2190 	  tree fndecl = gimple_call_fndecl (stmt);
2192 	  /* Strncat copies (at most) LEN bytes and always appends
2193 	     the terminating NUL so the specified bound should never
2194 	     be equal to (or greater than) the size of the destination.
2195 	     If it is, the copy could overflow.  */
2196 	  location_t loc = gimple_location (stmt);
2197 	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2198 			       cmpdst == 0
2199 			       ? G_("%G%qD specified bound %E equals "
2200 				    "destination size")
2201 			       : G_("%G%qD specified bound %E exceeds "
2202 				    "destination size %wu"),
2203 			       stmt, fndecl, len, dstsize);
2204 	  if (nowarn)
2205 	    gimple_set_no_warning (stmt, true);
2209   if (!nowarn && cmpsrc == 0)
2211       tree fndecl = gimple_call_fndecl (stmt);
2212       location_t loc = gimple_location (stmt);
2214       /* To avoid possible overflow the specified bound should also
2215 	 not be equal to the length of the source, even when the size
2216 	 of the destination is unknown (it's not an uncommon mistake
2217 	 to specify as the bound to strncpy the length of the source).  */
2218       if (warning_at (loc, OPT_Wstringop_overflow_,
2219 		      "%G%qD specified bound %E equals source length",
2220 		      stmt, fndecl, len))
2221 	gimple_set_no_warning (stmt, true);
     /* At this point LEN >= strlen (SRC), so strncat behaves like strcat.  */
2224   tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2226   /* If the replacement _DECL isn't initialized, don't do the
2227      transformation.  */
2228   if (!fn)
2229     return false;
2231   /* Otherwise, emit a call to strcat.  */
2232   gcall *repl = gimple_build_call (fn, 2, dst, src);
2233   replace_call_with_call_and_fold (gsi, repl);
2234   return true;
2237 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2238 LEN, and SIZE. */
2240 static bool
2241 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2243   gimple *stmt = gsi_stmt (*gsi);
2244   tree dest = gimple_call_arg (stmt, 0);
2245   tree src = gimple_call_arg (stmt, 1);
2246   tree len = gimple_call_arg (stmt, 2);
2247   tree size = gimple_call_arg (stmt, 3);
2248   tree fn;
2249   const char *p;
2251   p = c_getstr (src);
2252   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
2253   if ((p && *p == '\0')
2254       || integer_zerop (len))
2256       replace_call_with_value (gsi, dest);
2257       return true;
2260   if (! tree_fits_uhwi_p (size))
2261     return false;
     /* A SIZE other than all-ones means the object size is known, so the
	check must be preserved; at best strncat_chk -> strcat_chk.  */
2263   if (! integer_all_onesp (size))
2265       tree src_len = c_strlen (src, 1);
2266       if (src_len
2267 	  && tree_fits_uhwi_p (src_len)
2268 	  && tree_fits_uhwi_p (len)
2269 	  && ! tree_int_cst_lt (len, src_len))
2271 	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
2272 	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2273 	  if (!fn)
2274 	    return false;
2276 	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2277 	  replace_call_with_call_and_fold (gsi, repl);
2278 	  return true;
2280       return false;
2283   /* If __builtin_strncat_chk is used, assume strncat is available.  */
2284   fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2285   if (!fn)
2286     return false;
2288   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2289   replace_call_with_call_and_fold (gsi, repl);
2290   return true;
2293 /* Build and append gimple statements to STMTS that would load a first
2294 character of a memory location identified by STR. LOC is location
2295 of the statement. */
2297 static tree
2298 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2300   tree var;
     /* Load through a const unsigned char * so the comparison semantics
	match the C library's str*cmp contract (unsigned byte values).  */
2302   tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2303   tree cst_uchar_ptr_node
2304     = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2305   tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2307   tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2308   gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2309   var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2311   gimple_assign_set_lhs (stmt, var);
2312   gimple_seq_add_stmt_without_update (stmts, stmt);
2314   return var;
2317 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2318 FCODE is the name of the builtin. */
2320 static bool
2321 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2323   gimple *stmt = gsi_stmt (*gsi);
2324   tree callee = gimple_call_fndecl (stmt);
2325   enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2327   tree type = integer_type_node;
2328   tree str1 = gimple_call_arg (stmt, 0);
2329   tree str2 = gimple_call_arg (stmt, 1);
2330   tree lhs = gimple_call_lhs (stmt);
     /* LENGTH stays -1 when there is no constant bound (strcmp/strcasecmp,
	or a non-constant third argument).  */
2331   HOST_WIDE_INT length = -1;
2333   /* Handle strncmp and strncasecmp functions.  */
2334   if (gimple_call_num_args (stmt) == 3)
2336       tree len = gimple_call_arg (stmt, 2);
2337       if (tree_fits_uhwi_p (len))
2338 	length = tree_to_uhwi (len);
2341   /* If the LEN parameter is zero, return zero.  */
2342   if (length == 0)
2344       replace_call_with_value (gsi, integer_zero_node);
2345       return true;
2348   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
2349   if (operand_equal_p (str1, str2, 0))
2351       replace_call_with_value (gsi, integer_zero_node);
2352       return true;
2355   const char *p1 = c_getstr (str1);
2356   const char *p2 = c_getstr (str2);
2358   /* For known strings, return an immediate value.  */
2359   if (p1 && p2)
2361       int r = 0;
2362       bool known_result = false;
2364       switch (fcode)
2366 	case BUILT_IN_STRCMP:
2367 	case BUILT_IN_STRCMP_EQ:
2369 	    r = strcmp (p1, p2);
2370 	    known_result = true;
2371 	    break;
2373 	case BUILT_IN_STRNCMP:
2374 	case BUILT_IN_STRNCMP_EQ:
2376 	    if (length == -1)
2377 	      break;
2378 	    r = strncmp (p1, p2, length);
2379 	    known_result = true;
2380 	    break;
2382 	/* Only handleable situation is where the string are equal (result 0),
2383 	   which is already handled by operand_equal_p case.  */
2384 	case BUILT_IN_STRCASECMP:
2385 	  break;
2386 	case BUILT_IN_STRNCASECMP:
2388 	    if (length == -1)
2389 	      break;
     /* Case-sensitive strncmp can only prove the case-insensitive result
	when it is 0 (equal); any other value might change under casing.  */
2390 	    r = strncmp (p1, p2, length);
2391 	    if (r == 0)
2392 	      known_result = true;
2393 	    break;
2395 	default:
2396 	  gcc_unreachable ();
2399       if (known_result)
2401 	  replace_call_with_value (gsi, build_cmp_result (type, r));
2402 	  return true;
     /* True when at least one byte is certainly compared, so a ""
	argument determines the result from the other string's first byte.  */
2406   bool nonzero_length = length >= 1
2407     || fcode == BUILT_IN_STRCMP
2408     || fcode == BUILT_IN_STRCMP_EQ
2409     || fcode == BUILT_IN_STRCASECMP;
2411   location_t loc = gimple_location (stmt);
2413   /* If the second arg is "", return *(const unsigned char*)arg1.  */
2414   if (p2 && *p2 == '\0' && nonzero_length)
2416       gimple_seq stmts = NULL;
2417       tree var = gimple_load_first_char (loc, str1, &stmts);
2418       if (lhs)
2420 	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2421 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2424       gsi_replace_with_seq_vops (gsi, stmts);
2425       return true;
2428   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
2429   if (p1 && *p1 == '\0' && nonzero_length)
2431       gimple_seq stmts = NULL;
2432       tree var = gimple_load_first_char (loc, str2, &stmts);
2434       if (lhs)
2436 	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2437 	  stmt = gimple_build_assign (c, NOP_EXPR, var);
2438 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2440 	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2441 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2444       gsi_replace_with_seq_vops (gsi, stmts);
2445       return true;
2448   /* If len parameter is one, return an expression corresponding to
2449      (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
2450   if (fcode == BUILT_IN_STRNCMP && length == 1)
2452       gimple_seq stmts = NULL;
2453       tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2454       tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2456       if (lhs)
2458 	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2459 	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2460 	  gimple_seq_add_stmt_without_update (&stmts, convert1);
2462 	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2463 	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2464 	  gimple_seq_add_stmt_without_update (&stmts, convert2);
2466 	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2467 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2470       gsi_replace_with_seq_vops (gsi, stmts);
2471       return true;
2474   /* If length is larger than the length of one constant string,
2475      replace strncmp with corresponding strcmp */
2476   if (fcode == BUILT_IN_STRNCMP
2477       && length > 0
2478       && ((p2 && (size_t) length > strlen (p2))
2479 	  || (p1 && (size_t) length > strlen (p1))))
2481       tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2482       if (!fn)
2483 	return false;
2484       gimple *repl = gimple_build_call (fn, 2, str1, str2);
2485       replace_call_with_call_and_fold (gsi, repl);
2486       return true;
2489   return false;
2492 /* Fold a call to the memchr pointed by GSI iterator. */
2494 static bool
2495 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2497   gimple *stmt = gsi_stmt (*gsi);
2498   tree lhs = gimple_call_lhs (stmt);
2499   tree arg1 = gimple_call_arg (stmt, 0);
2500   tree arg2 = gimple_call_arg (stmt, 1);
2501   tree len = gimple_call_arg (stmt, 2);
2503   /* If the LEN parameter is zero, return zero.  */
2504   if (integer_zerop (len))
2506       replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2507       return true;
     /* Need a constant searched-for character and a constant length.  */
2510   char c;
2511   if (TREE_CODE (arg2) != INTEGER_CST
2512       || !tree_fits_uhwi_p (len)
2513       || !target_char_cst_p (arg2, &c))
2514     return false;
2516   unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2517   unsigned HOST_WIDE_INT string_length;
2518   const char *p1 = c_getstr (arg1, &string_length);
2520   if (p1)
     /* Search only within the known string; a hit there is definitive,
	a miss is only definitive if LEN stays within the backing array.  */
2522       const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2523       if (r == NULL)
2525 	  tree mem_size, offset_node;
2526 	  string_constant (arg1, &offset_node, &mem_size, NULL);
2527 	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2528 					  ? 0 : tree_to_uhwi (offset_node);
2529 	  /* MEM_SIZE is the size of the array the string literal
2530 	     is stored in.  */
2531 	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2532 	  gcc_checking_assert (string_length <= string_size);
2533 	  if (length <= string_size)
2535 	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2536 	      return true;
2539       else
2541 	  unsigned HOST_WIDE_INT offset = r - p1;
2542 	  gimple_seq stmts = NULL;
2543 	  if (lhs != NULL_TREE)
2545 	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2546 	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2547 						   arg1, offset_cst);
2548 	      gimple_seq_add_stmt_without_update (&stmts, stmt);
2550 	  else
2551 	    gimple_seq_add_stmt_without_update (&stmts,
2552 						gimple_build_nop ());
2554 	  gsi_replace_with_seq_vops (gsi, stmts);
2555 	  return true;
2559   return false;
2562 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2563 to the call. IGNORE is true if the value returned
2564 by the builtin will be ignored. UNLOCKED is true is true if this
2565 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2566 the known length of the string. Return NULL_TREE if no simplification
2567 was possible. */
2569 static bool
2570 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2571 			   tree arg0, tree arg1,
2572 			   bool unlocked)
2574   gimple *stmt = gsi_stmt (*gsi);
2576   /* If we're using an unlocked function, assume the other unlocked
2577      functions exist explicitly.  */
2578   tree const fn_fputc = (unlocked
2579 			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2580 			 : builtin_decl_implicit (BUILT_IN_FPUTC));
2581   tree const fn_fwrite = (unlocked
2582 			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2583 			  : builtin_decl_implicit (BUILT_IN_FWRITE))
2585   /* If the return value is used, don't do the transformation.  */
2586   if (gimple_call_lhs (stmt))
2587     return false;
2589   /* Get the length of the string passed to fputs.  If the length
2590      can't be determined, punt.  */
2591   tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2592   if (!len
2593       || TREE_CODE (len) != INTEGER_CST)
2594     return false;
     /* Dispatch on the string length relative to 1: 0, exactly 1, or >1.  */
2596   switch (compare_tree_int (len, 1))
2598     case -1: /* length is 0, delete the call entirely .  */
2599       replace_call_with_value (gsi, integer_zero_node);
2600       return true;
2602     case 0: /* length is 1, call fputc.  */
2604 	const char *p = c_getstr (arg0);
2605 	if (p != NULL)
2607 	    if (!fn_fputc)
2608 	      return false;
2610 	    gimple *repl = gimple_build_call (fn_fputc, 2,
2611 					      build_int_cst
2612 					      (integer_type_node, p[0]), arg1);
2613 	    replace_call_with_call_and_fold (gsi, repl);
2614 	    return true;
2617       /* FALLTHROUGH */
2618     case 1: /* length is greater than 1, call fwrite.  */
2620 	/* If optimizing for size keep fputs.  */
2621 	if (optimize_function_for_size_p (cfun))
2622 	  return false;
2623 	/* New argument list transforming fputs(string, stream) to
2624 	   fwrite(string, 1, len, stream).  */
2625 	if (!fn_fwrite)
2626 	  return false;
2628 	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2629 					  size_one_node, len, arg1);
2630 	replace_call_with_call_and_fold (gsi, repl);
2631 	return true;
2633     default:
2634       gcc_unreachable ();
2636   return false;
2639 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2640 DEST, SRC, LEN, and SIZE are the arguments to the call.
2641 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2642 code of the builtin. If MAXLEN is not NULL, it is maximum length
2643 passed as third argument. */
2645 static bool
2646 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2647 				tree dest, tree src, tree len, tree size,
2648 				enum built_in_function fcode)
2650   gimple *stmt = gsi_stmt (*gsi);
2651   location_t loc = gimple_location (stmt);
2652   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2653   tree fn;
2655   /* If SRC and DEST are the same (and not volatile), return DEST
2656      (resp. DEST+LEN for __mempcpy_chk).  */
2657   if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2659       if (fcode != BUILT_IN_MEMPCPY_CHK)
2661 	  replace_call_with_value (gsi, dest);
2662 	  return true;
2664       else
2666 	  gimple_seq stmts = NULL;
2667 	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2668 	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2669 				    TREE_TYPE (dest), dest, len);
2670 	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2671 	  replace_call_with_value (gsi, temp);
2672 	  return true;
2676   if (! tree_fits_uhwi_p (size))
2677     return false;
     /* With a known object size, only drop the check if we can prove
	SIZE >= the (maximum possible) copy length.  */
2679   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2680   if (! integer_all_onesp (size))
2682       if (! tree_fits_uhwi_p (len))
2684 	  /* If LEN is not constant, try MAXLEN too.
2685 	     For MAXLEN only allow optimizing into non-_ocs function
2686 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2687 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2689 	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2691 		  /* (void) __mempcpy_chk () can be optimized into
2692 		     (void) __memcpy_chk ().  */
2693 		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2694 		  if (!fn)
2695 		    return false;
2697 		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2698 		  replace_call_with_call_and_fold (gsi, repl);
2699 		  return true;
2701 	      return false;
2704       else
2705 	maxlen = len;
2707       if (tree_int_cst_lt (size, maxlen))
2708 	return false;
2711   fn = NULL_TREE;
2712   /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2713      mem{cpy,pcpy,move,set} is available.  */
2714   switch (fcode)
2716     case BUILT_IN_MEMCPY_CHK:
2717       fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2718       break;
2719     case BUILT_IN_MEMPCPY_CHK:
2720       fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2721       break;
2722     case BUILT_IN_MEMMOVE_CHK:
2723       fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2724       break;
2725     case BUILT_IN_MEMSET_CHK:
2726       fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2727       break;
2728     default:
2729       break;
2732   if (!fn)
2733     return false;
2735   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2736   replace_call_with_call_and_fold (gsi, repl);
2737   return true;
2740 /* Fold a call to the __st[rp]cpy_chk builtin.
2741 DEST, SRC, and SIZE are the arguments to the call.
2742 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2743 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2744 strings passed as second argument. */
2746 static bool
2747 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2748 				tree dest,
2749 				tree src, tree size,
2750 				enum built_in_function fcode)
2752   gimple *stmt = gsi_stmt (*gsi);
2753   location_t loc = gimple_location (stmt);
2754   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2755   tree len, fn;
2757   /* If SRC and DEST are the same (and not volatile), return DEST.  */
2758   if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2760       /* Issue -Wrestrict unless the pointers are null (those do
2761 	 not point to objects and so do not indicate an overlap;
2762 	 such calls could be the result of sanitization and jump
2763 	 threading).  */
2764       if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2766 	  tree func = gimple_call_fndecl (stmt);
2768 	  warning_at (loc, OPT_Wrestrict,
2769 		      "%qD source argument is the same as destination",
2770 		      func);
2773       replace_call_with_value (gsi, dest);
2774       return true;
2777   if (! tree_fits_uhwi_p (size))
2778     return false;
     /* With a known object size, only drop the check when the source
	length (or its maximum) is provably smaller than SIZE.  */
2780   tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2781   if (! integer_all_onesp (size))
2783       len = c_strlen (src, 1);
2784       if (! len || ! tree_fits_uhwi_p (len))
2786 	  /* If LEN is not constant, try MAXLEN too.
2787 	     For MAXLEN only allow optimizing into non-_ocs function
2788 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2789 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2791 	      if (fcode == BUILT_IN_STPCPY_CHK)
2793 		  if (! ignore)
2794 		    return false;
2796 		  /* If return value of __stpcpy_chk is ignored,
2797 		     optimize into __strcpy_chk.  */
2798 		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2799 		  if (!fn)
2800 		    return false;
2802 		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2803 		  replace_call_with_call_and_fold (gsi, repl);
2804 		  return true;
2807 	      if (! len || TREE_SIDE_EFFECTS (len))
2808 		return false;
2810 	      /* If c_strlen returned something, but not a constant,
2811 		 transform __strcpy_chk into __memcpy_chk.  */
2812 	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2813 	      if (!fn)
2814 		return false;
     /* Copy LEN + 1 bytes to include the terminating NUL.  */
2816 	      gimple_seq stmts = NULL;
2817 	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2818 	      len = gimple_convert (&stmts, loc, size_type_node, len);
2819 	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2820 				  build_int_cst (size_type_node, 1));
2821 	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2822 	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2823 	      replace_call_with_call_and_fold (gsi, repl);
2824 	      return true;
2827       else
2828 	maxlen = len;
2830       if (! tree_int_cst_lt (maxlen, size))
2831 	return false;
2834   /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
2835   fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2836 			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2837   if (!fn)
2838     return false;
2840   gimple *repl = gimple_build_call (fn, 2, dest, src);
2841   replace_call_with_call_and_fold (gsi, repl);
2842   return true;
2845 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2846 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2847 length passed as third argument. IGNORE is true if return value can be
2848 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2850 static bool
2851 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2852 				 tree dest, tree src,
2853 				 tree len, tree size,
2854 				 enum built_in_function fcode)
2856   gimple *stmt = gsi_stmt (*gsi);
2857   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2858   tree fn;
2860   if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2862       /* If return value of __stpncpy_chk is ignored,
2863 	 optimize into __strncpy_chk.  */
2864       fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2865       if (fn)
2867 	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2868 	  replace_call_with_call_and_fold (gsi, repl);
2869 	  return true;
2873   if (! tree_fits_uhwi_p (size))
2874     return false;
     /* With a known object size, only drop the check if we can prove
	SIZE >= the (maximum possible) copy length.  */
2876   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2877   if (! integer_all_onesp (size))
2879       if (! tree_fits_uhwi_p (len))
2881 	  /* If LEN is not constant, try MAXLEN too.
2882 	     For MAXLEN only allow optimizing into non-_ocs function
2883 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2884 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2885 	    return false;
2887       else
2888 	maxlen = len;
2890       if (tree_int_cst_lt (size, maxlen))
2891 	return false;
2894   /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
2895   fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2896 			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2897   if (!fn)
2898     return false;
2900   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2901   replace_call_with_call_and_fold (gsi, repl);
2902   return true;
2905 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2906 Return NULL_TREE if no simplification can be made. */
2908 static bool
2909 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2911   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2912   location_t loc = gimple_location (stmt);
2913   tree dest = gimple_call_arg (stmt, 0);
2914   tree src = gimple_call_arg (stmt, 1);
2915   tree fn, lenp1;
2917   /* If the result is unused, replace stpcpy with strcpy.  */
2918   if (gimple_call_lhs (stmt) == NULL_TREE)
2920       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2921       if (!fn)
2922 	return false;
2923       gimple_call_set_fndecl (stmt, fn);
2924       fold_stmt (gsi);
2925       return true;
2928   /* Set to non-null if ARG refers to an unterminated array.  */
2929   c_strlen_data data = { };
2930   tree len = c_strlen (src, 1, &data, 1);
2931   if (!len
2932       || TREE_CODE (len) != INTEGER_CST)
2934       data.decl = unterminated_array (src);
2935       if (!data.decl)
2936 	return false;
2939   if (data.decl)
2941       /* Avoid folding calls with unterminated arrays.  */
2942       if (!gimple_no_warning_p (stmt))
2943 	warn_string_no_nul (loc, "stpcpy", src, data.decl);
2944       gimple_set_no_warning (stmt, true);
2945       return false;
2948   if (optimize_function_for_size_p (cfun)
2949       /* If length is zero it's small enough.  */
2950       && !integer_zerop (len))
2951     return false;
2953   /* If the source has a known length replace stpcpy with memcpy.  */
2954   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2955   if (!fn)
2956     return false;
     /* Emit memcpy (dest, src, len + 1) — the +1 copies the NUL.  */
2958   gimple_seq stmts = NULL;
2959   tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2960   lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2961 			tem, build_int_cst (size_type_node, 1));
2962   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2963   gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2964   gimple_move_vops (repl, stmt);
2965   gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2966   /* Replace the result with dest + len.  */
2967   stmts = NULL;
2968   tem = gimple_convert (&stmts, loc, sizetype, len);
2969   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2970   gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2971 				      POINTER_PLUS_EXPR, dest, tem);
2972   gsi_replace (gsi, ret, false);
2973   /* Finally fold the memcpy call.  */
2974   gimple_stmt_iterator gsi2 = *gsi;
2975   gsi_prev (&gsi2);
2976   fold_stmt (&gsi2);
2977   return true;
2980 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2981 NULL_TREE if a normal call should be emitted rather than expanding
2982 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2983 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2984 passed as second argument. */
2986 static bool
2987 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2988 				  enum built_in_function fcode)
2990   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2991   tree dest, size, len, fn, fmt, flag;
2992   const char *fmt_str;
2994   /* Verify the required arguments in the original call.  */
2995   if (gimple_call_num_args (stmt) < 5)
2996     return false;
2998   dest = gimple_call_arg (stmt, 0);
2999   len = gimple_call_arg (stmt, 1);
3000   flag = gimple_call_arg (stmt, 2);
3001   size = gimple_call_arg (stmt, 3);
3002   fmt = gimple_call_arg (stmt, 4);
3004   if (! tree_fits_uhwi_p (size))
3005     return false;
     /* With a known object size, only drop the check if we can prove
	SIZE >= the (maximum possible) LEN value.  */
3007   if (! integer_all_onesp (size))
3009       tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3010       if (! tree_fits_uhwi_p (len))
3012 	  /* If LEN is not constant, try MAXLEN too.
3013 	     For MAXLEN only allow optimizing into non-_ocs function
3014 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
3015 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3016 	    return false;
3018       else
3019 	maxlen = len;
3021       if (tree_int_cst_lt (size, maxlen))
3022 	return false;
3025   if (!init_target_chars ())
3026     return false;
3028   /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3029      or if format doesn't contain % chars or is "%s".  */
3030   if (! integer_zerop (flag))
3032       fmt_str = c_getstr (fmt);
3033       if (fmt_str == NULL)
3034 	return false;
3035       if (strchr (fmt_str, target_percent) != NULL
3036 	  && strcmp (fmt_str, target_percent_s))
3037 	return false;
3040   /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3041      available.  */
3042   fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3043 			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3044   if (!fn)
3045     return false;
3047   /* Replace the called function and the first 5 argument by 3 retaining
3048      trailing varargs.  */
3049   gimple_call_set_fndecl (stmt, fn);
3050   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3051   gimple_call_set_arg (stmt, 0, dest);
3052   gimple_call_set_arg (stmt, 1, len);
3053   gimple_call_set_arg (stmt, 2, fmt);
     /* Shift the variadic arguments down by two slots (FLAG and SIZE
	are dropped) and shrink the call's operand count to match.  */
3054   for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3055     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3056   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3057   fold_stmt (gsi);
3058   return true;
3061 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3062 Return NULL_TREE if a normal call should be emitted rather than
3063 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3064 or BUILT_IN_VSPRINTF_CHK. */
3066 static bool
3067 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3068 				 enum built_in_function fcode)
3070   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3071   tree dest, size, len, fn, fmt, flag;
3072   const char *fmt_str;
3073   unsigned nargs = gimple_call_num_args (stmt);
3075   /* Verify the required arguments in the original call.  */
3076   if (nargs < 4)
3077     return false;
3078   dest = gimple_call_arg (stmt, 0);
3079   flag = gimple_call_arg (stmt, 1);
3080   size = gimple_call_arg (stmt, 2);
3081   fmt = gimple_call_arg (stmt, 3);
3083   if (! tree_fits_uhwi_p (size))
3084     return false;
     /* LEN stays NULL unless the output length can be computed below.  */
3086   len = NULL_TREE;
3088   if (!init_target_chars ())
3089     return false;
3091   /* Check whether the format is a literal string constant.  */
3092   fmt_str = c_getstr (fmt);
3093   if (fmt_str != NULL)
3095       /* If the format doesn't contain % args or %%, we know the size.  */
3096       if (strchr (fmt_str, target_percent) == 0)
3098 	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3099 	    len = build_int_cstu (size_type_node, strlen (fmt_str));
3101       /* If the format is "%s" and first ... argument is a string literal,
3102 	 we know the size too.  */
3103       else if (fcode == BUILT_IN_SPRINTF_CHK
3104 	       && strcmp (fmt_str, target_percent_s) == 0)
3106 	  tree arg;
3108 	  if (nargs == 5)
3110 	      arg = gimple_call_arg (stmt, 4);
3111 	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
3113 		  len = c_strlen (arg, 1);
3114 		  if (! len || ! tree_fits_uhwi_p (len))
3115 		    len = NULL_TREE;
     /* With a known object size, only drop the check when the computed
	output length is provably smaller than SIZE.  */
3121   if (! integer_all_onesp (size))
3123       if (! len || ! tree_int_cst_lt (len, size))
3124 	return false;
3127   /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3128      or if format doesn't contain % chars or is "%s".  */
3129   if (! integer_zerop (flag))
3131       if (fmt_str == NULL)
3132 	return false;
3133       if (strchr (fmt_str, target_percent) != NULL
3134 	  && strcmp (fmt_str, target_percent_s))
3135 	return false;
3138   /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
3139   fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3140 			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3141   if (!fn)
3142     return false;
3144   /* Replace the called function and the first 4 argument by 2 retaining
3145      trailing varargs.  */
3146   gimple_call_set_fndecl (stmt, fn);
3147   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3148   gimple_call_set_arg (stmt, 0, dest);
3149   gimple_call_set_arg (stmt, 1, fmt);
     /* Shift the variadic arguments down by two slots (FLAG and SIZE
	are dropped) and shrink the call's operand count to match.  */
3150   for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3151     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3152   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3153   fold_stmt (gsi);
3154   return true;
3157 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3158 ORIG may be null if this is a 2-argument call. We don't attempt to
3159 simplify calls with more than 3 arguments.
3161 Return true if simplification was possible, otherwise false. */
3163 bool
3164 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3166 gimple *stmt = gsi_stmt (*gsi);
3167 tree dest = gimple_call_arg (stmt, 0);
3168 tree fmt = gimple_call_arg (stmt, 1);
3169 tree orig = NULL_TREE;
3170 const char *fmt_str = NULL;
3172 /* Verify the required arguments in the original call. We deal with two
3173 types of sprintf() calls: 'sprintf (str, fmt)' and
3174 'sprintf (dest, "%s", orig)'. */
3175 if (gimple_call_num_args (stmt) > 3)
3176 return false;
3178 if (gimple_call_num_args (stmt) == 3)
3179 orig = gimple_call_arg (stmt, 2);
3181 /* Check whether the format is a literal string constant. */
3182 fmt_str = c_getstr (fmt);
3183 if (fmt_str == NULL)
3184 return false;
3186 if (!init_target_chars ())
3187 return false;
3189 /* If the format doesn't contain % args or %%, use strcpy. */
3190 if (strchr (fmt_str, target_percent) == NULL)
3192 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3194 if (!fn)
3195 return false;
3197 /* Don't optimize sprintf (buf, "abc", ptr++). */
3198 if (orig)
3199 return false;
3201 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3202 'format' is known to contain no % formats. */
3203 gimple_seq stmts = NULL;
3204 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3206 /* Propagate the NO_WARNING bit to avoid issuing the same
3207 warning more than once. */
3208 if (gimple_no_warning_p (stmt))
3209 gimple_set_no_warning (repl, true);
3211 gimple_seq_add_stmt_without_update (&stmts, repl);
  /* sprintf returns the number of characters written -- here the
     constant strlen of the literal format -- so materialize that
     value into the original call's lhs.  */
3212 if (tree lhs = gimple_call_lhs (stmt))
3214 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3215 strlen (fmt_str)));
3216 gimple_seq_add_stmt_without_update (&stmts, repl);
3217 gsi_replace_with_seq_vops (gsi, stmts);
3218 /* gsi now points at the assignment to the lhs, get a
3219 stmt iterator to the memcpy call.
3220 ??? We can't use gsi_for_stmt as that doesn't work when the
3221 CFG isn't built yet. */
3222 gimple_stmt_iterator gsi2 = *gsi;
3223 gsi_prev (&gsi2);
3224 fold_stmt (&gsi2);
3226 else
3228 gsi_replace_with_seq_vops (gsi, stmts);
3229 fold_stmt (gsi);
3231 return true;
3234 /* If the format is "%s", use strcpy if the result isn't used. */
3235 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3237 tree fn;
3238 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3240 if (!fn)
3241 return false;
3243 /* Don't crash on sprintf (str1, "%s"). */
3244 if (!orig)
3245 return false;
3247 tree orig_len = NULL_TREE;
3248 if (gimple_call_lhs (stmt))
  /* The call's result is the length of ORIG, so that length must be
     computable for the lhs to be replaced.  */
3250 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3251 if (!orig_len)
3252 return false;
3255 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3256 gimple_seq stmts = NULL;
3257 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3259 /* Propagate the NO_WARNING bit to avoid issuing the same
3260 warning more than once. */
3261 if (gimple_no_warning_p (stmt))
3262 gimple_set_no_warning (repl, true);
3264 gimple_seq_add_stmt_without_update (&stmts, repl);
3265 if (tree lhs = gimple_call_lhs (stmt))
3267 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3268 TREE_TYPE (orig_len)))
3269 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3270 repl = gimple_build_assign (lhs, orig_len);
3271 gimple_seq_add_stmt_without_update (&stmts, repl);
3272 gsi_replace_with_seq_vops (gsi, stmts);
3273 /* gsi now points at the assignment to the lhs, get a
3274 stmt iterator to the memcpy call.
3275 ??? We can't use gsi_for_stmt as that doesn't work when the
3276 CFG isn't built yet. */
3277 gimple_stmt_iterator gsi2 = *gsi;
3278 gsi_prev (&gsi2);
3279 fold_stmt (&gsi2);
3281 else
3283 gsi_replace_with_seq_vops (gsi, stmts);
3284 fold_stmt (gsi);
3286 return true;
3288 return false;
3291 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3292 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3293 attempt to simplify calls with more than 4 arguments.
3295 Return true if simplification was possible, otherwise false. */
3297 bool
3298 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3300 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3301 tree dest = gimple_call_arg (stmt, 0);
3302 tree destsize = gimple_call_arg (stmt, 1);
3303 tree fmt = gimple_call_arg (stmt, 2);
3304 tree orig = NULL_TREE;
3305 const char *fmt_str = NULL;
3307 if (gimple_call_num_args (stmt) > 4)
3308 return false;
3310 if (gimple_call_num_args (stmt) == 4)
3311 orig = gimple_call_arg (stmt, 3);
  /* The destination size must be a compile-time constant to reason
     about truncation below.  */
3313 if (!tree_fits_uhwi_p (destsize))
3314 return false;
3315 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3317 /* Check whether the format is a literal string constant. */
3318 fmt_str = c_getstr (fmt);
3319 if (fmt_str == NULL)
3320 return false;
3322 if (!init_target_chars ())
3323 return false;
3325 /* If the format doesn't contain % args or %%, use strcpy. */
3326 if (strchr (fmt_str, target_percent) == NULL)
3328 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3329 if (!fn)
3330 return false;
3332 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3333 if (orig)
3334 return false;
3336 /* We could expand this as
3337 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3338 or to
3339 memcpy (str, fmt_with_nul_at_cstm1, cst);
3340 but in the former case that might increase code size
3341 and in the latter case grow .rodata section too much.
3342 So punt for now. */
3343 size_t len = strlen (fmt_str);
3344 if (len >= destlen)
3345 return false;
3347 gimple_seq stmts = NULL;
3348 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3349 gimple_seq_add_stmt_without_update (&stmts, repl);
  /* snprintf returns the number of characters (not counting the nul)
     that would have been written; here that is the constant LEN.  */
3350 if (tree lhs = gimple_call_lhs (stmt))
3352 repl = gimple_build_assign (lhs,
3353 build_int_cst (TREE_TYPE (lhs), len));
3354 gimple_seq_add_stmt_without_update (&stmts, repl);
3355 gsi_replace_with_seq_vops (gsi, stmts);
3356 /* gsi now points at the assignment to the lhs, get a
3357 stmt iterator to the memcpy call.
3358 ??? We can't use gsi_for_stmt as that doesn't work when the
3359 CFG isn't built yet. */
3360 gimple_stmt_iterator gsi2 = *gsi;
3361 gsi_prev (&gsi2);
3362 fold_stmt (&gsi2);
3364 else
3366 gsi_replace_with_seq_vops (gsi, stmts);
3367 fold_stmt (gsi);
3369 return true;
3372 /* If the format is "%s", use strcpy if the result isn't used. */
3373 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3375 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3376 if (!fn)
3377 return false;
3379 /* Don't crash on snprintf (str1, cst, "%s"). */
3380 if (!orig)
3381 return false;
3383 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3384 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3385 return false;
3387 /* We could expand this as
3388 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3389 or to
3390 memcpy (str1, str2_with_nul_at_cstm1, cst);
3391 but in the former case that might increase code size
3392 and in the latter case grow .rodata section too much.
3393 So punt for now. */
3394 if (compare_tree_int (orig_len, destlen) >= 0)
3395 return false;
3397 /* Convert snprintf (str1, cst, "%s", str2) into
3398 strcpy (str1, str2) if strlen (str2) < cst. */
3399 gimple_seq stmts = NULL;
3400 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3401 gimple_seq_add_stmt_without_update (&stmts, repl);
3402 if (tree lhs = gimple_call_lhs (stmt))
3404 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3405 TREE_TYPE (orig_len)))
3406 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3407 repl = gimple_build_assign (lhs, orig_len);
3408 gimple_seq_add_stmt_without_update (&stmts, repl);
3409 gsi_replace_with_seq_vops (gsi, stmts);
3410 /* gsi now points at the assignment to the lhs, get a
3411 stmt iterator to the memcpy call.
3412 ??? We can't use gsi_for_stmt as that doesn't work when the
3413 CFG isn't built yet. */
3414 gimple_stmt_iterator gsi2 = *gsi;
3415 gsi_prev (&gsi2);
3416 fold_stmt (&gsi2);
3418 else
3420 gsi_replace_with_seq_vops (gsi, stmts);
3421 fold_stmt (gsi);
3423 return true;
3425 return false;
3428 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3429 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3430 more than 3 arguments, and ARG may be null in the 2-argument case.
3432 Return true if simplification was possible, otherwise false.
3433 FCODE is the BUILT_IN_*
3434 code of the function to be simplified. */
3436 static bool
3437 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3438 tree fp, tree fmt, tree arg,
3439 enum built_in_function fcode)
3441 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3442 tree fn_fputc, fn_fputs;
3443 const char *fmt_str = NULL;
3445 /* If the return value is used, don't do the transformation. */
3446 if (gimple_call_lhs (stmt) != NULL_TREE)
3447 return false;
3449 /* Check whether the format is a literal string constant. */
3450 fmt_str = c_getstr (fmt);
3451 if (fmt_str == NULL)
3452 return false;
3454 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3456 /* If we're using an unlocked function, assume the other
3457 unlocked functions exist explicitly. */
3458 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3459 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3461 else
3463 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3464 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3467 if (!init_target_chars ())
3468 return false;
3470 /* If the format doesn't contain % args or %%, use fputs. */
3471 if (strchr (fmt_str, target_percent) == NULL)
3473 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3474 && arg)
3475 return false;
3477 /* If the format specifier was "", fprintf does nothing. */
3478 if (fmt_str[0] == '\0')
3480 replace_call_with_value (gsi, NULL_TREE);
3481 return true;
3484 /* When "string" doesn't contain %, replace all cases of
3485 fprintf (fp, string) with fputs (string, fp). The fputs
3486 builtin will take care of special cases like length == 1. */
3487 if (fn_fputs)
3489 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3490 replace_call_with_call_and_fold (gsi, repl);
3491 return true;
3495 /* The other optimizations can be done only on the non-va_list variants. */
3496 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3497 return false;
3499 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3500 else if (strcmp (fmt_str, target_percent_s) == 0)
3502 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3503 return false;
3504 if (fn_fputs)
3506 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3507 replace_call_with_call_and_fold (gsi, repl);
3508 return true;
3512 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3513 else if (strcmp (fmt_str, target_percent_c) == 0)
3515 if (!arg
3516 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3517 return false;
3518 if (fn_fputc)
3520 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3521 replace_call_with_call_and_fold (gsi, repl);
3522 return true;
3526 return false;
3529 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3530 FMT and ARG are the arguments to the call; we don't fold cases with
3531 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3533 Return true if simplification was possible, otherwise false.
3534 FCODE is the BUILT_IN_*
3535 code of the function to be simplified. */
3537 static bool
3538 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3539 tree arg, enum built_in_function fcode)
3541 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3542 tree fn_putchar, fn_puts, newarg;
3543 const char *fmt_str = NULL;
3545 /* If the return value is used, don't do the transformation. */
3546 if (gimple_call_lhs (stmt) != NULL_TREE)
3547 return false;
3549 /* Check whether the format is a literal string constant. */
3550 fmt_str = c_getstr (fmt);
3551 if (fmt_str == NULL)
3552 return false;
3554 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3556 /* If we're using an unlocked function, assume the other
3557 unlocked functions exist explicitly. */
3558 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3559 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3561 else
3563 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3564 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3567 if (!init_target_chars ())
3568 return false;
3570 if (strcmp (fmt_str, target_percent_s) == 0
3571 || strchr (fmt_str, target_percent) == NULL)
3573 const char *str;
3575 if (strcmp (fmt_str, target_percent_s) == 0)
3577 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3578 return false;
3580 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3581 return false;
3583 str = c_getstr (arg);
3584 if (str == NULL)
3585 return false;
3587 else
3589 /* The format specifier doesn't contain any '%' characters. */
3590 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3591 && arg)
3592 return false;
3593 str = fmt_str;
3596 /* If the string was "", printf does nothing. */
3597 if (str[0] == '\0')
3599 replace_call_with_value (gsi, NULL_TREE);
3600 return true;
3603 /* If the string has length of 1, call putchar. */
3604 if (str[1] == '\0')
3606 /* Given printf("c"), (where c is any one character,)
3607 convert "c"[0] to an int and pass that to the replacement
3608 function. */
3609 newarg = build_int_cst (integer_type_node, str[0]);
3610 if (fn_putchar)
3612 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3613 replace_call_with_call_and_fold (gsi, repl);
3614 return true;
3617 else
3619 /* If the string was "string\n", call puts("string").
   The length must also round-trip through int, i.e. be
   representable as a positive int. */
3620 size_t len = strlen (str);
3621 if ((unsigned char)str[len - 1] == target_newline
3622 && (size_t) (int) len == len
3623 && (int) len > 0)
3625 char *newstr;
3627 /* Create a NUL-terminated string that's one char shorter
3628 than the original, stripping off the trailing '\n'. */
3629 newstr = xstrdup (str);
3630 newstr[len - 1] = '\0';
3631 newarg = build_string_literal (len, newstr);
3632 free (newstr);
3633 if (fn_puts)
3635 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3636 replace_call_with_call_and_fold (gsi, repl);
3637 return true;
3640 else
3641 /* We'd like to arrange to call fputs(string,stdout) here,
3642 but we need stdout and don't have a way to get it yet. */
3643 return false;
3647 /* The other optimizations can be done only on the non-va_list variants. */
3648 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3649 return false;
3651 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3652 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3654 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3655 return false;
3656 if (fn_puts)
3658 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3659 replace_call_with_call_and_fold (gsi, repl);
3660 return true;
3664 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3665 else if (strcmp (fmt_str, target_percent_c) == 0)
3667 if (!arg || ! useless_type_conversion_p (integer_type_node,
3668 TREE_TYPE (arg)))
3669 return false;
3670 if (fn_putchar)
3672 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3673 replace_call_with_call_and_fold (gsi, repl);
3674 return true;
3678 return false;
3683 /* Fold a call to __builtin_strlen. If the length is a known constant,
   replace the call with that constant; otherwise record the computed
   [0, MAXLEN] range of the result and return false. */
3685 static bool
3686 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3688 gimple *stmt = gsi_stmt (*gsi);
3689 tree arg = gimple_call_arg (stmt, 0);
3691 wide_int minlen;
3692 wide_int maxlen;
3694 c_strlen_data lendata = { };
3695 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3696 && !lendata.decl
3697 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3698 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3700 /* The range of lengths refers to either a single constant
3701 string or to the longest and shortest constant string
3702 referenced by the argument of the strlen() call, or to
3703 the strings that can possibly be stored in the arrays
3704 the argument refers to. */
3705 minlen = wi::to_wide (lendata.minlen);
3706 maxlen = wi::to_wide (lendata.maxlen);
3708 else
  /* Nothing useful is known: fall back to [0, max_object_size () - 2]. */
3710 unsigned prec = TYPE_PRECISION (sizetype);
3712 minlen = wi::shwi (0, prec);
3713 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3716 if (minlen == maxlen)
3718 /* Fold the strlen call to a constant. */
3719 tree type = TREE_TYPE (lendata.minlen);
3720 tree len = force_gimple_operand_gsi (gsi,
3721 wide_int_to_tree (type, minlen),
3722 true, NULL, true, GSI_SAME_STMT);
3723 replace_call_with_value (gsi, len);
3724 return true;
3727 /* Set the strlen() range to [0, MAXLEN]. */
3728 if (tree lhs = gimple_call_lhs (stmt))
3729 set_strlen_range (lhs, maxlen);
3731 return false;
3734 /* Fold a call to __builtin_acc_on_device. */
3736 static bool
3737 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3739 /* Defer folding until we know which compiler we're in. */
3740 if (symtab->state != EXPANSION)
3741 return false;
3743 unsigned val_host = GOMP_DEVICE_HOST;
3744 unsigned val_dev = GOMP_DEVICE_NONE;
3746 #ifdef ACCEL_COMPILER
3747 val_host = GOMP_DEVICE_NOT_HOST;
3748 val_dev = ACCEL_COMPILER_acc_device;
3749 #endif
3751 location_t loc = gimple_location (gsi_stmt (*gsi));
3753 tree host_eq = make_ssa_name (boolean_type_node);
3754 gimple *host_ass = gimple_build_assign
3755 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3756 gimple_set_location (host_ass, loc);
3757 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3759 tree dev_eq = make_ssa_name (boolean_type_node);
3760 gimple *dev_ass = gimple_build_assign
3761 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3762 gimple_set_location (dev_ass, loc);
3763 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3765 tree result = make_ssa_name (boolean_type_node);
3766 gimple *result_ass = gimple_build_assign
3767 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3768 gimple_set_location (result_ass, loc);
3769 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3771 replace_call_with_value (gsi, result);
3773 return true;
3776 /* Fold realloc (0, n) -> malloc (n). */
3778 static bool
3779 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3781 gimple *stmt = gsi_stmt (*gsi);
3782 tree arg = gimple_call_arg (stmt, 0);
3783 tree size = gimple_call_arg (stmt, 1);
3785 if (operand_equal_p (arg, null_pointer_node, 0))
3787 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3788 if (fn_malloc)
3790 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3791 replace_call_with_call_and_fold (gsi, repl);
3792 return true;
3795 return false;
3798 /* Fold the non-target builtin at *GSI and return whether any simplification
3799 was made. */
3801 static bool
3802 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3804 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3805 tree callee = gimple_call_fndecl (stmt);
3807 /* Give up for always_inline inline builtins until they are
3808 inlined. */
3809 if (avoid_folding_inline_builtin (callee))
3810 return false;
3812 unsigned n = gimple_call_num_args (stmt);
3813 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch on the builtin's function code; each helper returns true
     if it simplified the call at *GSI.  */
3814 switch (fcode)
3816 case BUILT_IN_BCMP:
3817 return gimple_fold_builtin_bcmp (gsi);
3818 case BUILT_IN_BCOPY:
3819 return gimple_fold_builtin_bcopy (gsi);
3820 case BUILT_IN_BZERO:
3821 return gimple_fold_builtin_bzero (gsi);
3823 case BUILT_IN_MEMSET:
3824 return gimple_fold_builtin_memset (gsi,
3825 gimple_call_arg (stmt, 1),
3826 gimple_call_arg (stmt, 2));
3827 case BUILT_IN_MEMCPY:
3828 case BUILT_IN_MEMPCPY:
3829 case BUILT_IN_MEMMOVE:
3830 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3831 gimple_call_arg (stmt, 1), fcode);
3832 case BUILT_IN_SPRINTF_CHK:
3833 case BUILT_IN_VSPRINTF_CHK:
3834 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3835 case BUILT_IN_STRCAT_CHK:
3836 return gimple_fold_builtin_strcat_chk (gsi);
3837 case BUILT_IN_STRNCAT_CHK:
3838 return gimple_fold_builtin_strncat_chk (gsi);
3839 case BUILT_IN_STRLEN:
3840 return gimple_fold_builtin_strlen (gsi);
3841 case BUILT_IN_STRCPY:
3842 return gimple_fold_builtin_strcpy (gsi,
3843 gimple_call_arg (stmt, 0),
3844 gimple_call_arg (stmt, 1));
3845 case BUILT_IN_STRNCPY:
3846 return gimple_fold_builtin_strncpy (gsi,
3847 gimple_call_arg (stmt, 0),
3848 gimple_call_arg (stmt, 1),
3849 gimple_call_arg (stmt, 2));
3850 case BUILT_IN_STRCAT:
3851 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3852 gimple_call_arg (stmt, 1));
3853 case BUILT_IN_STRNCAT:
3854 return gimple_fold_builtin_strncat (gsi);
3855 case BUILT_IN_INDEX:
3856 case BUILT_IN_STRCHR:
3857 return gimple_fold_builtin_strchr (gsi, false);
3858 case BUILT_IN_RINDEX:
3859 case BUILT_IN_STRRCHR:
3860 return gimple_fold_builtin_strchr (gsi, true);
3861 case BUILT_IN_STRSTR:
3862 return gimple_fold_builtin_strstr (gsi);
3863 case BUILT_IN_STRCMP:
3864 case BUILT_IN_STRCMP_EQ:
3865 case BUILT_IN_STRCASECMP:
3866 case BUILT_IN_STRNCMP:
3867 case BUILT_IN_STRNCMP_EQ:
3868 case BUILT_IN_STRNCASECMP:
3869 return gimple_fold_builtin_string_compare (gsi);
3870 case BUILT_IN_MEMCHR:
3871 return gimple_fold_builtin_memchr (gsi);
3872 case BUILT_IN_FPUTS:
3873 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3874 gimple_call_arg (stmt, 1), false);
3875 case BUILT_IN_FPUTS_UNLOCKED:
3876 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3877 gimple_call_arg (stmt, 1), true);
3878 case BUILT_IN_MEMCPY_CHK:
3879 case BUILT_IN_MEMPCPY_CHK:
3880 case BUILT_IN_MEMMOVE_CHK:
3881 case BUILT_IN_MEMSET_CHK:
3882 return gimple_fold_builtin_memory_chk (gsi,
3883 gimple_call_arg (stmt, 0),
3884 gimple_call_arg (stmt, 1),
3885 gimple_call_arg (stmt, 2),
3886 gimple_call_arg (stmt, 3),
3887 fcode);
3888 case BUILT_IN_STPCPY:
3889 return gimple_fold_builtin_stpcpy (gsi);
3890 case BUILT_IN_STRCPY_CHK:
3891 case BUILT_IN_STPCPY_CHK:
3892 return gimple_fold_builtin_stxcpy_chk (gsi,
3893 gimple_call_arg (stmt, 0),
3894 gimple_call_arg (stmt, 1),
3895 gimple_call_arg (stmt, 2),
3896 fcode);
3897 case BUILT_IN_STRNCPY_CHK:
3898 case BUILT_IN_STPNCPY_CHK:
3899 return gimple_fold_builtin_stxncpy_chk (gsi,
3900 gimple_call_arg (stmt, 0),
3901 gimple_call_arg (stmt, 1),
3902 gimple_call_arg (stmt, 2),
3903 gimple_call_arg (stmt, 3),
3904 fcode);
3905 case BUILT_IN_SNPRINTF_CHK:
3906 case BUILT_IN_VSNPRINTF_CHK:
3907 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3909 case BUILT_IN_FPRINTF:
3910 case BUILT_IN_FPRINTF_UNLOCKED:
3911 case BUILT_IN_VFPRINTF:
3912 if (n == 2 || n == 3)
3913 return gimple_fold_builtin_fprintf (gsi,
3914 gimple_call_arg (stmt, 0),
3915 gimple_call_arg (stmt, 1),
3916 n == 3
3917 ? gimple_call_arg (stmt, 2)
3918 : NULL_TREE,
3919 fcode);
3920 break;
3921 case BUILT_IN_FPRINTF_CHK:
3922 case BUILT_IN_VFPRINTF_CHK:
3923 if (n == 3 || n == 4)
3924 return gimple_fold_builtin_fprintf (gsi,
3925 gimple_call_arg (stmt, 0),
3926 gimple_call_arg (stmt, 2),
3927 n == 4
3928 ? gimple_call_arg (stmt, 3)
3929 : NULL_TREE,
3930 fcode);
3931 break;
3932 case BUILT_IN_PRINTF:
3933 case BUILT_IN_PRINTF_UNLOCKED:
3934 case BUILT_IN_VPRINTF:
3935 if (n == 1 || n == 2)
3936 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3937 n == 2
3938 ? gimple_call_arg (stmt, 1)
3939 : NULL_TREE, fcode);
3940 break;
3941 case BUILT_IN_PRINTF_CHK:
3942 case BUILT_IN_VPRINTF_CHK:
3943 if (n == 2 || n == 3)
3944 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3945 n == 3
3946 ? gimple_call_arg (stmt, 2)
3947 : NULL_TREE, fcode);
3948 break;
3949 case BUILT_IN_ACC_ON_DEVICE:
3950 return gimple_fold_builtin_acc_on_device (gsi,
3951 gimple_call_arg (stmt, 0));
3952 case BUILT_IN_REALLOC:
3953 return gimple_fold_builtin_realloc (gsi);
3955 default:;
3958 /* Try the generic builtin folder. */
3959 bool ignore = (gimple_call_lhs (stmt) == NULL);
3960 tree result = fold_call_stmt (stmt, ignore);
3961 if (result)
3963 if (ignore)
3964 STRIP_NOPS (result);
3965 else
3966 result = fold_convert (gimple_call_return_type (stmt), result);
3967 if (!update_call_from_tree (gsi, result))
3968 gimplify_and_update_call_from_tree (gsi, result);
3969 return true;
3972 return false;
3975 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3976 function calls to constants, where possible. */
3978 static tree
3979 fold_internal_goacc_dim (const gimple *call)
3981 int axis = oacc_get_ifn_dim_arg (call);
3982 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3983 tree result = NULL_TREE;
3984 tree type = TREE_TYPE (gimple_call_lhs (call));
3986 switch (gimple_call_internal_fn (call))
3988 case IFN_GOACC_DIM_POS:
3989 /* If the size is 1, we know the answer. */
3990 if (size == 1)
3991 result = build_int_cst (type, 0);
3992 break;
3993 case IFN_GOACC_DIM_SIZE:
3994 /* If the size is not dynamic, we know the answer. */
3995 if (size)
3996 result = build_int_cst (type, size);
3997 break;
3998 default:
3999 break;
4002 return result;
4005 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4006 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4007 &var where var is only addressable because of such calls. */
4009 bool
4010 optimize_atomic_compare_exchange_p (gimple *stmt)
4012 if (gimple_call_num_args (stmt) != 6
4013 || !flag_inline_atomics
4014 || !optimize
4015 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4016 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4017 || !gimple_vdef (stmt)
4018 || !gimple_vuse (stmt))
4019 return false;
4021 tree fndecl = gimple_call_fndecl (stmt);
4022 switch (DECL_FUNCTION_CODE (fndecl))
4024 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4025 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4026 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4027 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4028 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4029 break;
4030 default:
4031 return false;
  /* The "expected" argument must be the address of a local scalar
     variable.  */
4034 tree expected = gimple_call_arg (stmt, 1);
4035 if (TREE_CODE (expected) != ADDR_EXPR
4036 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4037 return false;
4039 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4040 if (!is_gimple_reg_type (etype)
4041 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4042 || TREE_THIS_VOLATILE (etype)
4043 || VECTOR_TYPE_P (etype)
4044 || TREE_CODE (etype) == COMPLEX_TYPE
4045 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4046 might not preserve all the bits. See PR71716. */
4047 || SCALAR_FLOAT_TYPE_P (etype)
4048 || maybe_ne (TYPE_PRECISION (etype),
4049 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4050 return false;
  /* The weak argument must be a literal 0 or 1.  */
4052 tree weak = gimple_call_arg (stmt, 3);
4053 if (!integer_zerop (weak) && !integer_onep (weak))
4054 return false;
4056 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4057 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4058 machine_mode mode = TYPE_MODE (itype);
  /* Punt if the target has neither an atomic nor a sync
     compare-and-swap pattern for this mode.  */
4060 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4061 == CODE_FOR_nothing
4062 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4063 return false;
4065 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4066 return false;
4068 return true;
4071 /* Fold
4072 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4073 into
4074 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4075 i = IMAGPART_EXPR <t>;
4076 r = (_Bool) i;
4077 e = REALPART_EXPR <t>; */
4079 void
4080 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4082 gimple *stmt = gsi_stmt (*gsi);
4083 tree fndecl = gimple_call_fndecl (stmt);
4084 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4085 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4086 tree ctype = build_complex_type (itype);
4087 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4088 bool throws = false;
4089 edge e = NULL;
4090 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4091 expected);
4092 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4093 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4094 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4096 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4097 build1 (VIEW_CONVERT_EXPR, itype,
4098 gimple_assign_lhs (g)));
4099 gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Encode the weak flag and the access size into the single flag
     argument as w * 256 + N, per the comment above.  */
4101 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4102 + int_size_in_bytes (itype);
4103 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4104 gimple_call_arg (stmt, 0),
4105 gimple_assign_lhs (g),
4106 gimple_call_arg (stmt, 2),
4107 build_int_cst (integer_type_node, flag),
4108 gimple_call_arg (stmt, 4),
4109 gimple_call_arg (stmt, 5));
4110 tree lhs = make_ssa_name (ctype);
4111 gimple_call_set_lhs (g, lhs);
4112 gimple_move_vops (g, stmt);
4113 tree oldlhs = gimple_call_lhs (stmt);
  /* If the call can throw internally, the statements that consume its
     result must be inserted on the fallthru edge after the call.  */
4114 if (stmt_can_throw_internal (cfun, stmt))
4116 throws = true;
4117 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4119 gimple_call_set_nothrow (as_a <gcall *> (g),
4120 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4121 gimple_call_set_lhs (stmt, NULL_TREE);
4122 gsi_replace (gsi, g, true);
4124 if (oldlhs)
4125 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4126 build1 (IMAGPART_EXPR, itype, lhs));
4127 if (throws)
4129 gsi_insert_on_edge_immediate (e, g);
4130 *gsi = gsi_for_stmt (g);
4132 else
4133 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4134 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4135 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4137 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4138 build1 (REALPART_EXPR, itype, lhs));
4139 if (throws && oldlhs == NULL_TREE)
4141 gsi_insert_on_edge_immediate (e, g);
4142 *gsi = gsi_for_stmt (g);
4144 else
4145 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4146 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4148 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4149 VIEW_CONVERT_EXPR,
4150 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4151 gimple_assign_lhs (g)));
4152 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4154 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4155 gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave *GSI pointing at the statement inserted before the original
     call, as saved in GSIRET above.  */
4156 *gsi = gsiret;
4159 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4160 doesn't fit into TYPE. The test for overflow should be regardless of
4161 -fwrapv, and even for unsigned types. */
4163 bool
4164 arith_overflowed_p (enum tree_code code, const_tree type,
4165 const_tree arg0, const_tree arg1)
4167 widest2_int warg0 = widest2_int_cst (arg0);
4168 widest2_int warg1 = widest2_int_cst (arg1);
4169 widest2_int wres;
4170 switch (code)
4172 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4173 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4174 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4175 default: gcc_unreachable ();
4177 signop sign = TYPE_SIGN (type);
4178 if (sign == UNSIGNED && wi::neg_p (wres))
4179 return true;
4180 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4183 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4184 for the memory it references, otherwise return null. VECTYPE is the
4185 type of the memory vector. */
4187 static tree
4188 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4190 tree ptr = gimple_call_arg (call, 0);
4191 tree alias_align = gimple_call_arg (call, 1);
4192 tree mask = gimple_call_arg (call, 2);
4193 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4194 return NULL_TREE;
4196 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4197 if (TYPE_ALIGN (vectype) != align)
4198 vectype = build_aligned_type (vectype, align);
4199 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4200 return fold_build2 (MEM_REF, vectype, ptr, offset);
4203 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4205 static bool
4206 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4208 tree lhs = gimple_call_lhs (call);
4209 if (!lhs)
4210 return false;
4212 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4214 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4215 gimple_set_location (new_stmt, gimple_location (call));
4216 gimple_move_vops (new_stmt, call);
4217 gsi_replace (gsi, new_stmt, false);
4218 return true;
4220 return false;
4223 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4225 static bool
4226 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4228 tree rhs = gimple_call_arg (call, 3);
4229 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4231 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4232 gimple_set_location (new_stmt, gimple_location (call));
4233 gimple_move_vops (new_stmt, call);
4234 gsi_replace (gsi, new_stmt, false);
4235 return true;
4237 return false;
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true, no new statements may be created and the call
   statement itself must be kept.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Warn in the dump file if the target is
	     not consistent with the type inheritance graph.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Use the type inheritance analysis to enumerate the possible
	     targets; if there is at most one, devirtualize.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined: bind it to an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target at all: the call site is
		     unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise just fold *& in the chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace or insert statements, which in-place
     folding must not do.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* The interesting case is diagnostic for integer overflow.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* Drop the check when the access is provably in bounds
	       (or the object size is unknown, i.e. all-ones).  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never overflow the pointer.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when the constant index is within
	       the constant bound.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  /* Try to simplify the overflow-checking arithmetic; for the
	     *_OVERFLOW variants (cplx_result) the lhs is a complex value
	     of (result, overflow-flag).  */
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion to TYPE is not value-preserving;
		       give up on the non-constant result.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Pack (result, overflow-flag) into the complex lhs.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4574 /* Return true whether NAME has a use on STMT. */
4576 static bool
4577 has_use_on_stmt (tree name, gimple *stmt)
4579 imm_use_iterator iter;
4580 use_operand_p use_p;
4581 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4582 if (USE_STMT (use_p) == stmt)
4583 return true;
4584 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.
   On failure the caller is responsible for discarding *SEQ.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* The same abnormal-SSA check for operands nested inside a
     comparison tree in ops[0].  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a plain SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Simplified to a constant: the condition is decided.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified value in SEQ and compare the
	     resulting SSA name against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* In-place replacement is only OK when the new rhs does not need
	 more operand slots than the old statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same function, possibly simplified arguments: update in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Build the replacement into SEQ, assigning to the old lhs,
	     and swap it in preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; it is rewritten in place.
   Returns true if anything changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit position of the element and check
		     that the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references to get at the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  /* Fold the component offset into the MEM_REF's constant
	     offset operand and make the address a plain &base.  */
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE maps SSA names to (possibly)
   simplified values during pattern matching.  Returns true if the
   statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping
		     the comparison code too.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in arguments and the lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  In-place folding only handles
     assignments and conditions, which can be updated without inserting
     new statements.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	/* Generic rhs folding; only replace in-place when the new rhs
	   does not need more operand slots than the old one.  */
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs may only be folded to memory when the constraint
	       allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address is taken of and rebuild
	         the ADDR_EXPR with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return value if allowed.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5173 /* Valueziation callback that ends up not following SSA edges. */
5175 tree
5176 no_follow_ssa_edges (tree)
5178 return NULL_TREE;
5181 /* Valueization callback that ends up following single-use SSA edges only. */
5183 tree
5184 follow_single_use_edges (tree val)
5186 if (TREE_CODE (val) == SSA_NAME
5187 && !has_single_use (val))
5188 return NULL_TREE;
5189 return val;
5192 /* Valueization callback that follows all SSA edges. */
5194 tree
5195 follow_all_ssa_edges (tree val)
5197 return val;
5200 /* Fold the statement pointed to by GSI. In some cases, this function may
5201 replace the whole statement with a new one. Returns true iff folding
5202 makes any changes.
5203 The statement pointed to by GSI should be in valid gimple form but may
5204 be in unfolded state as resulting from for example constant propagation
5205 which can produce *&x = 0. */
5207 bool
5208 fold_stmt (gimple_stmt_iterator *gsi)
5210 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5213 bool
5214 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5216 return fold_stmt_1 (gsi, false, valueize);
5219 /* Perform the minimal folding on statement *GSI. Only operations like
5220 *&x created by constant propagation are handled. The statement cannot
5221 be replaced with a new one. Return true if the statement was
5222 changed, false otherwise.
5223 The statement *GSI should be in valid gimple form but may
5224 be in unfolded state as resulting from for example constant propagation
5225 which can produce *&x = 0. */
5227 bool
5228 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5230 gimple *stmt = gsi_stmt (*gsi);
5231 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5232 gcc_assert (gsi_stmt (*gsi) == stmt);
5233 return changed;
5236 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5237 if EXPR is null or we don't know how.
5238 If non-null, the result always has boolean type. */
5240 static tree
5241 canonicalize_bool (tree expr, bool invert)
5243 if (!expr)
5244 return NULL_TREE;
5245 else if (invert)
5247 if (integer_nonzerop (expr))
5248 return boolean_false_node;
5249 else if (integer_zerop (expr))
5250 return boolean_true_node;
5251 else if (TREE_CODE (expr) == SSA_NAME)
5252 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5253 build_int_cst (TREE_TYPE (expr), 0));
5254 else if (COMPARISON_CLASS_P (expr))
5255 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5256 boolean_type_node,
5257 TREE_OPERAND (expr, 0),
5258 TREE_OPERAND (expr, 1));
5259 else
5260 return NULL_TREE;
5262 else
5264 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5265 return expr;
5266 if (integer_nonzerop (expr))
5267 return boolean_true_node;
5268 else if (integer_zerop (expr))
5269 return boolean_false_node;
5270 else if (TREE_CODE (expr) == SSA_NAME)
5271 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5272 build_int_cst (TREE_TYPE (expr), 0));
5273 else if (COMPARISON_CLASS_P (expr))
5274 return fold_build2 (TREE_CODE (expr),
5275 boolean_type_node,
5276 TREE_OPERAND (expr, 0),
5277 TREE_OPERAND (expr, 1));
5278 else
5279 return NULL_TREE;
5283 /* Check to see if a boolean expression EXPR is logically equivalent to the
5284 comparison (OP1 CODE OP2). Check for various identities involving
5285 SSA_NAMEs. */
5287 static bool
5288 same_bool_comparison_p (const_tree expr, enum tree_code code,
5289 const_tree op1, const_tree op2)
5291 gimple *s;
5293 /* The obvious case. */
5294 if (TREE_CODE (expr) == code
5295 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5296 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5297 return true;
5299 /* Check for comparing (name, name != 0) and the case where expr
5300 is an SSA_NAME with a definition matching the comparison. */
5301 if (TREE_CODE (expr) == SSA_NAME
5302 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5304 if (operand_equal_p (expr, op1, 0))
5305 return ((code == NE_EXPR && integer_zerop (op2))
5306 || (code == EQ_EXPR && integer_nonzerop (op2)));
5307 s = SSA_NAME_DEF_STMT (expr);
5308 if (is_gimple_assign (s)
5309 && gimple_assign_rhs_code (s) == code
5310 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5311 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5312 return true;
5315 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5316 of name is a comparison, recurse. */
5317 if (TREE_CODE (op1) == SSA_NAME
5318 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5320 s = SSA_NAME_DEF_STMT (op1);
5321 if (is_gimple_assign (s)
5322 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5324 enum tree_code c = gimple_assign_rhs_code (s);
5325 if ((c == NE_EXPR && integer_zerop (op2))
5326 || (c == EQ_EXPR && integer_nonzerop (op2)))
5327 return same_bool_comparison_p (expr, c,
5328 gimple_assign_rhs1 (s),
5329 gimple_assign_rhs2 (s));
5330 if ((c == EQ_EXPR && integer_zerop (op2))
5331 || (c == NE_EXPR && integer_nonzerop (op2)))
5332 return same_bool_comparison_p (expr,
5333 invert_tree_comparison (c, false),
5334 gimple_assign_rhs1 (s),
5335 gimple_assign_rhs2 (s));
5338 return false;
5341 /* Check to see if two boolean expressions OP1 and OP2 are logically
5342 equivalent. */
5344 static bool
5345 same_bool_result_p (const_tree op1, const_tree op2)
5347 /* Simple cases first. */
5348 if (operand_equal_p (op1, op2, 0))
5349 return true;
5351 /* Check the cases where at least one of the operands is a comparison.
5352 These are a bit smarter than operand_equal_p in that they apply some
5353 identifies on SSA_NAMEs. */
5354 if (COMPARISON_CLASS_P (op2)
5355 && same_bool_comparison_p (op1, TREE_CODE (op2),
5356 TREE_OPERAND (op2, 0),
5357 TREE_OPERAND (op2, 1)))
5358 return true;
5359 if (COMPARISON_CLASS_P (op1)
5360 && same_bool_comparison_p (op2, TREE_CODE (op1),
5361 TREE_OPERAND (op1, 0),
5362 TREE_OPERAND (op1, 1)))
5363 return true;
5365 /* Default case. */
5366 return false;
5369 /* Forward declarations for some mutually recursive functions. */
5371 static tree
5372 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5373 enum tree_code code2, tree op2a, tree op2b);
5374 static tree
5375 and_var_with_comparison (tree var, bool invert,
5376 enum tree_code code2, tree op2a, tree op2b);
5377 static tree
5378 and_var_with_comparison_1 (gimple *stmt,
5379 enum tree_code code2, tree op2a, tree op2b);
5380 static tree
5381 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5382 enum tree_code code2, tree op2a, tree op2b);
5383 static tree
5384 or_var_with_comparison (tree var, bool invert,
5385 enum tree_code code2, tree op2a, tree op2b);
5386 static tree
5387 or_var_with_comparison_1 (gimple *stmt,
5388 enum tree_code code2, tree op2a, tree op2b);
5390 /* Helper function for and_comparisons_1: try to simplify the AND of the
5391 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5392 If INVERT is true, invert the value of the VAR before doing the AND.
5393 Return NULL_EXPR if we can't simplify this to a single expression. */
5395 static tree
5396 and_var_with_comparison (tree var, bool invert,
5397 enum tree_code code2, tree op2a, tree op2b)
5399 tree t;
5400 gimple *stmt = SSA_NAME_DEF_STMT (var);
5402 /* We can only deal with variables whose definitions are assignments. */
5403 if (!is_gimple_assign (stmt))
5404 return NULL_TREE;
5406 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5407 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5408 Then we only have to consider the simpler non-inverted cases. */
5409 if (invert)
5410 t = or_var_with_comparison_1 (stmt,
5411 invert_tree_comparison (code2, false),
5412 op2a, op2b);
5413 else
5414 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5415 return canonicalize_bool (t, invert);
5418 /* Try to simplify the AND of the ssa variable defined by the assignment
5419 STMT with the comparison specified by (OP2A CODE2 OP2B).
5420 Return NULL_EXPR if we can't simplify this to a single expression. */
5422 static tree
5423 and_var_with_comparison_1 (gimple *stmt,
5424 enum tree_code code2, tree op2a, tree op2b)
5426 tree var = gimple_assign_lhs (stmt);
5427 tree true_test_var = NULL_TREE;
5428 tree false_test_var = NULL_TREE;
5429 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5431 /* Check for identities like (var AND (var == 0)) => false. */
5432 if (TREE_CODE (op2a) == SSA_NAME
5433 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5435 if ((code2 == NE_EXPR && integer_zerop (op2b))
5436 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5438 true_test_var = op2a;
5439 if (var == true_test_var)
5440 return var;
5442 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5443 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5445 false_test_var = op2a;
5446 if (var == false_test_var)
5447 return boolean_false_node;
5451 /* If the definition is a comparison, recurse on it. */
5452 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5454 tree t = and_comparisons_1 (innercode,
5455 gimple_assign_rhs1 (stmt),
5456 gimple_assign_rhs2 (stmt),
5457 code2,
5458 op2a,
5459 op2b);
5460 if (t)
5461 return t;
5464 /* If the definition is an AND or OR expression, we may be able to
5465 simplify by reassociating. */
5466 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5467 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5469 tree inner1 = gimple_assign_rhs1 (stmt);
5470 tree inner2 = gimple_assign_rhs2 (stmt);
5471 gimple *s;
5472 tree t;
5473 tree partial = NULL_TREE;
5474 bool is_and = (innercode == BIT_AND_EXPR);
5476 /* Check for boolean identities that don't require recursive examination
5477 of inner1/inner2:
5478 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5479 inner1 AND (inner1 OR inner2) => inner1
5480 !inner1 AND (inner1 AND inner2) => false
5481 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5482 Likewise for similar cases involving inner2. */
5483 if (inner1 == true_test_var)
5484 return (is_and ? var : inner1);
5485 else if (inner2 == true_test_var)
5486 return (is_and ? var : inner2);
5487 else if (inner1 == false_test_var)
5488 return (is_and
5489 ? boolean_false_node
5490 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5491 else if (inner2 == false_test_var)
5492 return (is_and
5493 ? boolean_false_node
5494 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5496 /* Next, redistribute/reassociate the AND across the inner tests.
5497 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5498 if (TREE_CODE (inner1) == SSA_NAME
5499 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5500 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5501 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5502 gimple_assign_rhs1 (s),
5503 gimple_assign_rhs2 (s),
5504 code2, op2a, op2b)))
5506 /* Handle the AND case, where we are reassociating:
5507 (inner1 AND inner2) AND (op2a code2 op2b)
5508 => (t AND inner2)
5509 If the partial result t is a constant, we win. Otherwise
5510 continue on to try reassociating with the other inner test. */
5511 if (is_and)
5513 if (integer_onep (t))
5514 return inner2;
5515 else if (integer_zerop (t))
5516 return boolean_false_node;
5519 /* Handle the OR case, where we are redistributing:
5520 (inner1 OR inner2) AND (op2a code2 op2b)
5521 => (t OR (inner2 AND (op2a code2 op2b))) */
5522 else if (integer_onep (t))
5523 return boolean_true_node;
5525 /* Save partial result for later. */
5526 partial = t;
5529 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5530 if (TREE_CODE (inner2) == SSA_NAME
5531 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5532 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5533 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5534 gimple_assign_rhs1 (s),
5535 gimple_assign_rhs2 (s),
5536 code2, op2a, op2b)))
5538 /* Handle the AND case, where we are reassociating:
5539 (inner1 AND inner2) AND (op2a code2 op2b)
5540 => (inner1 AND t) */
5541 if (is_and)
5543 if (integer_onep (t))
5544 return inner1;
5545 else if (integer_zerop (t))
5546 return boolean_false_node;
5547 /* If both are the same, we can apply the identity
5548 (x AND x) == x. */
5549 else if (partial && same_bool_result_p (t, partial))
5550 return t;
5553 /* Handle the OR case. where we are redistributing:
5554 (inner1 OR inner2) AND (op2a code2 op2b)
5555 => (t OR (inner1 AND (op2a code2 op2b)))
5556 => (t OR partial) */
5557 else
5559 if (integer_onep (t))
5560 return boolean_true_node;
5561 else if (partial)
5563 /* We already got a simplification for the other
5564 operand to the redistributed OR expression. The
5565 interesting case is when at least one is false.
5566 Or, if both are the same, we can apply the identity
5567 (x OR x) == x. */
5568 if (integer_zerop (partial))
5569 return t;
5570 else if (integer_zerop (t))
5571 return partial;
5572 else if (same_bool_result_p (t, partial))
5573 return t;
5578 return NULL_TREE;
5581 /* Try to simplify the AND of two comparisons defined by
5582 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5583 If this can be done without constructing an intermediate value,
5584 return the resulting tree; otherwise NULL_TREE is returned.
5585 This function is deliberately asymmetric as it recurses on SSA_DEFs
5586 in the first comparison but not the second. */
5588 static tree
5589 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5590 enum tree_code code2, tree op2a, tree op2b)
5592 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5594 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5595 if (operand_equal_p (op1a, op2a, 0)
5596 && operand_equal_p (op1b, op2b, 0))
5598 /* Result will be either NULL_TREE, or a combined comparison. */
5599 tree t = combine_comparisons (UNKNOWN_LOCATION,
5600 TRUTH_ANDIF_EXPR, code1, code2,
5601 truth_type, op1a, op1b);
5602 if (t)
5603 return t;
5606 /* Likewise the swapped case of the above. */
5607 if (operand_equal_p (op1a, op2b, 0)
5608 && operand_equal_p (op1b, op2a, 0))
5610 /* Result will be either NULL_TREE, or a combined comparison. */
5611 tree t = combine_comparisons (UNKNOWN_LOCATION,
5612 TRUTH_ANDIF_EXPR, code1,
5613 swap_tree_comparison (code2),
5614 truth_type, op1a, op1b);
5615 if (t)
5616 return t;
5619 /* If both comparisons are of the same value against constants, we might
5620 be able to merge them. */
5621 if (operand_equal_p (op1a, op2a, 0)
5622 && TREE_CODE (op1b) == INTEGER_CST
5623 && TREE_CODE (op2b) == INTEGER_CST)
5625 int cmp = tree_int_cst_compare (op1b, op2b);
5627 /* If we have (op1a == op1b), we should either be able to
5628 return that or FALSE, depending on whether the constant op1b
5629 also satisfies the other comparison against op2b. */
5630 if (code1 == EQ_EXPR)
5632 bool done = true;
5633 bool val;
5634 switch (code2)
5636 case EQ_EXPR: val = (cmp == 0); break;
5637 case NE_EXPR: val = (cmp != 0); break;
5638 case LT_EXPR: val = (cmp < 0); break;
5639 case GT_EXPR: val = (cmp > 0); break;
5640 case LE_EXPR: val = (cmp <= 0); break;
5641 case GE_EXPR: val = (cmp >= 0); break;
5642 default: done = false;
5644 if (done)
5646 if (val)
5647 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5648 else
5649 return boolean_false_node;
5652 /* Likewise if the second comparison is an == comparison. */
5653 else if (code2 == EQ_EXPR)
5655 bool done = true;
5656 bool val;
5657 switch (code1)
5659 case EQ_EXPR: val = (cmp == 0); break;
5660 case NE_EXPR: val = (cmp != 0); break;
5661 case LT_EXPR: val = (cmp > 0); break;
5662 case GT_EXPR: val = (cmp < 0); break;
5663 case LE_EXPR: val = (cmp >= 0); break;
5664 case GE_EXPR: val = (cmp <= 0); break;
5665 default: done = false;
5667 if (done)
5669 if (val)
5670 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5671 else
5672 return boolean_false_node;
5676 /* Same business with inequality tests. */
5677 else if (code1 == NE_EXPR)
5679 bool val;
5680 switch (code2)
5682 case EQ_EXPR: val = (cmp != 0); break;
5683 case NE_EXPR: val = (cmp == 0); break;
5684 case LT_EXPR: val = (cmp >= 0); break;
5685 case GT_EXPR: val = (cmp <= 0); break;
5686 case LE_EXPR: val = (cmp > 0); break;
5687 case GE_EXPR: val = (cmp < 0); break;
5688 default:
5689 val = false;
5691 if (val)
5692 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5694 else if (code2 == NE_EXPR)
5696 bool val;
5697 switch (code1)
5699 case EQ_EXPR: val = (cmp == 0); break;
5700 case NE_EXPR: val = (cmp != 0); break;
5701 case LT_EXPR: val = (cmp <= 0); break;
5702 case GT_EXPR: val = (cmp >= 0); break;
5703 case LE_EXPR: val = (cmp < 0); break;
5704 case GE_EXPR: val = (cmp > 0); break;
5705 default:
5706 val = false;
5708 if (val)
5709 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5712 /* Chose the more restrictive of two < or <= comparisons. */
5713 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5714 && (code2 == LT_EXPR || code2 == LE_EXPR))
5716 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5717 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5718 else
5719 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5722 /* Likewise chose the more restrictive of two > or >= comparisons. */
5723 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5724 && (code2 == GT_EXPR || code2 == GE_EXPR))
5726 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5727 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5728 else
5729 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5732 /* Check for singleton ranges. */
5733 else if (cmp == 0
5734 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5735 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5736 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5738 /* Check for disjoint ranges. */
5739 else if (cmp <= 0
5740 && (code1 == LT_EXPR || code1 == LE_EXPR)
5741 && (code2 == GT_EXPR || code2 == GE_EXPR))
5742 return boolean_false_node;
5743 else if (cmp >= 0
5744 && (code1 == GT_EXPR || code1 == GE_EXPR)
5745 && (code2 == LT_EXPR || code2 == LE_EXPR))
5746 return boolean_false_node;
5749 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5750 NAME's definition is a truth value. See if there are any simplifications
5751 that can be done against the NAME's definition. */
5752 if (TREE_CODE (op1a) == SSA_NAME
5753 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5754 && (integer_zerop (op1b) || integer_onep (op1b)))
5756 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5757 || (code1 == NE_EXPR && integer_onep (op1b)));
5758 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5759 switch (gimple_code (stmt))
5761 case GIMPLE_ASSIGN:
5762 /* Try to simplify by copy-propagating the definition. */
5763 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5765 case GIMPLE_PHI:
5766 /* If every argument to the PHI produces the same result when
5767 ANDed with the second comparison, we win.
5768 Do not do this unless the type is bool since we need a bool
5769 result here anyway. */
5770 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5772 tree result = NULL_TREE;
5773 unsigned i;
5774 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5776 tree arg = gimple_phi_arg_def (stmt, i);
5778 /* If this PHI has itself as an argument, ignore it.
5779 If all the other args produce the same result,
5780 we're still OK. */
5781 if (arg == gimple_phi_result (stmt))
5782 continue;
5783 else if (TREE_CODE (arg) == INTEGER_CST)
5785 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5787 if (!result)
5788 result = boolean_false_node;
5789 else if (!integer_zerop (result))
5790 return NULL_TREE;
5792 else if (!result)
5793 result = fold_build2 (code2, boolean_type_node,
5794 op2a, op2b);
5795 else if (!same_bool_comparison_p (result,
5796 code2, op2a, op2b))
5797 return NULL_TREE;
5799 else if (TREE_CODE (arg) == SSA_NAME
5800 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5802 tree temp;
5803 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5804 /* In simple cases we can look through PHI nodes,
5805 but we have to be careful with loops.
5806 See PR49073. */
5807 if (! dom_info_available_p (CDI_DOMINATORS)
5808 || gimple_bb (def_stmt) == gimple_bb (stmt)
5809 || dominated_by_p (CDI_DOMINATORS,
5810 gimple_bb (def_stmt),
5811 gimple_bb (stmt)))
5812 return NULL_TREE;
5813 temp = and_var_with_comparison (arg, invert, code2,
5814 op2a, op2b);
5815 if (!temp)
5816 return NULL_TREE;
5817 else if (!result)
5818 result = temp;
5819 else if (!same_bool_result_p (result, temp))
5820 return NULL_TREE;
5822 else
5823 return NULL_TREE;
5825 return result;
5828 default:
5829 break;
5832 return NULL_TREE;
5835 /* Try to simplify the AND of two comparisons, specified by
5836 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5837 If this can be simplified to a single expression (without requiring
5838 introducing more SSA variables to hold intermediate values),
5839 return the resulting tree. Otherwise return NULL_TREE.
5840 If the result expression is non-null, it has boolean type. */
5842 tree
5843 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5844 enum tree_code code2, tree op2a, tree op2b)
5846 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5847 if (t)
5848 return t;
5849 else
5850 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5853 /* Helper function for or_comparisons_1: try to simplify the OR of the
5854 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5855 If INVERT is true, invert the value of VAR before doing the OR.
5856 Return NULL_EXPR if we can't simplify this to a single expression. */
5858 static tree
5859 or_var_with_comparison (tree var, bool invert,
5860 enum tree_code code2, tree op2a, tree op2b)
5862 tree t;
5863 gimple *stmt = SSA_NAME_DEF_STMT (var);
5865 /* We can only deal with variables whose definitions are assignments. */
5866 if (!is_gimple_assign (stmt))
5867 return NULL_TREE;
5869 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5870 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5871 Then we only have to consider the simpler non-inverted cases. */
5872 if (invert)
5873 t = and_var_with_comparison_1 (stmt,
5874 invert_tree_comparison (code2, false),
5875 op2a, op2b);
5876 else
5877 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5878 return canonicalize_bool (t, invert);
5881 /* Try to simplify the OR of the ssa variable defined by the assignment
5882 STMT with the comparison specified by (OP2A CODE2 OP2B).
5883 Return NULL_EXPR if we can't simplify this to a single expression. */
5885 static tree
5886 or_var_with_comparison_1 (gimple *stmt,
5887 enum tree_code code2, tree op2a, tree op2b)
5889 tree var = gimple_assign_lhs (stmt);
5890 tree true_test_var = NULL_TREE;
5891 tree false_test_var = NULL_TREE;
5892 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5894 /* Check for identities like (var OR (var != 0)) => true . */
5895 if (TREE_CODE (op2a) == SSA_NAME
5896 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5898 if ((code2 == NE_EXPR && integer_zerop (op2b))
5899 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5901 true_test_var = op2a;
5902 if (var == true_test_var)
5903 return var;
5905 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5906 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5908 false_test_var = op2a;
5909 if (var == false_test_var)
5910 return boolean_true_node;
5914 /* If the definition is a comparison, recurse on it. */
5915 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5917 tree t = or_comparisons_1 (innercode,
5918 gimple_assign_rhs1 (stmt),
5919 gimple_assign_rhs2 (stmt),
5920 code2,
5921 op2a,
5922 op2b);
5923 if (t)
5924 return t;
5927 /* If the definition is an AND or OR expression, we may be able to
5928 simplify by reassociating. */
5929 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5930 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5932 tree inner1 = gimple_assign_rhs1 (stmt);
5933 tree inner2 = gimple_assign_rhs2 (stmt);
5934 gimple *s;
5935 tree t;
5936 tree partial = NULL_TREE;
5937 bool is_or = (innercode == BIT_IOR_EXPR);
5939 /* Check for boolean identities that don't require recursive examination
5940 of inner1/inner2:
5941 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5942 inner1 OR (inner1 AND inner2) => inner1
5943 !inner1 OR (inner1 OR inner2) => true
5944 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5946 if (inner1 == true_test_var)
5947 return (is_or ? var : inner1);
5948 else if (inner2 == true_test_var)
5949 return (is_or ? var : inner2);
5950 else if (inner1 == false_test_var)
5951 return (is_or
5952 ? boolean_true_node
5953 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5954 else if (inner2 == false_test_var)
5955 return (is_or
5956 ? boolean_true_node
5957 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5959 /* Next, redistribute/reassociate the OR across the inner tests.
5960 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5961 if (TREE_CODE (inner1) == SSA_NAME
5962 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5963 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5964 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5965 gimple_assign_rhs1 (s),
5966 gimple_assign_rhs2 (s),
5967 code2, op2a, op2b)))
5969 /* Handle the OR case, where we are reassociating:
5970 (inner1 OR inner2) OR (op2a code2 op2b)
5971 => (t OR inner2)
5972 If the partial result t is a constant, we win. Otherwise
5973 continue on to try reassociating with the other inner test. */
5974 if (is_or)
5976 if (integer_onep (t))
5977 return boolean_true_node;
5978 else if (integer_zerop (t))
5979 return inner2;
5982 /* Handle the AND case, where we are redistributing:
5983 (inner1 AND inner2) OR (op2a code2 op2b)
5984 => (t AND (inner2 OR (op2a code op2b))) */
5985 else if (integer_zerop (t))
5986 return boolean_false_node;
5988 /* Save partial result for later. */
5989 partial = t;
5992 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5993 if (TREE_CODE (inner2) == SSA_NAME
5994 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5995 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5996 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5997 gimple_assign_rhs1 (s),
5998 gimple_assign_rhs2 (s),
5999 code2, op2a, op2b)))
6001 /* Handle the OR case, where we are reassociating:
6002 (inner1 OR inner2) OR (op2a code2 op2b)
6003 => (inner1 OR t)
6004 => (t OR partial) */
6005 if (is_or)
6007 if (integer_zerop (t))
6008 return inner1;
6009 else if (integer_onep (t))
6010 return boolean_true_node;
6011 /* If both are the same, we can apply the identity
6012 (x OR x) == x. */
6013 else if (partial && same_bool_result_p (t, partial))
6014 return t;
6017 /* Handle the AND case, where we are redistributing:
6018 (inner1 AND inner2) OR (op2a code2 op2b)
6019 => (t AND (inner1 OR (op2a code2 op2b)))
6020 => (t AND partial) */
6021 else
6023 if (integer_zerop (t))
6024 return boolean_false_node;
6025 else if (partial)
6027 /* We already got a simplification for the other
6028 operand to the redistributed AND expression. The
6029 interesting case is when at least one is true.
6030 Or, if both are the same, we can apply the identity
6031 (x AND x) == x. */
6032 if (integer_onep (partial))
6033 return t;
6034 else if (integer_onep (t))
6035 return partial;
6036 else if (same_bool_result_p (t, partial))
6037 return t;
6042 return NULL_TREE;
6045 /* Try to simplify the OR of two comparisons defined by
6046 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6047 If this can be done without constructing an intermediate value,
6048 return the resulting tree; otherwise NULL_TREE is returned.
6049 This function is deliberately asymmetric as it recurses on SSA_DEFs
6050 in the first comparison but not the second. */
6052 static tree
6053 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
6054 enum tree_code code2, tree op2a, tree op2b)
6056 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6058 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6059 if (operand_equal_p (op1a, op2a, 0)
6060 && operand_equal_p (op1b, op2b, 0))
6062 /* Result will be either NULL_TREE, or a combined comparison. */
6063 tree t = combine_comparisons (UNKNOWN_LOCATION,
6064 TRUTH_ORIF_EXPR, code1, code2,
6065 truth_type, op1a, op1b);
6066 if (t)
6067 return t;
6070 /* Likewise the swapped case of the above. */
6071 if (operand_equal_p (op1a, op2b, 0)
6072 && operand_equal_p (op1b, op2a, 0))
6074 /* Result will be either NULL_TREE, or a combined comparison. */
6075 tree t = combine_comparisons (UNKNOWN_LOCATION,
6076 TRUTH_ORIF_EXPR, code1,
6077 swap_tree_comparison (code2),
6078 truth_type, op1a, op1b);
6079 if (t)
6080 return t;
6083 /* If both comparisons are of the same value against constants, we might
6084 be able to merge them. */
6085 if (operand_equal_p (op1a, op2a, 0)
6086 && TREE_CODE (op1b) == INTEGER_CST
6087 && TREE_CODE (op2b) == INTEGER_CST)
6089 int cmp = tree_int_cst_compare (op1b, op2b);
6091 /* If we have (op1a != op1b), we should either be able to
6092 return that or TRUE, depending on whether the constant op1b
6093 also satisfies the other comparison against op2b. */
6094 if (code1 == NE_EXPR)
6096 bool done = true;
6097 bool val;
6098 switch (code2)
6100 case EQ_EXPR: val = (cmp == 0); break;
6101 case NE_EXPR: val = (cmp != 0); break;
6102 case LT_EXPR: val = (cmp < 0); break;
6103 case GT_EXPR: val = (cmp > 0); break;
6104 case LE_EXPR: val = (cmp <= 0); break;
6105 case GE_EXPR: val = (cmp >= 0); break;
6106 default: done = false;
6108 if (done)
6110 if (val)
6111 return boolean_true_node;
6112 else
6113 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6116 /* Likewise if the second comparison is a != comparison. */
6117 else if (code2 == NE_EXPR)
6119 bool done = true;
6120 bool val;
6121 switch (code1)
6123 case EQ_EXPR: val = (cmp == 0); break;
6124 case NE_EXPR: val = (cmp != 0); break;
6125 case LT_EXPR: val = (cmp > 0); break;
6126 case GT_EXPR: val = (cmp < 0); break;
6127 case LE_EXPR: val = (cmp >= 0); break;
6128 case GE_EXPR: val = (cmp <= 0); break;
6129 default: done = false;
6131 if (done)
6133 if (val)
6134 return boolean_true_node;
6135 else
6136 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6140 /* See if an equality test is redundant with the other comparison. */
6141 else if (code1 == EQ_EXPR)
6143 bool val;
6144 switch (code2)
6146 case EQ_EXPR: val = (cmp == 0); break;
6147 case NE_EXPR: val = (cmp != 0); break;
6148 case LT_EXPR: val = (cmp < 0); break;
6149 case GT_EXPR: val = (cmp > 0); break;
6150 case LE_EXPR: val = (cmp <= 0); break;
6151 case GE_EXPR: val = (cmp >= 0); break;
6152 default:
6153 val = false;
6155 if (val)
6156 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6158 else if (code2 == EQ_EXPR)
6160 bool val;
6161 switch (code1)
6163 case EQ_EXPR: val = (cmp == 0); break;
6164 case NE_EXPR: val = (cmp != 0); break;
6165 case LT_EXPR: val = (cmp > 0); break;
6166 case GT_EXPR: val = (cmp < 0); break;
6167 case LE_EXPR: val = (cmp >= 0); break;
6168 case GE_EXPR: val = (cmp <= 0); break;
6169 default:
6170 val = false;
6172 if (val)
6173 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6176 /* Chose the less restrictive of two < or <= comparisons. */
6177 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6178 && (code2 == LT_EXPR || code2 == LE_EXPR))
6180 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6181 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6182 else
6183 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6186 /* Likewise chose the less restrictive of two > or >= comparisons. */
6187 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6188 && (code2 == GT_EXPR || code2 == GE_EXPR))
6190 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6191 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6192 else
6193 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6196 /* Check for singleton ranges. */
6197 else if (cmp == 0
6198 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6199 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6200 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6202 /* Check for less/greater pairs that don't restrict the range at all. */
6203 else if (cmp >= 0
6204 && (code1 == LT_EXPR || code1 == LE_EXPR)
6205 && (code2 == GT_EXPR || code2 == GE_EXPR))
6206 return boolean_true_node;
6207 else if (cmp <= 0
6208 && (code1 == GT_EXPR || code1 == GE_EXPR)
6209 && (code2 == LT_EXPR || code2 == LE_EXPR))
6210 return boolean_true_node;
6213 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6214 NAME's definition is a truth value. See if there are any simplifications
6215 that can be done against the NAME's definition. */
6216 if (TREE_CODE (op1a) == SSA_NAME
6217 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6218 && (integer_zerop (op1b) || integer_onep (op1b)))
6220 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6221 || (code1 == NE_EXPR && integer_onep (op1b)));
6222 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6223 switch (gimple_code (stmt))
6225 case GIMPLE_ASSIGN:
6226 /* Try to simplify by copy-propagating the definition. */
6227 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6229 case GIMPLE_PHI:
6230 /* If every argument to the PHI produces the same result when
6231 ORed with the second comparison, we win.
6232 Do not do this unless the type is bool since we need a bool
6233 result here anyway. */
6234 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6236 tree result = NULL_TREE;
6237 unsigned i;
6238 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6240 tree arg = gimple_phi_arg_def (stmt, i);
6242 /* If this PHI has itself as an argument, ignore it.
6243 If all the other args produce the same result,
6244 we're still OK. */
6245 if (arg == gimple_phi_result (stmt))
6246 continue;
6247 else if (TREE_CODE (arg) == INTEGER_CST)
6249 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6251 if (!result)
6252 result = boolean_true_node;
6253 else if (!integer_onep (result))
6254 return NULL_TREE;
6256 else if (!result)
6257 result = fold_build2 (code2, boolean_type_node,
6258 op2a, op2b);
6259 else if (!same_bool_comparison_p (result,
6260 code2, op2a, op2b))
6261 return NULL_TREE;
6263 else if (TREE_CODE (arg) == SSA_NAME
6264 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6266 tree temp;
6267 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6268 /* In simple cases we can look through PHI nodes,
6269 but we have to be careful with loops.
6270 See PR49073. */
6271 if (! dom_info_available_p (CDI_DOMINATORS)
6272 || gimple_bb (def_stmt) == gimple_bb (stmt)
6273 || dominated_by_p (CDI_DOMINATORS,
6274 gimple_bb (def_stmt),
6275 gimple_bb (stmt)))
6276 return NULL_TREE;
6277 temp = or_var_with_comparison (arg, invert, code2,
6278 op2a, op2b);
6279 if (!temp)
6280 return NULL_TREE;
6281 else if (!result)
6282 result = temp;
6283 else if (!same_bool_result_p (result, temp))
6284 return NULL_TREE;
6286 else
6287 return NULL_TREE;
6289 return result;
6292 default:
6293 break;
6296 return NULL_TREE;
6299 /* Try to simplify the OR of two comparisons, specified by
6300 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6301 If this can be simplified to a single expression (without requiring
6302 introducing more SSA variables to hold intermediate values),
6303 return the resulting tree. Otherwise return NULL_TREE.
6304 If the result expression is non-null, it has boolean type. */
6306 tree
6307 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6308 enum tree_code code2, tree op2a, tree op2b)
6310 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6311 if (t)
6312 return t;
6313 else
6314 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  /* First give the match-and-simplify machinery a chance; accept its
     result only if it is already a GIMPLE value or the MPRTS hook can
     materialize it.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to hand-written folding per statement kind.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one element per lane folds to a
		 VECTOR_CST when every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Rewrite *&invariant into a MEM_REF on the invariant
			 address so the aggregate folder below can see it.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we cant reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put the constant (if any) second so the tests below
		       only have to look at OP1.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	/* Internal functions: fold the UBSAN overflow checks to their
	   plain arithmetic result when no overflow occurs, and
	   __builtin_expect to its first argument when constant.  */
	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Not fully constant, but some algebraic identities never
		   overflow and thus still fold.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Only accept a folded constant when the checked operation
	       did not overflow.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	/* Regular calls: fold recognized builtins with valueized
	   arguments.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6623 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6624 Returns NULL_TREE if folding to a constant is not possible, otherwise
6625 returns a constant according to is_gimple_min_invariant. */
6627 tree
6628 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6630 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6631 if (res && is_gimple_min_invariant (res))
6632 return res;
6633 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the consructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET; give
	 up if the sum does not fit a signed HOST_WIDE_INT.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a known &DECL address lets us find an initializer.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move bits; just look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Peel one reference level, accumulate its bit offset and recurse
	 on the underlying base.  Variable-position accesses (SIZE !=
	 MAX_SIZE) cannot be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* An access spanning multiple array elements cannot be folded by
     recursing into a single element; instead native-encode the run of
     elements it covers into a byte buffer and reinterpret that.  */
  if (size > elt_size.to_uhwi () * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      /* Entirely past the last explicit initializer: the whole access
	 reads implicit zeros.  */
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      /* [INDEX, MAX_INDEX] is the array-index range covered by the
	 current constructor element ELT (RANGE_EXPR initializers can
	 cover several indices).  */
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  /* Encode the current element's tail starting at INNER_OFFSET
	     (non-zero only on the first iteration).  */
	  int len = native_encode_expr (val, buf + bufoff,
					elt_size.to_uhwi (),
					inner_offset / BITS_PER_UNIT);
	  if (len != elt_size - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  /* Advance to the next array index and find the value that
	     initializes it: the same ctor element, the next one, or an
	     implicit zero for gaps.  */
	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  /* The access fits in one element; recurse into its initializer.  */
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to; in this case.  Increment *SUBOFF by the bit offset
   of the accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Scan the initialized fields for one overlapping the access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's own initializer at the offset
	     relative to the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }
  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6976 /* CTOR is value initializing memory. Fold a reference of TYPE and
6977 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6978 is zero, attempt to fold a reference to the entire subobject
6979 which OFFSET refers to. This is used when folding accesses to
6980 string members of aggregates. When non-null, set *SUBOFF to
6981 the bit offset of the accessed subobject. */
6983 tree
6984 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6985 const poly_uint64 &poly_size, tree from_decl,
6986 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6988 tree ret;
6990 /* We found the field with exact match. */
6991 if (type
6992 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6993 && known_eq (poly_offset, 0U))
6994 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6996 /* The remaining optimizations need a constant size and offset. */
6997 unsigned HOST_WIDE_INT size, offset;
6998 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6999 return NULL_TREE;
7001 /* We are at the end of walk, see if we can view convert the
7002 result. */
7003 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7004 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7005 && !compare_tree_int (TYPE_SIZE (type), size)
7006 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
7008 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7009 if (ret)
7011 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7012 if (ret)
7013 STRIP_USELESS_TYPE_CONVERSION (ret);
7015 return ret;
7017 /* For constants and byte-aligned/sized reads try to go through
7018 native_encode/interpret. */
7019 if (CONSTANT_CLASS_P (ctor)
7020 && BITS_PER_UNIT == 8
7021 && offset % BITS_PER_UNIT == 0
7022 && size % BITS_PER_UNIT == 0
7023 && size <= MAX_BITSIZE_MODE_ANY_MODE)
7025 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7026 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7027 offset / BITS_PER_UNIT);
7028 if (len > 0)
7029 return native_interpret_expr (type, buf, len);
7031 if (TREE_CODE (ctor) == CONSTRUCTOR)
7033 unsigned HOST_WIDE_INT dummy = 0;
7034 if (!suboff)
7035 suboff = &dummy;
7037 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7038 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7039 return fold_array_ctor_reference (type, ctor, offset, size,
7040 from_decl, suboff);
7042 return fold_nonarray_ctor_reference (type, ctor, offset, size,
7043 from_decl, suboff);
7046 return NULL_TREE;
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile accesses must be kept as-is.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset of element IDX from the start of the array:
		 (IDX - LOW_BOUND) * UNIT_SIZE * BITS_PER_UNIT, with the
		 index difference sign-extended to its own precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first; then extract the requested
	   part if it folded to a constant.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
7163 tree
7164 fold_const_aggregate_ref (tree t)
7166 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Convert OFFSET (bytes into the vtable) plus TOKEN (slot index) into
     a single bit offset from the start of the vtable array.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
7279 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7280 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7281 KNOWN_BINFO carries the binfo describing the true type of
7282 OBJ_TYPE_REF_OBJECT(REF).
7283 Set CAN_REFER if non-NULL to false if method
7284 is not referable or if the virtual table is ill-formed (such as rewriten
7285 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7287 tree
7288 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7289 bool *can_refer)
7291 unsigned HOST_WIDE_INT offset;
7292 tree v;
7294 v = BINFO_VTABLE (known_binfo);
7295 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7296 if (!v)
7297 return NULL_TREE;
7299 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7301 if (can_refer)
7302 *can_refer = false;
7303 return NULL_TREE;
7305 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions on the pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only fold real pointer dereferences; ref-all pointers may alias
     anything so keep the indirection as-is.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  /* Honor a non-zero lower bound of the array domain (e.g. in
	     Fortran or Ada arrays).  */
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector; the subparts
	     count may be non-constant for variable-length vectors.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must equal the size of one element, i.e. point at
	     the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse to simplify the inner dereference first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7429 /* Return true if CODE is an operation that when operating on signed
7430 integer types involves undefined behavior on overflow and the
7431 operation can be expressed with unsigned arithmetic. */
7433 bool
7434 arith_code_with_undefined_signed_overflow (tree_code code)
7436 switch (code)
7438 case ABS_EXPR:
7439 case PLUS_EXPR:
7440 case MINUS_EXPR:
7441 case MULT_EXPR:
7442 case NEGATE_EXPR:
7443 case POINTER_PLUS_EXPR:
7444 return true;
7445 default:
7446 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS on the signed type becomes ABSU, which already takes the signed
     operand and produces an unsigned result, so no operand conversion
     is needed in that case.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    /* Convert each RHS operand (ops 1..n-1; op 0 is the LHS) to the
       unsigned type, collecting the conversion stmts in STMTS.  */
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-created below
     by converting the unsigned result back.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS on the pointer type becomes a plain PLUS on the
     unsigned integer type.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed or pointer)
     type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7491 /* The valueization hook we use for the gimple_build API simplification.
7492 This makes us match fold_buildN behavior by only combining with
7493 statements in the sequence(s) we are currently building. */
7495 static tree
7496 gimple_build_valueize (tree op)
7498 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7499 return op;
7500 return NULL_TREE;
7503 /* Build the expression CODE OP0 of type TYPE with location LOC,
7504 simplifying it first if possible. Returns the built
7505 expression value and appends statements possibly defining it
7506 to SEQ. */
7508 tree
7509 gimple_build (gimple_seq *seq, location_t loc,
7510 enum tree_code code, tree type, tree op0)
7512 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7513 if (!res)
7515 res = create_tmp_reg_or_ssa_name (type);
7516 gimple *stmt;
7517 if (code == REALPART_EXPR
7518 || code == IMAGPART_EXPR
7519 || code == VIEW_CONVERT_EXPR)
7520 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7521 else
7522 stmt = gimple_build_assign (res, code, op0);
7523 gimple_set_location (stmt, loc);
7524 gimple_seq_add_stmt_without_update (seq, stmt);
7526 return res;
7529 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7530 simplifying it first if possible. Returns the built
7531 expression value and appends statements possibly defining it
7532 to SEQ. */
7534 tree
7535 gimple_build (gimple_seq *seq, location_t loc,
7536 enum tree_code code, tree type, tree op0, tree op1)
7538 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7539 if (!res)
7541 res = create_tmp_reg_or_ssa_name (type);
7542 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7543 gimple_set_location (stmt, loc);
7544 gimple_seq_add_stmt_without_update (seq, stmt);
7546 return res;
7549 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7550 simplifying it first if possible. Returns the built
7551 expression value and appends statements possibly defining it
7552 to SEQ. */
7554 tree
7555 gimple_build (gimple_seq *seq, location_t loc,
7556 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7558 tree res = gimple_simplify (code, type, op0, op1, op2,
7559 seq, gimple_build_valueize);
7560 if (!res)
7562 res = create_tmp_reg_or_ssa_name (type);
7563 gimple *stmt;
7564 if (code == BIT_FIELD_REF)
7565 stmt = gimple_build_assign (res, code,
7566 build3 (code, type, op0, op1, op2));
7567 else
7568 stmt = gimple_build_assign (res, code, op0, op1, op2);
7569 gimple_set_location (stmt, loc);
7570 gimple_seq_add_stmt_without_update (seq, stmt);
7572 return res;
7575 /* Build the call FN (ARG0) with a result of type TYPE
7576 (or no result if TYPE is void) with location LOC,
7577 simplifying it first if possible. Returns the built
7578 expression value (or NULL_TREE if TYPE is void) and appends
7579 statements possibly defining it to SEQ. */
7581 tree
7582 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7583 tree type, tree arg0)
7585 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7586 if (!res)
7588 gcall *stmt;
7589 if (internal_fn_p (fn))
7590 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7591 else
7593 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7594 stmt = gimple_build_call (decl, 1, arg0);
7596 if (!VOID_TYPE_P (type))
7598 res = create_tmp_reg_or_ssa_name (type);
7599 gimple_call_set_lhs (stmt, res);
7601 gimple_set_location (stmt, loc);
7602 gimple_seq_add_stmt_without_update (seq, stmt);
7604 return res;
7607 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7608 (or no result if TYPE is void) with location LOC,
7609 simplifying it first if possible. Returns the built
7610 expression value (or NULL_TREE if TYPE is void) and appends
7611 statements possibly defining it to SEQ. */
7613 tree
7614 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7615 tree type, tree arg0, tree arg1)
7617 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7618 if (!res)
7620 gcall *stmt;
7621 if (internal_fn_p (fn))
7622 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7623 else
7625 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7626 stmt = gimple_build_call (decl, 2, arg0, arg1);
7628 if (!VOID_TYPE_P (type))
7630 res = create_tmp_reg_or_ssa_name (type);
7631 gimple_call_set_lhs (stmt, res);
7633 gimple_set_location (stmt, loc);
7634 gimple_seq_add_stmt_without_update (seq, stmt);
7636 return res;
7639 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7640 (or no result if TYPE is void) with location LOC,
7641 simplifying it first if possible. Returns the built
7642 expression value (or NULL_TREE if TYPE is void) and appends
7643 statements possibly defining it to SEQ. */
7645 tree
7646 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7647 tree type, tree arg0, tree arg1, tree arg2)
7649 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7650 seq, gimple_build_valueize);
7651 if (!res)
7653 gcall *stmt;
7654 if (internal_fn_p (fn))
7655 stmt = gimple_build_call_internal (as_internal_fn (fn),
7656 3, arg0, arg1, arg2);
7657 else
7659 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7660 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7662 if (!VOID_TYPE_P (type))
7664 res = create_tmp_reg_or_ssa_name (type);
7665 gimple_call_set_lhs (stmt, res);
7667 gimple_set_location (stmt, loc);
7668 gimple_seq_add_stmt_without_update (seq, stmt);
7670 return res;
7673 /* Build the conversion (TYPE) OP with a result of type TYPE
7674 with location LOC if such conversion is neccesary in GIMPLE,
7675 simplifying it first.
7676 Returns the built expression value and appends
7677 statements possibly defining it to SEQ. */
7679 tree
7680 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7682 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7683 return op;
7684 return gimple_build (seq, loc, NOP_EXPR, type, op);
7687 /* Build the conversion (ptrofftype) OP with a result of a type
7688 compatible with ptrofftype with location LOC if such conversion
7689 is neccesary in GIMPLE, simplifying it first.
7690 Returns the built expression value and appends
7691 statements possibly defining it to SEQ. */
7693 tree
7694 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7696 if (ptrofftype_p (TREE_TYPE (op)))
7697 return op;
7698 return gimple_convert (seq, loc, sizetype, op);
7701 /* Build a vector of type TYPE in which each element has the value OP.
7702 Return a gimple value for the result, appending any new statements
7703 to SEQ. */
7705 tree
7706 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7707 tree op)
7709 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7710 && !CONSTANT_CLASS_P (op))
7711 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7713 tree res, vec = build_vector_from_val (type, op);
7714 if (is_gimple_val (vec))
7715 return vec;
7716 if (gimple_in_ssa_p (cfun))
7717 res = make_ssa_name (type);
7718 else
7719 res = create_tmp_reg (type);
7720 gimple *stmt = gimple_build_assign (res, vec);
7721 gimple_set_location (stmt, loc);
7722 gimple_seq_add_stmt_without_update (seq, stmt);
7723 return res;
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan only the encoded elements; if all of them are constant the
     whole vector is constant and we fall through to builder->build ().  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant: emit a CONSTRUCTOR
	   listing every element and assign it to a fresh value.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
7764 /* Return true if the result of assignment STMT is known to be non-negative.
7765 If the return value is based on the assumption that signed overflow is
7766 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7767 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7769 static bool
7770 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7771 int depth)
7773 enum tree_code code = gimple_assign_rhs_code (stmt);
7774 switch (get_gimple_rhs_class (code))
7776 case GIMPLE_UNARY_RHS:
7777 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7778 gimple_expr_type (stmt),
7779 gimple_assign_rhs1 (stmt),
7780 strict_overflow_p, depth);
7781 case GIMPLE_BINARY_RHS:
7782 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7783 gimple_expr_type (stmt),
7784 gimple_assign_rhs1 (stmt),
7785 gimple_assign_rhs2 (stmt),
7786 strict_overflow_p, depth);
7787 case GIMPLE_TERNARY_RHS:
7788 return false;
7789 case GIMPLE_SINGLE_RHS:
7790 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7791 strict_overflow_p, depth);
7792 case GIMPLE_INVALID_RHS:
7793 break;
7795 gcc_unreachable ();
7798 /* Return true if return value of call STMT is known to be non-negative.
7799 If the return value is based on the assumption that signed overflow is
7800 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7801 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7803 static bool
7804 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7805 int depth)
7807 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7808 gimple_call_arg (stmt, 0) : NULL_TREE;
7809 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7810 gimple_call_arg (stmt, 1) : NULL_TREE;
7812 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7813 gimple_call_combined_fn (stmt),
7814 arg0,
7815 arg1,
7816 strict_overflow_p, depth);
7819 /* Return true if return value of call STMT is known to be non-negative.
7820 If the return value is based on the assumption that signed overflow is
7821 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7822 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7824 static bool
7825 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7826 int depth)
7828 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7830 tree arg = gimple_phi_arg_def (stmt, i);
7831 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7832 return false;
7834 return true;
7837 /* Return true if STMT is known to compute a non-negative value.
7838 If the return value is based on the assumption that signed overflow is
7839 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7840 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7842 bool
7843 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7844 int depth)
7846 switch (gimple_code (stmt))
7848 case GIMPLE_ASSIGN:
7849 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7850 depth);
7851 case GIMPLE_CALL:
7852 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7853 depth);
7854 case GIMPLE_PHI:
7855 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7856 depth);
7857 default:
7858 return false;
7862 /* Return true if the floating-point value computed by assignment STMT
7863 is known to have an integer value. We also allow +Inf, -Inf and NaN
7864 to be considered integer values. Return false for signaling NaN.
7866 DEPTH is the current nesting depth of the query. */
7868 static bool
7869 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7871 enum tree_code code = gimple_assign_rhs_code (stmt);
7872 switch (get_gimple_rhs_class (code))
7874 case GIMPLE_UNARY_RHS:
7875 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7876 gimple_assign_rhs1 (stmt), depth);
7877 case GIMPLE_BINARY_RHS:
7878 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7879 gimple_assign_rhs1 (stmt),
7880 gimple_assign_rhs2 (stmt), depth);
7881 case GIMPLE_TERNARY_RHS:
7882 return false;
7883 case GIMPLE_SINGLE_RHS:
7884 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7885 case GIMPLE_INVALID_RHS:
7886 break;
7888 gcc_unreachable ();
7891 /* Return true if the floating-point value computed by call STMT is known
7892 to have an integer value. We also allow +Inf, -Inf and NaN to be
7893 considered integer values. Return false for signaling NaN.
7895 DEPTH is the current nesting depth of the query. */
7897 static bool
7898 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7900 tree arg0 = (gimple_call_num_args (stmt) > 0
7901 ? gimple_call_arg (stmt, 0)
7902 : NULL_TREE);
7903 tree arg1 = (gimple_call_num_args (stmt) > 1
7904 ? gimple_call_arg (stmt, 1)
7905 : NULL_TREE);
7906 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7907 arg0, arg1, depth);
7910 /* Return true if the floating-point result of phi STMT is known to have
7911 an integer value. We also allow +Inf, -Inf and NaN to be considered
7912 integer values. Return false for signaling NaN.
7914 DEPTH is the current nesting depth of the query. */
7916 static bool
7917 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7919 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7921 tree arg = gimple_phi_arg_def (stmt, i);
7922 if (!integer_valued_real_single_p (arg, depth + 1))
7923 return false;
7925 return true;
7928 /* Return true if the floating-point value computed by STMT is known
7929 to have an integer value. We also allow +Inf, -Inf and NaN to be
7930 considered integer values. Return false for signaling NaN.
7932 DEPTH is the current nesting depth of the query. */
7934 bool
7935 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7937 switch (gimple_code (stmt))
7939 case GIMPLE_ASSIGN:
7940 return gimple_assign_integer_valued_real_p (stmt, depth);
7941 case GIMPLE_CALL:
7942 return gimple_call_integer_valued_real_p (stmt, depth);
7943 case GIMPLE_PHI:
7944 return gimple_phi_integer_valued_real_p (stmt, depth);
7945 default:
7946 return false;