[31/77] Use scalar_int_mode for move2add
[official-gcc.git] / gcc / gimple-fold.c
blob5886b3db91c9e59a38f3c588137bc537cc1e5d76
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-general.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58 #include "fold-const-call.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "asan.h"
63 /* Return true when DECL can be referenced from current unit.
64 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
65 We can get declarations that are not possible to reference for various
66 reasons:
68 1) When analyzing C++ virtual tables.
69 C++ virtual tables do have known constructors even
70 when they are keyed to other compilation unit.
71 Those tables can contain pointers to methods and vars
72 in other units. Those methods have both STATIC and EXTERNAL
73 set.
74 2) In WHOPR mode devirtualization might lead to reference
75 to method that was partitioned elsewhere.
76 In this case we have static VAR_DECL or FUNCTION_DECL
77 that has no corresponding callgraph/varpool node
78 declaring the body.
79 3) COMDAT functions referred by external vtables that
80 we devirtualize only during final compilation stage.
81 At this time we already decided that we will not output
82 the function body and thus we can't reference the symbol
83 directly. */
85 static bool
86 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
88 varpool_node *vnode;
89 struct cgraph_node *node;
90 symtab_node *snode;
92 if (DECL_ABSTRACT_P (decl))
93 return false;
95 /* We are concerned only about static/external vars and functions. */
96 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
97 || !VAR_OR_FUNCTION_DECL_P (decl))
98 return true;
100 /* Static objects can be referred only if they were not optimized out yet. */
101 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
103 /* Before we start optimizing unreachable code we can be sure all
104 static objects are defined. */
105 if (symtab->function_flags_ready)
106 return true;
107 snode = symtab_node::get (decl);
108 if (!snode || !snode->definition)
109 return false;
110 node = dyn_cast <cgraph_node *> (snode);
/* A body that was inlined everywhere no longer exists as a standalone
   symbol, so it cannot be referenced directly.  */
111 return !node || !node->global.inlined_to;
114 /* We will later output the initializer, so we can refer to it.
115 So we are concerned only when DECL comes from initializer of
116 external var or var that has been optimized out. */
117 if (!from_decl
118 || !VAR_P (from_decl)
119 || (!DECL_EXTERNAL (from_decl)
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->definition)
122 || (flag_ltrans
123 && (vnode = varpool_node::get (from_decl)) != NULL
124 && vnode->in_other_partition))
125 return true;
126 /* We are folding reference from external vtable. The vtable may refer
127 to a symbol keyed to other compilation unit. The other compilation
128 unit may be in separate DSO and the symbol may be hidden. */
129 if (DECL_VISIBILITY_SPECIFIED (decl)
130 && DECL_EXTERNAL (decl)
131 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
132 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
133 return false;
134 /* When function is public, we always can introduce new reference.
135 Exception are the COMDAT functions where introducing a direct
136 reference imply need to include function body in the current unit. */
137 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
138 return true;
139 /* We have COMDAT. We are going to check if we still have definition
140 or if the definition is going to be output in other partition.
141 Bypass this when gimplifying; all needed functions will be produced.
143 As observed in PR20991 for already optimized out comdat virtual functions
144 it may be tempting to not necessarily give up because the copy will be
145 output elsewhere when corresponding vtable is output.
146 This is however not possible - ABI specify that COMDATs are output in
147 units where they are used and when the other unit was compiled with LTO
148 it is possible that vtable was kept public while the function itself
149 was privatized. */
150 if (!symtab->function_flags_ready)
151 return true;
153 snode = symtab_node::get (decl);
154 if (!snode
155 || ((!snode->definition || DECL_EXTERNAL (decl))
156 && (!snode->in_other_partition
157 || (!snode->forced_by_abi && !snode->force_output))))
158 return false;
/* Same check as above: a fully-inlined function has no addressable body.  */
159 node = dyn_cast <cgraph_node *> (snode);
160 return !node || !node->global.inlined_to;
163 /* Create a temporary for TYPE for a statement STMT. If the current function
164 is in SSA form, a SSA name is created. Otherwise a temporary register
165 is made. */
167 tree
168 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
170 if (gimple_in_ssa_p (cfun))
171 return make_ssa_name (type, stmt);
172 else
173 return create_tmp_reg (type);
176 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
177 acceptable form for is_gimple_min_invariant.
178 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
180 tree
181 canonicalize_constructor_val (tree cval, tree from_decl)
183 tree orig_cval = cval;
184 STRIP_NOPS (cval);
/* Rewrite &p + CST into &MEM[p, CST] so the whole value becomes a
   single ADDR_EXPR that is_gimple_min_invariant can accept.  */
185 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
186 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
188 tree ptr = TREE_OPERAND (cval, 0);
189 if (is_gimple_min_invariant (ptr))
190 cval = build1_loc (EXPR_LOCATION (cval),
191 ADDR_EXPR, TREE_TYPE (ptr),
192 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
193 ptr,
194 fold_convert (ptr_type_node,
195 TREE_OPERAND (cval, 1))));
197 if (TREE_CODE (cval) == ADDR_EXPR)
199 tree base = NULL_TREE;
/* Replace a compound literal by its underlying anonymous decl.  */
200 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
202 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
203 if (base)
204 TREE_OPERAND (cval, 0) = base;
206 else
207 base = get_base_address (TREE_OPERAND (cval, 0));
208 if (!base)
209 return NULL_TREE;
/* Taking the address is only valid if the symbol is referencable from
   this unit; see can_refer_decl_in_current_unit_p above.  */
211 if (VAR_OR_FUNCTION_DECL_P (base)
212 && !can_refer_decl_in_current_unit_p (base, from_decl))
213 return NULL_TREE;
214 if (TREE_TYPE (base) == error_mark_node)
215 return NULL_TREE;
216 if (VAR_P (base))
217 TREE_ADDRESSABLE (base) = 1;
218 else if (TREE_CODE (base) == FUNCTION_DECL)
220 /* Make sure we create a cgraph node for functions we'll reference.
221 They can be non-existent if the reference comes from an entry
222 of an external vtable for example. */
223 cgraph_node::get_create (base);
225 /* Fixup types in global initializers. */
226 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
227 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
229 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
230 cval = fold_convert (TREE_TYPE (orig_cval), cval);
231 return cval;
/* Constants with TREE_OVERFLOW set are not min-invariant; drop the flag.  */
233 if (TREE_OVERFLOW_P (cval))
234 return drop_tree_overflow (cval);
235 return orig_cval;
238 /* If SYM is a constant variable with known value, return the value.
239 NULL_TREE is returned otherwise. */
241 tree
242 get_symbol_constant_value (tree sym)
/* error_mark_node from ctor_for_folding means "not foldable at all";
   NULL means "foldable, value is implicitly zero".  */
244 tree val = ctor_for_folding (sym);
245 if (val != error_mark_node)
247 if (val)
249 val = canonicalize_constructor_val (unshare_expr (val), sym);
250 if (val && is_gimple_min_invariant (val))
251 return val;
252 else
253 return NULL_TREE;
255 /* Variables declared 'const' without an initializer
256 have zero as the initializer if they may not be
257 overridden at link or run time. */
258 if (!val
259 && is_gimple_reg_type (TREE_TYPE (sym)))
260 return build_zero_cst (TREE_TYPE (sym));
263 return NULL_TREE;
268 /* Subroutine of fold_stmt. We perform several simplifications of the
269 memory reference tree EXPR and make sure to re-gimplify them properly
270 after propagation of constant addresses. IS_LHS is true if the
271 reference is supposed to be an lvalue.
Returns the folded replacement tree or NULL_TREE when no simplification
applies. */
273 static tree
274 maybe_fold_reference (tree expr, bool is_lhs)
276 tree result;
/* Unary wrappers around a constant fold via fold_unary_loc.  */
278 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
279 || TREE_CODE (expr) == REALPART_EXPR
280 || TREE_CODE (expr) == IMAGPART_EXPR)
281 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
282 return fold_unary_loc (EXPR_LOCATION (expr),
283 TREE_CODE (expr),
284 TREE_TYPE (expr),
285 TREE_OPERAND (expr, 0));
286 else if (TREE_CODE (expr) == BIT_FIELD_REF
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_ternary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0),
292 TREE_OPERAND (expr, 1),
293 TREE_OPERAND (expr, 2));
/* Only an rvalue reference may be replaced by its constant value.  */
295 if (!is_lhs
296 && (result = fold_const_aggregate_ref (expr))
297 && is_gimple_min_invariant (result))
298 return result;
300 return NULL_TREE;
304 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
305 replacement rhs for the statement or NULL_TREE if no simplification
306 could be made. It is assumed that the operands have been previously
307 folded. */
309 static tree
310 fold_gimple_assign (gimple_stmt_iterator *si)
312 gimple *stmt = gsi_stmt (*si);
313 enum tree_code subcode = gimple_assign_rhs_code (stmt);
314 location_t loc = gimple_location (stmt);
316 tree result = NULL_TREE;
318 switch (get_gimple_rhs_class (subcode))
320 case GIMPLE_SINGLE_RHS:
322 tree rhs = gimple_assign_rhs1 (stmt);
/* Clobbers mark end-of-life of a variable; nothing to fold.  */
324 if (TREE_CLOBBER_P (rhs))
325 return NULL_TREE;
327 if (REFERENCE_CLASS_P (rhs))
328 return maybe_fold_reference (rhs, false);
/* Try to devirtualize an OBJ_TYPE_REF when the set of possible
   polymorphic call targets is known to be complete.  */
330 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
332 tree val = OBJ_TYPE_REF_EXPR (rhs);
333 if (is_gimple_min_invariant (val))
334 return val;
335 else if (flag_devirtualize && virtual_method_call_p (rhs))
337 bool final;
338 vec <cgraph_node *>targets
339 = possible_polymorphic_call_targets (rhs, stmt, &final);
340 if (final && targets.length () <= 1 && dbg_cnt (devirt))
342 if (dump_enabled_p ())
344 location_t loc = gimple_location_safe (stmt);
345 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
346 "resolving virtual function address "
347 "reference to function %s\n",
348 targets.length () == 1
349 ? targets[0]->name ()
350 : "NULL");
352 if (targets.length () == 1)
354 val = fold_convert (TREE_TYPE (val),
355 build_fold_addr_expr_loc
356 (loc, targets[0]->decl));
357 STRIP_USELESS_TYPE_CONVERSION (val);
359 else
360 /* We can not use __builtin_unreachable here because it
361 can not have address taken. */
362 val = build_int_cst (TREE_TYPE (val), 0);
363 return val;
/* Simplify &MEM[p, 0] to p, or fold the address of a folded reference.  */
368 else if (TREE_CODE (rhs) == ADDR_EXPR)
370 tree ref = TREE_OPERAND (rhs, 0);
371 tree tem = maybe_fold_reference (ref, true);
372 if (tem
373 && TREE_CODE (tem) == MEM_REF
374 && integer_zerop (TREE_OPERAND (tem, 1)))
375 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
376 else if (tem)
377 result = fold_convert (TREE_TYPE (rhs),
378 build_fold_addr_expr_loc (loc, tem));
379 else if (TREE_CODE (ref) == MEM_REF
380 && integer_zerop (TREE_OPERAND (ref, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
383 if (result)
385 /* Strip away useless type conversions. Both the
386 NON_LVALUE_EXPR that may have been added by fold, and
387 "useless" type conversions that might now be apparent
388 due to propagation. */
389 STRIP_USELESS_TYPE_CONVERSION (result);
391 if (result != rhs && valid_gimple_rhs_p (result))
392 return result;
396 else if (TREE_CODE (rhs) == CONSTRUCTOR
397 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
399 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
400 unsigned i;
401 tree val;
403 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
404 if (! CONSTANT_CLASS_P (val))
405 return NULL_TREE;
407 return build_vector_from_ctor (TREE_TYPE (rhs),
408 CONSTRUCTOR_ELTS (rhs));
411 else if (DECL_P (rhs))
412 return get_symbol_constant_value (rhs);
414 break;
/* Unary and binary cases are handled by the gimple match-and-simplify
   machinery elsewhere; nothing to do here.  */
416 case GIMPLE_UNARY_RHS:
417 break;
419 case GIMPLE_BINARY_RHS:
420 break;
422 case GIMPLE_TERNARY_RHS:
423 result = fold_ternary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt),
427 gimple_assign_rhs3 (stmt));
429 if (result)
431 STRIP_USELESS_TYPE_CONVERSION (result);
432 if (valid_gimple_rhs_p (result))
433 return result;
435 break;
437 case GIMPLE_INVALID_RHS:
438 gcc_unreachable ();
441 return NULL_TREE;
445 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
446 adjusting the replacement stmts location and virtual operands.
447 If the statement has a lhs the last stmt in the sequence is expected
448 to assign to that lhs. */
450 static void
451 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
453 gimple *stmt = gsi_stmt (*si_p);
455 if (gimple_has_location (stmt))
456 annotate_all_with_location (stmts, gimple_location (stmt));
458 /* First iterate over the replacement statements backward, assigning
459 virtual operands to their defining statements. */
460 gimple *laststore = NULL;
461 for (gimple_stmt_iterator i = gsi_last (stmts);
462 !gsi_end_p (i); gsi_prev (&i))
464 gimple *new_stmt = gsi_stmt (i);
/* A statement needs a VDEF when it is a store or a call that may
   clobber memory (none of NOVOPS/PURE/CONST/NORETURN set).  */
465 if ((gimple_assign_single_p (new_stmt)
466 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
467 || (is_gimple_call (new_stmt)
468 && (gimple_call_flags (new_stmt)
469 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
471 tree vdef;
/* The last store in the sequence inherits the original statement's
   VDEF so downstream uses stay valid; earlier stores get fresh names.  */
472 if (!laststore)
473 vdef = gimple_vdef (stmt);
474 else
475 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
476 gimple_set_vdef (new_stmt, vdef);
477 if (vdef && TREE_CODE (vdef) == SSA_NAME)
478 SSA_NAME_DEF_STMT (vdef) = new_stmt;
479 laststore = new_stmt;
483 /* Second iterate over the statements forward, assigning virtual
484 operands to their uses. */
485 tree reaching_vuse = gimple_vuse (stmt);
486 for (gimple_stmt_iterator i = gsi_start (stmts);
487 !gsi_end_p (i); gsi_next (&i))
489 gimple *new_stmt = gsi_stmt (i);
490 /* If the new statement possibly has a VUSE, update it with exact SSA
491 name we know will reach this one. */
492 if (gimple_has_mem_ops (new_stmt))
493 gimple_set_vuse (new_stmt, reaching_vuse);
494 gimple_set_modified (new_stmt, true);
495 if (gimple_vdef (new_stmt))
496 reaching_vuse = gimple_vdef (new_stmt);
499 /* If the new sequence does not do a store release the virtual
500 definition of the original statement. */
501 if (reaching_vuse
502 && reaching_vuse == gimple_vuse (stmt))
504 tree vdef = gimple_vdef (stmt);
505 if (vdef
506 && TREE_CODE (vdef) == SSA_NAME)
508 unlink_stmt_vdef (stmt);
509 release_ssa_name (vdef);
513 /* Finally replace the original statement with the sequence. */
514 gsi_replace_with_seq (si_p, stmts, false);
517 /* Convert EXPR into a GIMPLE value suitable for substitution on the
518 RHS of an assignment. Insert the necessary statements before
519 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
520 is replaced. If the call is expected to produces a result, then it
521 is replaced by an assignment of the new RHS to the result variable.
522 If the result is to be ignored, then the call is replaced by a
523 GIMPLE_NOP. A proper VDEF chain is retained by making the first
524 VUSE and the last VDEF of the whole sequence be the same as the replaced
525 statement and using new SSA names for stores in between. */
527 void
528 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
530 tree lhs;
531 gimple *stmt, *new_stmt;
532 gimple_stmt_iterator i;
533 gimple_seq stmts = NULL;
535 stmt = gsi_stmt (*si_p);
537 gcc_assert (is_gimple_call (stmt));
539 push_gimplify_context (gimple_in_ssa_p (cfun));
541 lhs = gimple_call_lhs (stmt);
542 if (lhs == NULL_TREE)
544 gimplify_and_add (expr, &stmts);
545 /* We can end up with folding a memcpy of an empty class assignment
546 which gets optimized away by C++ gimplification. */
547 if (gimple_seq_empty_p (stmts))
/* Nothing was generated: drop the call entirely, releasing its
   virtual operands first so the SSA web stays consistent.  */
549 pop_gimplify_context (NULL);
550 if (gimple_in_ssa_p (cfun))
552 unlink_stmt_vdef (stmt);
553 release_defs (stmt);
555 gsi_replace (si_p, gimple_build_nop (), false);
556 return;
559 else
/* The call's result is used: gimplify EXPR and assign it to LHS at
   the end of the generated sequence.  */
561 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
562 new_stmt = gimple_build_assign (lhs, tmp);
563 i = gsi_last (stmts);
564 gsi_insert_after_without_update (&i, new_stmt,
565 GSI_CONTINUE_LINKING);
568 pop_gimplify_context (NULL);
570 gsi_replace_with_seq_vops (si_p, stmts);
574 /* Replace the call at *GSI with the gimple value VAL.
If the call has a LHS the replacement is an assignment LHS = VAL
(converted to the LHS type if needed); otherwise the call simply
becomes a no-op.  Releases the call's virtual definition. */
576 void
577 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
579 gimple *stmt = gsi_stmt (*gsi);
580 tree lhs = gimple_call_lhs (stmt);
581 gimple *repl;
582 if (lhs)
584 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
585 val = fold_convert (TREE_TYPE (lhs), val);
586 repl = gimple_build_assign (lhs, val);
588 else
589 repl = gimple_build_nop ();
590 tree vdef = gimple_vdef (stmt);
591 if (vdef && TREE_CODE (vdef) == SSA_NAME)
593 unlink_stmt_vdef (stmt);
594 release_ssa_name (vdef);
596 gsi_replace (gsi, repl, false);
599 /* Replace the call at *GSI with the new call REPL and fold that
600 again.
REPL inherits the original call's LHS, location and virtual
operands before replacing it in place. */
602 static void
603 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
605 gimple *stmt = gsi_stmt (*gsi);
606 gimple_call_set_lhs (repl, gimple_call_lhs (stmt))
607 gimple_set_location (repl, gimple_location (stmt));
608 if (gimple_vdef (stmt)
609 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
611 gimple_set_vdef (repl, gimple_vdef (stmt));
612 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
614 if (gimple_vuse (stmt))
615 gimple_set_vuse (repl, gimple_vuse (stmt));
616 gsi_replace (gsi, repl, false);
617 fold_stmt (gsi);
620 /* Return true if VAR is a VAR_DECL or a component thereof. */
622 static bool
623 var_decl_component_p (tree var)
625 tree inner = var;
626 while (handled_component_p (inner))
627 inner = TREE_OPERAND (inner, 0);
628 return SSA_VAR_P (inner);
631 /* If the SIZE argument representing the size of an object is in a range
632 of values of which exactly one is valid (and that is zero), return
633 true, otherwise false. */
635 static bool
636 size_must_be_zero_p (tree size)
638 if (integer_zerop (size))
639 return true;
641 if (TREE_CODE (size) != SSA_NAME)
642 return false;
/* Only an anti-range ~[min, max] can exclude every non-zero value.  */
644 wide_int min, max;
645 enum value_range_type rtype = get_range_info (size, &min, &max);
646 if (rtype != VR_ANTI_RANGE)
647 return false;
649 tree type = TREE_TYPE (size);
650 int prec = TYPE_PRECISION (type);
652 wide_int wone = wi::one (prec);
654 /* Compute the value of SSIZE_MAX, the largest positive value that
655 can be stored in ssize_t, the signed counterpart of size_t. */
656 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
/* ~[1, max] with max >= SSIZE_MAX leaves only zero and sizes larger
   than any valid object, so zero is the single valid value.  */
658 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
661 /* Fold function call to builtin mem{{,p}cpy,move}. Return
662 false if no simplification can be made.
663 If ENDP is 0, return DEST (like memcpy).
664 If ENDP is 1, return DEST+LEN (like mempcpy).
665 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
666 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
667 (memmove). */
669 static bool
670 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
671 tree dest, tree src, int endp)
673 gimple *stmt = gsi_stmt (*gsi);
674 tree lhs = gimple_call_lhs (stmt);
675 tree len = gimple_call_arg (stmt, 2);
676 tree destvar, srcvar;
677 location_t loc = gimple_location (stmt);
679 /* If the LEN parameter is a constant zero or in range where
680 the only valid value is zero, return DEST. */
681 if (size_must_be_zero_p (len))
683 gimple *repl;
684 if (gimple_call_lhs (stmt))
685 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
686 else
687 repl = gimple_build_nop ();
688 tree vdef = gimple_vdef (stmt);
689 if (vdef && TREE_CODE (vdef) == SSA_NAME)
691 unlink_stmt_vdef (stmt);
692 release_ssa_name (vdef);
694 gsi_replace (gsi, repl, false);
695 return true;
698 /* If SRC and DEST are the same (and not volatile), return
699 DEST{,+LEN,+LEN-1}. */
700 if (operand_equal_p (src, dest, 0))
702 unlink_stmt_vdef (stmt);
703 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
704 release_ssa_name (gimple_vdef (stmt));
705 if (!lhs)
707 gsi_replace (gsi, gimple_build_nop (), false);
708 return true;
/* Still need to materialize the returned pointer; done handles that.  */
710 goto done;
712 else
714 tree srctype, desttype;
715 unsigned int src_align, dest_align;
716 tree off0;
718 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
719 pointers as wide integer) and also may result in huge function
720 size because of inlined bounds copy. Thus don't inline for
721 functions we want to instrument. */
722 if (flag_check_pointer_bounds
723 && chkp_instrumentable_p (cfun->decl)
724 /* Even if data may contain pointers we can inline if copy
725 less than a pointer size. */
726 && (!tree_fits_uhwi_p (len)
727 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
728 return false;
730 /* Build accesses at offset zero with a ref-all character type. */
731 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
732 ptr_mode, true), 0);
734 /* If we can perform the copy efficiently with first doing all loads
735 and then all stores inline it that way. Currently efficiently
736 means that we can load all the memory into a single integer
737 register which is what MOVE_MAX gives us. */
738 src_align = get_pointer_alignment (src);
739 dest_align = get_pointer_alignment (dest);
740 if (tree_fits_uhwi_p (len)
741 && compare_tree_int (len, MOVE_MAX) <= 0
742 /* ??? Don't transform copies from strings with known length this
743 confuses the tree-ssa-strlen.c. This doesn't handle
744 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
745 reason. */
746 && !c_strlen (src, 2))
748 unsigned ilen = tree_to_uhwi (len);
749 if (pow2p_hwi (ilen))
751 scalar_int_mode mode;
752 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
753 if (type
754 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
755 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
756 /* If the destination pointer is not aligned we must be able
757 to emit an unaligned store. */
758 && (dest_align >= GET_MODE_ALIGNMENT (mode)
759 || !SLOW_UNALIGNED_ACCESS (mode, dest_align)
760 || (optab_handler (movmisalign_optab, mode)
761 != CODE_FOR_nothing)))
763 tree srctype = type;
764 tree desttype = type;
765 if (src_align < GET_MODE_ALIGNMENT (mode))
766 srctype = build_aligned_type (type, src_align);
767 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
768 tree tem = fold_const_aggregate_ref (srcmem);
769 if (tem)
770 srcmem = tem;
/* Give up when the source load itself would need an unaligned
   access the target cannot do efficiently.  */
771 else if (src_align < GET_MODE_ALIGNMENT (mode)
772 && SLOW_UNALIGNED_ACCESS (mode, src_align)
773 && (optab_handler (movmisalign_optab, mode)
774 == CODE_FOR_nothing))
775 srcmem = NULL_TREE;
776 if (srcmem)
778 gimple *new_stmt;
779 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
781 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
782 srcmem
783 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
784 new_stmt);
785 gimple_assign_set_lhs (new_stmt, srcmem);
786 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
787 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
789 if (dest_align < GET_MODE_ALIGNMENT (mode))
790 desttype = build_aligned_type (type, dest_align);
791 new_stmt
792 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
793 dest, off0),
794 srcmem);
795 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
796 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
797 if (gimple_vdef (new_stmt)
798 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
799 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
800 if (!lhs)
802 gsi_replace (gsi, new_stmt, false);
803 return true;
805 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
806 goto done;
/* memmove: can only be turned into memcpy when no overlap is proven.  */
812 if (endp == 3)
814 /* Both DEST and SRC must be pointer types.
815 ??? This is what old code did. Is the testing for pointer types
816 really mandatory?
818 If either SRC is readonly or length is 1, we can use memcpy. */
819 if (!dest_align || !src_align)
820 return false;
821 if (readonly_data_expr (src)
822 || (tree_fits_uhwi_p (len)
823 && (MIN (src_align, dest_align) / BITS_PER_UNIT
824 >= tree_to_uhwi (len))))
826 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
827 if (!fn)
828 return false;
829 gimple_call_set_fndecl (stmt, fn);
830 gimple_call_set_arg (stmt, 0, dest);
831 gimple_call_set_arg (stmt, 1, src);
832 fold_stmt (gsi);
833 return true;
836 /* If *src and *dest can't overlap, optimize into memcpy as well. */
837 if (TREE_CODE (src) == ADDR_EXPR
838 && TREE_CODE (dest) == ADDR_EXPR)
840 tree src_base, dest_base, fn;
841 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
842 HOST_WIDE_INT maxsize;
844 srcvar = TREE_OPERAND (src, 0);
845 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
846 if (src_base == NULL)
847 src_base = srcvar;
848 destvar = TREE_OPERAND (dest, 0);
849 dest_base = get_addr_base_and_unit_offset (destvar,
850 &dest_offset);
851 if (dest_base == NULL)
852 dest_base = destvar;
853 if (tree_fits_uhwi_p (len))
854 maxsize = tree_to_uhwi (len);
855 else
856 maxsize = -1;
857 if (SSA_VAR_P (src_base)
858 && SSA_VAR_P (dest_base))
860 if (operand_equal_p (src_base, dest_base, 0)
861 && ranges_overlap_p (src_offset, maxsize,
862 dest_offset, maxsize))
863 return false;
865 else if (TREE_CODE (src_base) == MEM_REF
866 && TREE_CODE (dest_base) == MEM_REF)
868 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
869 TREE_OPERAND (dest_base, 0), 0))
870 return false;
871 offset_int off = mem_ref_offset (src_base) + src_offset;
872 if (!wi::fits_shwi_p (off))
873 return false;
874 src_offset = off.to_shwi ();
876 off = mem_ref_offset (dest_base) + dest_offset;
877 if (!wi::fits_shwi_p (off))
878 return false;
879 dest_offset = off.to_shwi ();
880 if (ranges_overlap_p (src_offset, maxsize,
881 dest_offset, maxsize))
882 return false;
884 else
885 return false;
887 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
888 if (!fn)
889 return false;
890 gimple_call_set_fndecl (stmt, fn);
891 gimple_call_set_arg (stmt, 0, dest);
892 gimple_call_set_arg (stmt, 1, src);
893 fold_stmt (gsi);
894 return true;
897 /* If the destination and source do not alias optimize into
898 memcpy as well. */
899 if ((is_gimple_min_invariant (dest)
900 || TREE_CODE (dest) == SSA_NAME)
901 && (is_gimple_min_invariant (src)
902 || TREE_CODE (src) == SSA_NAME))
904 ao_ref destr, srcr;
905 ao_ref_init_from_ptr_and_size (&destr, dest, len);
906 ao_ref_init_from_ptr_and_size (&srcr, src, len);
907 if (!refs_may_alias_p_1 (&destr, &srcr, false))
909 tree fn;
910 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
911 if (!fn)
912 return false;
913 gimple_call_set_fndecl (stmt, fn);
914 gimple_call_set_arg (stmt, 0, dest);
915 gimple_call_set_arg (stmt, 1, src);
916 fold_stmt (gsi);
917 return true;
921 return false;
/* From here on: try to turn the copy into a single aggregate
   assignment of a suitably chosen type.  */
924 if (!tree_fits_shwi_p (len))
925 return false;
926 /* FIXME:
927 This logic lose for arguments like (type *)malloc (sizeof (type)),
928 since we strip the casts of up to VOID return value from malloc.
929 Perhaps we ought to inherit type from non-VOID argument here? */
930 STRIP_NOPS (src);
931 STRIP_NOPS (dest);
932 if (!POINTER_TYPE_P (TREE_TYPE (src))
933 || !POINTER_TYPE_P (TREE_TYPE (dest)))
934 return false;
935 /* In the following try to find a type that is most natural to be
936 used for the memcpy source and destination and that allows
937 the most optimization when memcpy is turned into a plain assignment
938 using that type. In theory we could always use a char[len] type
939 but that only gains us that the destination and source possibly
940 no longer will have their address taken. */
941 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
942 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
944 tree tem = TREE_OPERAND (src, 0);
945 STRIP_NOPS (tem);
946 if (tem != TREE_OPERAND (src, 0))
947 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
949 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
951 tree tem = TREE_OPERAND (dest, 0);
952 STRIP_NOPS (tem);
953 if (tem != TREE_OPERAND (dest, 0))
954 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
956 srctype = TREE_TYPE (TREE_TYPE (src));
957 if (TREE_CODE (srctype) == ARRAY_TYPE
958 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
960 srctype = TREE_TYPE (srctype);
961 STRIP_NOPS (src);
962 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
964 desttype = TREE_TYPE (TREE_TYPE (dest));
965 if (TREE_CODE (desttype) == ARRAY_TYPE
966 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
968 desttype = TREE_TYPE (desttype);
969 STRIP_NOPS (dest);
970 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
972 if (TREE_ADDRESSABLE (srctype)
973 || TREE_ADDRESSABLE (desttype))
974 return false;
976 /* Make sure we are not copying using a floating-point mode or
977 a type whose size possibly does not match its precision. */
978 if (FLOAT_MODE_P (TYPE_MODE (desttype))
979 || TREE_CODE (desttype) == BOOLEAN_TYPE
980 || TREE_CODE (desttype) == ENUMERAL_TYPE)
981 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
982 if (FLOAT_MODE_P (TYPE_MODE (srctype))
983 || TREE_CODE (srctype) == BOOLEAN_TYPE
984 || TREE_CODE (srctype) == ENUMERAL_TYPE)
985 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
986 if (!srctype)
987 srctype = desttype;
988 if (!desttype)
989 desttype = srctype;
990 if (!srctype)
991 return false;
993 src_align = get_pointer_alignment (src);
994 dest_align = get_pointer_alignment (dest);
995 if (dest_align < TYPE_ALIGN (desttype)
996 || src_align < TYPE_ALIGN (srctype))
997 return false;
999 destvar = dest;
1000 STRIP_NOPS (destvar);
1001 if (TREE_CODE (destvar) == ADDR_EXPR
1002 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1003 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1004 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1005 else
1006 destvar = NULL_TREE;
1008 srcvar = src;
1009 STRIP_NOPS (srcvar);
1010 if (TREE_CODE (srcvar) == ADDR_EXPR
1011 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1012 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1014 if (!destvar
1015 || src_align >= TYPE_ALIGN (desttype))
1016 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1017 srcvar, off0);
1018 else if (!STRICT_ALIGNMENT)
1020 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1021 src_align);
1022 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1024 else
1025 srcvar = NULL_TREE;
1027 else
1028 srcvar = NULL_TREE;
1030 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1031 return false;
/* When only one side resolved to a variable access, rebuild the other
   side as a MEM_REF of the chosen type.  */
1033 if (srcvar == NULL_TREE)
1035 STRIP_NOPS (src);
1036 if (src_align >= TYPE_ALIGN (desttype))
1037 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1038 else
1040 if (STRICT_ALIGNMENT)
1041 return false;
1042 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1043 src_align);
1044 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1047 else if (destvar == NULL_TREE)
1049 STRIP_NOPS (dest);
1050 if (dest_align >= TYPE_ALIGN (srctype))
1051 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1052 else
1054 if (STRICT_ALIGNMENT)
1055 return false;
1056 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1057 dest_align);
1058 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
/* Emit the load (if needed) and the aggregate store, transferring the
   original call's virtual operands onto the store.  */
1062 gimple *new_stmt;
1063 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1065 tree tem = fold_const_aggregate_ref (srcvar);
1066 if (tem)
1067 srcvar = tem;
1068 if (! is_gimple_min_invariant (srcvar))
1070 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1071 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1072 new_stmt);
1073 gimple_assign_set_lhs (new_stmt, srcvar);
1074 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1075 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1078 new_stmt = gimple_build_assign (destvar, srcvar);
1079 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1080 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1081 if (gimple_vdef (new_stmt)
1082 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1083 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1084 if (!lhs)
1086 gsi_replace (gsi, new_stmt, false);
1087 return true;
1089 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1092 done:
/* Compute the value returned by the builtin per ENDP and assign it to
   the call's LHS, replacing the call.  */
1093 gimple_seq stmts = NULL;
1094 if (endp == 0 || endp == 3)
1095 len = NULL_TREE;
1096 else if (endp == 2)
1097 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1098 ssize_int (1));
1099 if (endp == 2 || endp == 1)
1101 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1102 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1103 TREE_TYPE (dest), dest, len);
1106 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1107 gimple *repl = gimple_build_assign (lhs, dest);
1108 gsi_replace (gsi, repl, false);
1109 return true;
1112 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1113 to built-in memcmp (a, b, len). */
1115 static bool
1116 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1118 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1120 if (!fn)
1121 return false;
1123 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1125 gimple *stmt = gsi_stmt (*gsi);
1126 tree a = gimple_call_arg (stmt, 0);
1127 tree b = gimple_call_arg (stmt, 1);
1128 tree len = gimple_call_arg (stmt, 2);
1130 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1131 replace_call_with_call_and_fold (gsi, repl);
1133 return true;
1136 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1137 to built-in memmove (dest, src, len). */
1139 static bool
1140 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1142 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1144 if (!fn)
1145 return false;
1147 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1148 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1149 len) into memmove (dest, src, len). */
1151 gimple *stmt = gsi_stmt (*gsi);
1152 tree src = gimple_call_arg (stmt, 0);
1153 tree dest = gimple_call_arg (stmt, 1);
1154 tree len = gimple_call_arg (stmt, 2);
1156 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1157 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1158 replace_call_with_call_and_fold (gsi, repl);
1160 return true;
1163 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1164 to built-in memset (dest, 0, len). */
1166 static bool
1167 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1169 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1171 if (!fn)
1172 return false;
1174 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1176 gimple *stmt = gsi_stmt (*gsi);
1177 tree dest = gimple_call_arg (stmt, 0);
1178 tree len = gimple_call_arg (stmt, 1);
1180 gimple_seq seq = NULL;
1181 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1182 gimple_seq_add_stmt_without_update (&seq, repl);
1183 gsi_replace_with_seq_vops (gsi, seq);
1184 fold_stmt (gsi);
1186 return true;
1189 /* Fold function call to builtin memset or bzero at *GSI setting the
1190 memory of size LEN to VAL. Return whether a simplification was made. */
1192 static bool
1193 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1195 gimple *stmt = gsi_stmt (*gsi);
1196 tree etype;
1197 unsigned HOST_WIDE_INT length, cval;
1199 /* If the LEN parameter is zero, return DEST. */
1200 if (integer_zerop (len))
1202 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1203 return true;
1206 if (! tree_fits_uhwi_p (len))
1207 return false;
1209 if (TREE_CODE (c) != INTEGER_CST)
1210 return false;
1212 tree dest = gimple_call_arg (stmt, 0);
1213 tree var = dest;
1214 if (TREE_CODE (var) != ADDR_EXPR)
1215 return false;
1217 var = TREE_OPERAND (var, 0);
1218 if (TREE_THIS_VOLATILE (var))
1219 return false;
1221 etype = TREE_TYPE (var);
1222 if (TREE_CODE (etype) == ARRAY_TYPE)
1223 etype = TREE_TYPE (etype);
1225 if (!INTEGRAL_TYPE_P (etype)
1226 && !POINTER_TYPE_P (etype))
1227 return NULL_TREE;
1229 if (! var_decl_component_p (var))
1230 return NULL_TREE;
1232 length = tree_to_uhwi (len);
1233 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1234 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1235 return NULL_TREE;
1237 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1238 return NULL_TREE;
1240 if (integer_zerop (c))
1241 cval = 0;
1242 else
1244 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1245 return NULL_TREE;
1247 cval = TREE_INT_CST_LOW (c);
1248 cval &= 0xff;
1249 cval |= cval << 8;
1250 cval |= cval << 16;
1251 cval |= (cval << 31) << 1;
1254 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1255 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1256 gimple_set_vuse (store, gimple_vuse (stmt));
1257 tree vdef = gimple_vdef (stmt);
1258 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1260 gimple_set_vdef (store, gimple_vdef (stmt));
1261 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1263 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1264 if (gimple_call_lhs (stmt))
1266 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1267 gsi_replace (gsi, asgn, false);
1269 else
1271 gimple_stmt_iterator gsi2 = *gsi;
1272 gsi_prev (gsi);
1273 gsi_remove (&gsi2, true);
1276 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return False.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  /* Non-SSA operands: constants, addresses and memory references are
     resolved directly, without following any def chain.  */
  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; only a non-negative
	     integer constant is acceptable.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      /* In fuzzy mode fall back to the size of an array ARG may refer
	 to when the exact string length is unknown.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      /* An N-byte array can hold at most an N-1 character
		 string plus the terminating NUL.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Lower *MINLEN when VAL is a smaller constant bound.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* For a maximum estimate both bounds must be constants
		 so they can be compared; keep the larger.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0 requires every reaching length to agree
	     exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR yields one of its two value operands; both
	     must have a determinable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In fuzzy mode an unknown PHI argument widens the
		   maximum to "unbounded" instead of failing.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1464 /* Determine the minimum and maximum value or string length that ARG
1465 refers to and store each in the first two elements of MINMAXLEN.
1466 For expressions that point to strings of unknown lengths that are
1467 character arrays, use the upper bound of the array as the maximum
1468 length. For example, given an expression like 'x ? array : "xyz"'
1469 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1470 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1471 stored in array.
1472 Return true if the range of the string lengths has been obtained
1473 from the upper bound of an array at the end of a struct. Such
1474 an array may hold a string that's longer than its upper bound
1475 due to it being used as a poor-man's flexible array member. */
1477 bool
1478 get_range_strlen (tree arg, tree minmaxlen[2])
1480 bitmap visited = NULL;
1482 minmaxlen[0] = NULL_TREE;
1483 minmaxlen[1] = NULL_TREE;
1485 bool flexarray = false;
1486 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1488 if (visited)
1489 BITMAP_FREE (visited);
1491 return flexarray;
1494 tree
1495 get_maxval_strlen (tree arg, int type)
1497 bitmap visited = NULL;
1498 tree len[2] = { NULL_TREE, NULL_TREE };
1500 bool dummy;
1501 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1502 len[1] = NULL_TREE;
1503 if (visited)
1504 BITMAP_FREE (visited);
1506 return len[1];
1510 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1511 If LEN is not NULL, it represents the length of the string to be
1512 copied. Return NULL_TREE if no simplification can be made. */
1514 static bool
1515 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1516 tree dest, tree src)
1518 location_t loc = gimple_location (gsi_stmt (*gsi));
1519 tree fn;
1521 /* If SRC and DEST are the same (and not volatile), return DEST. */
1522 if (operand_equal_p (src, dest, 0))
1524 replace_call_with_value (gsi, dest);
1525 return true;
1528 if (optimize_function_for_size_p (cfun))
1529 return false;
1531 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1532 if (!fn)
1533 return false;
1535 tree len = get_maxval_strlen (src, 0);
1536 if (!len)
1537 return false;
1539 len = fold_convert_loc (loc, size_type_node, len);
1540 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1541 len = force_gimple_operand_gsi (gsi, len, true,
1542 NULL_TREE, true, GSI_SAME_STMT);
1543 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1544 replace_call_with_call_and_fold (gsi, repl);
1545 return true;
1548 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1549 If SLEN is not NULL, it represents the length of the source string.
1550 Return NULL_TREE if no simplification can be made. */
1552 static bool
1553 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1554 tree dest, tree src, tree len)
1556 location_t loc = gimple_location (gsi_stmt (*gsi));
1557 tree fn;
1559 /* If the LEN parameter is zero, return DEST. */
1560 if (integer_zerop (len))
1562 replace_call_with_value (gsi, dest);
1563 return true;
1566 /* We can't compare slen with len as constants below if len is not a
1567 constant. */
1568 if (TREE_CODE (len) != INTEGER_CST)
1569 return false;
1571 /* Now, we must be passed a constant src ptr parameter. */
1572 tree slen = get_maxval_strlen (src, 0);
1573 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1574 return false;
1576 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1578 /* We do not support simplification of this case, though we do
1579 support it when expanding trees into RTL. */
1580 /* FIXME: generate a call to __builtin_memset. */
1581 if (tree_int_cst_lt (slen, len))
1582 return false;
1584 /* OK transform into builtin memcpy. */
1585 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1586 if (!fn)
1587 return false;
1589 len = fold_convert_loc (loc, size_type_node, len);
1590 len = force_gimple_operand_gsi (gsi, len, true,
1591 NULL_TREE, true, GSI_SAME_STMT);
1592 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1593 replace_call_with_call_and_fold (gsi, repl);
1594 return true;
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Both folds below only produce a value; without an lhs there is
     nothing useful to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Case 1: both the string and the character are known constants;
     compute the result at compile time using the host's str(r)chr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: the result is STR plus the constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* Case 2: only str(r)chr (s, 0) is handled from here on.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Case 3: str(r)chr (s, 0) == s + strlen (s).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1681 /* Fold function call to builtin strstr.
1682 If both arguments are constant, evaluate and fold the result,
1683 additionally fold strstr (x, "") into x and strstr (x, "c")
1684 into strchr (x, 'c'). */
1685 static bool
1686 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1688 gimple *stmt = gsi_stmt (*gsi);
1689 tree haystack = gimple_call_arg (stmt, 0);
1690 tree needle = gimple_call_arg (stmt, 1);
1691 const char *p, *q;
1693 if (!gimple_call_lhs (stmt))
1694 return false;
1696 q = c_getstr (needle);
1697 if (q == NULL)
1698 return false;
1700 if ((p = c_getstr (haystack)))
1702 const char *r = strstr (p, q);
1704 if (r == NULL)
1706 replace_call_with_value (gsi, integer_zero_node);
1707 return true;
1710 tree len = build_int_cst (size_type_node, r - p);
1711 gimple_seq stmts = NULL;
1712 gimple *new_stmt
1713 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1714 haystack, len);
1715 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1716 gsi_replace_with_seq_vops (gsi, stmts);
1717 return true;
1720 /* For strstr (x, "") return x. */
1721 if (q[0] == '\0')
1723 replace_call_with_value (gsi, haystack);
1724 return true;
1727 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1728 if (q[1] == '\0')
1730 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1731 if (strchr_fn)
1733 tree c = build_int_cst (integer_type_node, q[0]);
1734 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1735 replace_call_with_call_and_fold (gsi, repl);
1736 return true;
1740 return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   If SRC is known to be the empty string, the call reduces to DST.
   Otherwise, when optimizing for speed and the length of SRC is a
   known constant, split the call into

     tmp = strlen (dst);  memcpy (dst + tmp, src, strlen (src) + 1);

   so the copy can potentially be expanded by pieces.  Return true if
   the statement was simplified.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy split trades size for speed; only do it in
     blocks optimized for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes to include the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the call's lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }

  return true;
}
1835 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1836 are the arguments to the call. */
1838 static bool
1839 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1841 gimple *stmt = gsi_stmt (*gsi);
1842 tree dest = gimple_call_arg (stmt, 0);
1843 tree src = gimple_call_arg (stmt, 1);
1844 tree size = gimple_call_arg (stmt, 2);
1845 tree fn;
1846 const char *p;
1849 p = c_getstr (src);
1850 /* If the SRC parameter is "", return DEST. */
1851 if (p && *p == '\0')
1853 replace_call_with_value (gsi, dest);
1854 return true;
1857 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1858 return false;
1860 /* If __builtin_strcat_chk is used, assume strcat is available. */
1861 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1862 if (!fn)
1863 return false;
1865 gimple *repl = gimple_build_call (fn, 2, dest, src);
1866 replace_call_with_call_and_fold (gsi, repl);
1867 return true;
1870 /* Simplify a call to the strncat builtin. */
1872 static bool
1873 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1875 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1876 tree dst = gimple_call_arg (stmt, 0);
1877 tree src = gimple_call_arg (stmt, 1);
1878 tree len = gimple_call_arg (stmt, 2);
1880 const char *p = c_getstr (src);
1882 /* If the requested length is zero, or the src parameter string
1883 length is zero, return the dst parameter. */
1884 if (integer_zerop (len) || (p && *p == '\0'))
1886 replace_call_with_value (gsi, dst);
1887 return true;
1890 /* If the requested len is greater than or equal to the string
1891 length, call strcat. */
1892 if (TREE_CODE (len) == INTEGER_CST && p
1893 && compare_tree_int (len, strlen (p)) >= 0)
1895 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1897 /* If the replacement _DECL isn't initialized, don't do the
1898 transformation. */
1899 if (!fn)
1900 return false;
1902 gcall *repl = gimple_build_call (fn, 2, dst, src);
1903 replace_call_with_call_and_fold (gsi, repl);
1904 return true;
1907 return false;
1910 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1911 LEN, and SIZE. */
1913 static bool
1914 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1916 gimple *stmt = gsi_stmt (*gsi);
1917 tree dest = gimple_call_arg (stmt, 0);
1918 tree src = gimple_call_arg (stmt, 1);
1919 tree len = gimple_call_arg (stmt, 2);
1920 tree size = gimple_call_arg (stmt, 3);
1921 tree fn;
1922 const char *p;
1924 p = c_getstr (src);
1925 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1926 if ((p && *p == '\0')
1927 || integer_zerop (len))
1929 replace_call_with_value (gsi, dest);
1930 return true;
1933 if (! tree_fits_uhwi_p (size))
1934 return false;
1936 if (! integer_all_onesp (size))
1938 tree src_len = c_strlen (src, 1);
1939 if (src_len
1940 && tree_fits_uhwi_p (src_len)
1941 && tree_fits_uhwi_p (len)
1942 && ! tree_int_cst_lt (len, src_len))
1944 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1945 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1946 if (!fn)
1947 return false;
1949 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1950 replace_call_with_call_and_fold (gsi, repl);
1951 return true;
1953 return false;
1956 /* If __builtin_strncat_chk is used, assume strncat is available. */
1957 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1958 if (!fn)
1959 return false;
1961 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1962 replace_call_with_call_and_fold (gsi, repl);
1963 return true;
1966 /* Build and append gimple statements to STMTS that would load a first
1967 character of a memory location identified by STR. LOC is location
1968 of the statement. */
1970 static tree
1971 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1973 tree var;
1975 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1976 tree cst_uchar_ptr_node
1977 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1978 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1980 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1981 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1982 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1984 gimple_assign_set_lhs (stmt, var);
1985 gimple_seq_add_stmt_without_update (stmts, stmt);
1987 return var;
1990 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
1991 FCODE is the name of the builtin. */
1993 static bool
1994 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1996 gimple *stmt = gsi_stmt (*gsi);
1997 tree callee = gimple_call_fndecl (stmt);
1998 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2000 tree type = integer_type_node;
2001 tree str1 = gimple_call_arg (stmt, 0);
2002 tree str2 = gimple_call_arg (stmt, 1);
2003 tree lhs = gimple_call_lhs (stmt);
2004 HOST_WIDE_INT length = -1;
2006 /* Handle strncmp and strncasecmp functions. */
2007 if (gimple_call_num_args (stmt) == 3)
2009 tree len = gimple_call_arg (stmt, 2);
2010 if (tree_fits_uhwi_p (len))
2011 length = tree_to_uhwi (len);
2014 /* If the LEN parameter is zero, return zero. */
2015 if (length == 0)
2017 replace_call_with_value (gsi, integer_zero_node);
2018 return true;
2021 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2022 if (operand_equal_p (str1, str2, 0))
2024 replace_call_with_value (gsi, integer_zero_node);
2025 return true;
2028 const char *p1 = c_getstr (str1);
2029 const char *p2 = c_getstr (str2);
2031 /* For known strings, return an immediate value. */
2032 if (p1 && p2)
2034 int r = 0;
2035 bool known_result = false;
2037 switch (fcode)
2039 case BUILT_IN_STRCMP:
2041 r = strcmp (p1, p2);
2042 known_result = true;
2043 break;
2045 case BUILT_IN_STRNCMP:
2047 if (length == -1)
2048 break;
2049 r = strncmp (p1, p2, length);
2050 known_result = true;
2051 break;
2053 /* Only handleable situation is where the string are equal (result 0),
2054 which is already handled by operand_equal_p case. */
2055 case BUILT_IN_STRCASECMP:
2056 break;
2057 case BUILT_IN_STRNCASECMP:
2059 if (length == -1)
2060 break;
2061 r = strncmp (p1, p2, length);
2062 if (r == 0)
2063 known_result = true;
2064 break;;
2066 default:
2067 gcc_unreachable ();
2070 if (known_result)
2072 replace_call_with_value (gsi, build_cmp_result (type, r));
2073 return true;
2077 bool nonzero_length = length >= 1
2078 || fcode == BUILT_IN_STRCMP
2079 || fcode == BUILT_IN_STRCASECMP;
2081 location_t loc = gimple_location (stmt);
2083 /* If the second arg is "", return *(const unsigned char*)arg1. */
2084 if (p2 && *p2 == '\0' && nonzero_length)
2086 gimple_seq stmts = NULL;
2087 tree var = gimple_load_first_char (loc, str1, &stmts);
2088 if (lhs)
2090 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2091 gimple_seq_add_stmt_without_update (&stmts, stmt);
2094 gsi_replace_with_seq_vops (gsi, stmts);
2095 return true;
2098 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2099 if (p1 && *p1 == '\0' && nonzero_length)
2101 gimple_seq stmts = NULL;
2102 tree var = gimple_load_first_char (loc, str2, &stmts);
2104 if (lhs)
2106 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2107 stmt = gimple_build_assign (c, NOP_EXPR, var);
2108 gimple_seq_add_stmt_without_update (&stmts, stmt);
2110 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2111 gimple_seq_add_stmt_without_update (&stmts, stmt);
2114 gsi_replace_with_seq_vops (gsi, stmts);
2115 return true;
2118 /* If len parameter is one, return an expression corresponding to
2119 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2120 if (fcode == BUILT_IN_STRNCMP && length == 1)
2122 gimple_seq stmts = NULL;
2123 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2124 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2126 if (lhs)
2128 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2129 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2130 gimple_seq_add_stmt_without_update (&stmts, convert1);
2132 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2133 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2134 gimple_seq_add_stmt_without_update (&stmts, convert2);
2136 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2137 gimple_seq_add_stmt_without_update (&stmts, stmt);
2140 gsi_replace_with_seq_vops (gsi, stmts);
2141 return true;
2144 return false;
2147 /* Fold a call to the memchr pointed by GSI iterator. */
2149 static bool
2150 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2152 gimple *stmt = gsi_stmt (*gsi);
2153 tree lhs = gimple_call_lhs (stmt);
2154 tree arg1 = gimple_call_arg (stmt, 0);
2155 tree arg2 = gimple_call_arg (stmt, 1);
2156 tree len = gimple_call_arg (stmt, 2);
2158 /* If the LEN parameter is zero, return zero. */
2159 if (integer_zerop (len))
2161 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2162 return true;
2165 char c;
2166 if (TREE_CODE (arg2) != INTEGER_CST
2167 || !tree_fits_uhwi_p (len)
2168 || !target_char_cst_p (arg2, &c))
2169 return false;
2171 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2172 unsigned HOST_WIDE_INT string_length;
2173 const char *p1 = c_getstr (arg1, &string_length);
2175 if (p1)
2177 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2178 if (r == NULL)
2180 if (length <= string_length)
2182 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2183 return true;
2186 else
2188 unsigned HOST_WIDE_INT offset = r - p1;
2189 gimple_seq stmts = NULL;
2190 if (lhs != NULL_TREE)
2192 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2193 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2194 arg1, offset_cst);
2195 gimple_seq_add_stmt_without_update (&stmts, stmt);
2197 else
2198 gimple_seq_add_stmt_without_update (&stmts,
2199 gimple_build_nop ());
2201 gsi_replace_with_seq_vops (gsi, stmts);
2202 return true;
2206 return false;
2209 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2210 to the call. IGNORE is true if the value returned
2211 by the builtin will be ignored. UNLOCKED is true is true if this
2212 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2213 the known length of the string. Return NULL_TREE if no simplification
2214 was possible. */
2216 static bool
2217 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2218 tree arg0, tree arg1,
2219 bool unlocked)
2221 gimple *stmt = gsi_stmt (*gsi);
2223 /* If we're using an unlocked function, assume the other unlocked
2224 functions exist explicitly. */
2225 tree const fn_fputc = (unlocked
2226 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2227 : builtin_decl_implicit (BUILT_IN_FPUTC));
2228 tree const fn_fwrite = (unlocked
2229 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2230 : builtin_decl_implicit (BUILT_IN_FWRITE));
2232 /* If the return value is used, don't do the transformation. */
2233 if (gimple_call_lhs (stmt))
2234 return false;
2236 /* Get the length of the string passed to fputs. If the length
2237 can't be determined, punt. */
2238 tree len = get_maxval_strlen (arg0, 0);
2239 if (!len
2240 || TREE_CODE (len) != INTEGER_CST)
2241 return false;
2243 switch (compare_tree_int (len, 1))
2245 case -1: /* length is 0, delete the call entirely . */
2246 replace_call_with_value (gsi, integer_zero_node);
2247 return true;
2249 case 0: /* length is 1, call fputc. */
2251 const char *p = c_getstr (arg0);
2252 if (p != NULL)
2254 if (!fn_fputc)
2255 return false;
2257 gimple *repl = gimple_build_call (fn_fputc, 2,
2258 build_int_cst
2259 (integer_type_node, p[0]), arg1);
2260 replace_call_with_call_and_fold (gsi, repl);
2261 return true;
2264 /* FALLTHROUGH */
2265 case 1: /* length is greater than 1, call fwrite. */
2267 /* If optimizing for size keep fputs. */
2268 if (optimize_function_for_size_p (cfun))
2269 return false;
2270 /* New argument list transforming fputs(string, stream) to
2271 fwrite(string, 1, len, stream). */
2272 if (!fn_fwrite)
2273 return false;
2275 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2276 size_one_node, len, arg1);
2277 replace_call_with_call_and_fold (gsi, repl);
2278 return true;
2280 default:
2281 gcc_unreachable ();
2283 return false;
2286 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2287 DEST, SRC, LEN, and SIZE are the arguments to the call.
2288 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2289 code of the builtin. If MAXLEN is not NULL, it is maximum length
2290 passed as third argument. */
2292 static bool
2293 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2294 tree dest, tree src, tree len, tree size,
2295 enum built_in_function fcode)
2297 gimple *stmt = gsi_stmt (*gsi);
2298 location_t loc = gimple_location (stmt);
2299 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2300 tree fn;
2302 /* If SRC and DEST are the same (and not volatile), return DEST
2303 (resp. DEST+LEN for __mempcpy_chk). */
2304 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2306 if (fcode != BUILT_IN_MEMPCPY_CHK)
2308 replace_call_with_value (gsi, dest);
2309 return true;
2311 else
2313 gimple_seq stmts = NULL;
2314 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2315 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2316 TREE_TYPE (dest), dest, len);
2317 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2318 replace_call_with_value (gsi, temp);
2319 return true;
2323 if (! tree_fits_uhwi_p (size))
2324 return false;
2326 tree maxlen = get_maxval_strlen (len, 2);
2327 if (! integer_all_onesp (size))
2329 if (! tree_fits_uhwi_p (len))
2331 /* If LEN is not constant, try MAXLEN too.
2332 For MAXLEN only allow optimizing into non-_ocs function
2333 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2334 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2336 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2338 /* (void) __mempcpy_chk () can be optimized into
2339 (void) __memcpy_chk (). */
2340 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2341 if (!fn)
2342 return false;
2344 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2345 replace_call_with_call_and_fold (gsi, repl);
2346 return true;
2348 return false;
2351 else
2352 maxlen = len;
2354 if (tree_int_cst_lt (size, maxlen))
2355 return false;
2358 fn = NULL_TREE;
2359 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2360 mem{cpy,pcpy,move,set} is available. */
2361 switch (fcode)
2363 case BUILT_IN_MEMCPY_CHK:
2364 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2365 break;
2366 case BUILT_IN_MEMPCPY_CHK:
2367 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2368 break;
2369 case BUILT_IN_MEMMOVE_CHK:
2370 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2371 break;
2372 case BUILT_IN_MEMSET_CHK:
2373 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2374 break;
2375 default:
2376 break;
2379 if (!fn)
2380 return false;
2382 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2383 replace_call_with_call_and_fold (gsi, repl);
2384 return true;
2387 /* Fold a call to the __st[rp]cpy_chk builtin.
2388 DEST, SRC, and SIZE are the arguments to the call.
2389 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2390 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2391 strings passed as second argument. */
2393 static bool
2394 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2395 tree dest,
2396 tree src, tree size,
2397 enum built_in_function fcode)
2399 gimple *stmt = gsi_stmt (*gsi);
2400 location_t loc = gimple_location (stmt);
2401 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2402 tree len, fn;
2404 /* If SRC and DEST are the same (and not volatile), return DEST. */
2405 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2407 replace_call_with_value (gsi, dest);
2408 return true;
2411 if (! tree_fits_uhwi_p (size))
2412 return false;
2414 tree maxlen = get_maxval_strlen (src, 1);
2415 if (! integer_all_onesp (size))
2417 len = c_strlen (src, 1);
2418 if (! len || ! tree_fits_uhwi_p (len))
2420 /* If LEN is not constant, try MAXLEN too.
2421 For MAXLEN only allow optimizing into non-_ocs function
2422 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2423 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2425 if (fcode == BUILT_IN_STPCPY_CHK)
2427 if (! ignore)
2428 return false;
2430 /* If return value of __stpcpy_chk is ignored,
2431 optimize into __strcpy_chk. */
2432 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2433 if (!fn)
2434 return false;
2436 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2437 replace_call_with_call_and_fold (gsi, repl);
2438 return true;
2441 if (! len || TREE_SIDE_EFFECTS (len))
2442 return false;
2444 /* If c_strlen returned something, but not a constant,
2445 transform __strcpy_chk into __memcpy_chk. */
2446 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2447 if (!fn)
2448 return false;
2450 gimple_seq stmts = NULL;
2451 len = gimple_convert (&stmts, loc, size_type_node, len);
2452 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2453 build_int_cst (size_type_node, 1));
2454 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2455 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2456 replace_call_with_call_and_fold (gsi, repl);
2457 return true;
2460 else
2461 maxlen = len;
2463 if (! tree_int_cst_lt (maxlen, size))
2464 return false;
2467 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2468 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2469 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2470 if (!fn)
2471 return false;
2473 gimple *repl = gimple_build_call (fn, 2, dest, src);
2474 replace_call_with_call_and_fold (gsi, repl);
2475 return true;
2478 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2479 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2480 length passed as third argument. IGNORE is true if return value can be
2481 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2483 static bool
2484 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2485 tree dest, tree src,
2486 tree len, tree size,
2487 enum built_in_function fcode)
2489 gimple *stmt = gsi_stmt (*gsi);
2490 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2491 tree fn;
2493 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2495 /* If return value of __stpncpy_chk is ignored,
2496 optimize into __strncpy_chk. */
2497 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2498 if (fn)
2500 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2501 replace_call_with_call_and_fold (gsi, repl);
2502 return true;
2506 if (! tree_fits_uhwi_p (size))
2507 return false;
2509 tree maxlen = get_maxval_strlen (len, 2);
2510 if (! integer_all_onesp (size))
2512 if (! tree_fits_uhwi_p (len))
2514 /* If LEN is not constant, try MAXLEN too.
2515 For MAXLEN only allow optimizing into non-_ocs function
2516 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2517 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2518 return false;
2520 else
2521 maxlen = len;
2523 if (tree_int_cst_lt (size, maxlen))
2524 return false;
2527 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2528 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2529 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2530 if (!fn)
2531 return false;
2533 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2534 replace_call_with_call_and_fold (gsi, repl);
2535 return true;
2538 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2539 Return NULL_TREE if no simplification can be made. */
2541 static bool
2542 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2544 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2545 location_t loc = gimple_location (stmt);
2546 tree dest = gimple_call_arg (stmt, 0);
2547 tree src = gimple_call_arg (stmt, 1);
2548 tree fn, len, lenp1;
2550 /* If the result is unused, replace stpcpy with strcpy. */
2551 if (gimple_call_lhs (stmt) == NULL_TREE)
2553 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2554 if (!fn)
2555 return false;
2556 gimple_call_set_fndecl (stmt, fn);
2557 fold_stmt (gsi);
2558 return true;
2561 len = c_strlen (src, 1);
2562 if (!len
2563 || TREE_CODE (len) != INTEGER_CST)
2564 return false;
2566 if (optimize_function_for_size_p (cfun)
2567 /* If length is zero it's small enough. */
2568 && !integer_zerop (len))
2569 return false;
2571 /* If the source has a known length replace stpcpy with memcpy. */
2572 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2573 if (!fn)
2574 return false;
2576 gimple_seq stmts = NULL;
2577 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2578 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2579 tem, build_int_cst (size_type_node, 1));
2580 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2581 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2582 gimple_set_vuse (repl, gimple_vuse (stmt));
2583 gimple_set_vdef (repl, gimple_vdef (stmt));
2584 if (gimple_vdef (repl)
2585 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2586 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2587 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2588 /* Replace the result with dest + len. */
2589 stmts = NULL;
2590 tem = gimple_convert (&stmts, loc, sizetype, len);
2591 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2592 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2593 POINTER_PLUS_EXPR, dest, tem);
2594 gsi_replace (gsi, ret, false);
2595 /* Finally fold the memcpy call. */
2596 gimple_stmt_iterator gsi2 = *gsi;
2597 gsi_prev (&gsi2);
2598 fold_stmt (&gsi2);
2599 return true;
2602 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2603 NULL_TREE if a normal call should be emitted rather than expanding
2604 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2605 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2606 passed as second argument. */
2608 static bool
2609 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2610 enum built_in_function fcode)
2612 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2613 tree dest, size, len, fn, fmt, flag;
2614 const char *fmt_str;
2616 /* Verify the required arguments in the original call. */
2617 if (gimple_call_num_args (stmt) < 5)
2618 return false;
2620 dest = gimple_call_arg (stmt, 0);
2621 len = gimple_call_arg (stmt, 1);
2622 flag = gimple_call_arg (stmt, 2);
2623 size = gimple_call_arg (stmt, 3);
2624 fmt = gimple_call_arg (stmt, 4);
2626 if (! tree_fits_uhwi_p (size))
2627 return false;
2629 if (! integer_all_onesp (size))
2631 tree maxlen = get_maxval_strlen (len, 2);
2632 if (! tree_fits_uhwi_p (len))
2634 /* If LEN is not constant, try MAXLEN too.
2635 For MAXLEN only allow optimizing into non-_ocs function
2636 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2637 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2638 return false;
2640 else
2641 maxlen = len;
2643 if (tree_int_cst_lt (size, maxlen))
2644 return false;
2647 if (!init_target_chars ())
2648 return false;
2650 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2651 or if format doesn't contain % chars or is "%s". */
2652 if (! integer_zerop (flag))
2654 fmt_str = c_getstr (fmt);
2655 if (fmt_str == NULL)
2656 return false;
2657 if (strchr (fmt_str, target_percent) != NULL
2658 && strcmp (fmt_str, target_percent_s))
2659 return false;
2662 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2663 available. */
2664 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2665 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2666 if (!fn)
2667 return false;
2669 /* Replace the called function and the first 5 argument by 3 retaining
2670 trailing varargs. */
2671 gimple_call_set_fndecl (stmt, fn);
2672 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2673 gimple_call_set_arg (stmt, 0, dest);
2674 gimple_call_set_arg (stmt, 1, len);
2675 gimple_call_set_arg (stmt, 2, fmt);
2676 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2677 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2678 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2679 fold_stmt (gsi);
2680 return true;
2683 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2684 Return NULL_TREE if a normal call should be emitted rather than
2685 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2686 or BUILT_IN_VSPRINTF_CHK. */
2688 static bool
2689 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2690 enum built_in_function fcode)
2692 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2693 tree dest, size, len, fn, fmt, flag;
2694 const char *fmt_str;
2695 unsigned nargs = gimple_call_num_args (stmt);
2697 /* Verify the required arguments in the original call. */
2698 if (nargs < 4)
2699 return false;
2700 dest = gimple_call_arg (stmt, 0);
2701 flag = gimple_call_arg (stmt, 1);
2702 size = gimple_call_arg (stmt, 2);
2703 fmt = gimple_call_arg (stmt, 3);
2705 if (! tree_fits_uhwi_p (size))
2706 return false;
2708 len = NULL_TREE;
2710 if (!init_target_chars ())
2711 return false;
2713 /* Check whether the format is a literal string constant. */
2714 fmt_str = c_getstr (fmt);
2715 if (fmt_str != NULL)
2717 /* If the format doesn't contain % args or %%, we know the size. */
2718 if (strchr (fmt_str, target_percent) == 0)
2720 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2721 len = build_int_cstu (size_type_node, strlen (fmt_str));
2723 /* If the format is "%s" and first ... argument is a string literal,
2724 we know the size too. */
2725 else if (fcode == BUILT_IN_SPRINTF_CHK
2726 && strcmp (fmt_str, target_percent_s) == 0)
2728 tree arg;
2730 if (nargs == 5)
2732 arg = gimple_call_arg (stmt, 4);
2733 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2735 len = c_strlen (arg, 1);
2736 if (! len || ! tree_fits_uhwi_p (len))
2737 len = NULL_TREE;
2743 if (! integer_all_onesp (size))
2745 if (! len || ! tree_int_cst_lt (len, size))
2746 return false;
2749 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2750 or if format doesn't contain % chars or is "%s". */
2751 if (! integer_zerop (flag))
2753 if (fmt_str == NULL)
2754 return false;
2755 if (strchr (fmt_str, target_percent) != NULL
2756 && strcmp (fmt_str, target_percent_s))
2757 return false;
2760 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2761 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2762 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2763 if (!fn)
2764 return false;
2766 /* Replace the called function and the first 4 argument by 2 retaining
2767 trailing varargs. */
2768 gimple_call_set_fndecl (stmt, fn);
2769 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2770 gimple_call_set_arg (stmt, 0, dest);
2771 gimple_call_set_arg (stmt, 1, fmt);
2772 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2773 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2774 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2775 fold_stmt (gsi);
2776 return true;
2779 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2780 ORIG may be null if this is a 2-argument call. We don't attempt to
2781 simplify calls with more than 3 arguments.
2783 Return true if simplification was possible, otherwise false. */
2785 bool
2786 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2788 gimple *stmt = gsi_stmt (*gsi);
2789 tree dest = gimple_call_arg (stmt, 0);
2790 tree fmt = gimple_call_arg (stmt, 1);
2791 tree orig = NULL_TREE;
2792 const char *fmt_str = NULL;
2794 /* Verify the required arguments in the original call. We deal with two
2795 types of sprintf() calls: 'sprintf (str, fmt)' and
2796 'sprintf (dest, "%s", orig)'. */
2797 if (gimple_call_num_args (stmt) > 3)
2798 return false;
2800 if (gimple_call_num_args (stmt) == 3)
2801 orig = gimple_call_arg (stmt, 2);
2803 /* Check whether the format is a literal string constant. */
2804 fmt_str = c_getstr (fmt);
2805 if (fmt_str == NULL)
2806 return false;
2808 if (!init_target_chars ())
2809 return false;
2811 /* If the format doesn't contain % args or %%, use strcpy. */
2812 if (strchr (fmt_str, target_percent) == NULL)
2814 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2816 if (!fn)
2817 return false;
2819 /* Don't optimize sprintf (buf, "abc", ptr++). */
2820 if (orig)
2821 return false;
2823 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2824 'format' is known to contain no % formats. */
2825 gimple_seq stmts = NULL;
2826 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2827 gimple_seq_add_stmt_without_update (&stmts, repl);
2828 if (gimple_call_lhs (stmt))
2830 repl = gimple_build_assign (gimple_call_lhs (stmt),
2831 build_int_cst (integer_type_node,
2832 strlen (fmt_str)));
2833 gimple_seq_add_stmt_without_update (&stmts, repl);
2834 gsi_replace_with_seq_vops (gsi, stmts);
2835 /* gsi now points at the assignment to the lhs, get a
2836 stmt iterator to the memcpy call.
2837 ??? We can't use gsi_for_stmt as that doesn't work when the
2838 CFG isn't built yet. */
2839 gimple_stmt_iterator gsi2 = *gsi;
2840 gsi_prev (&gsi2);
2841 fold_stmt (&gsi2);
2843 else
2845 gsi_replace_with_seq_vops (gsi, stmts);
2846 fold_stmt (gsi);
2848 return true;
2851 /* If the format is "%s", use strcpy if the result isn't used. */
2852 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2854 tree fn;
2855 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2857 if (!fn)
2858 return false;
2860 /* Don't crash on sprintf (str1, "%s"). */
2861 if (!orig)
2862 return false;
2864 tree orig_len = NULL_TREE;
2865 if (gimple_call_lhs (stmt))
2867 orig_len = get_maxval_strlen (orig, 0);
2868 if (!orig_len)
2869 return false;
2872 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2873 gimple_seq stmts = NULL;
2874 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2875 gimple_seq_add_stmt_without_update (&stmts, repl);
2876 if (gimple_call_lhs (stmt))
2878 if (!useless_type_conversion_p (integer_type_node,
2879 TREE_TYPE (orig_len)))
2880 orig_len = fold_convert (integer_type_node, orig_len);
2881 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2882 gimple_seq_add_stmt_without_update (&stmts, repl);
2883 gsi_replace_with_seq_vops (gsi, stmts);
2884 /* gsi now points at the assignment to the lhs, get a
2885 stmt iterator to the memcpy call.
2886 ??? We can't use gsi_for_stmt as that doesn't work when the
2887 CFG isn't built yet. */
2888 gimple_stmt_iterator gsi2 = *gsi;
2889 gsi_prev (&gsi2);
2890 fold_stmt (&gsi2);
2892 else
2894 gsi_replace_with_seq_vops (gsi, stmts);
2895 fold_stmt (gsi);
2897 return true;
2899 return false;
2902 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2903 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2904 attempt to simplify calls with more than 4 arguments.
2906 Return true if simplification was possible, otherwise false. */
2908 bool
2909 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2911 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2912 tree dest = gimple_call_arg (stmt, 0);
2913 tree destsize = gimple_call_arg (stmt, 1);
2914 tree fmt = gimple_call_arg (stmt, 2);
2915 tree orig = NULL_TREE;
2916 const char *fmt_str = NULL;
2918 if (gimple_call_num_args (stmt) > 4)
2919 return false;
2921 if (gimple_call_num_args (stmt) == 4)
2922 orig = gimple_call_arg (stmt, 3);
2924 if (!tree_fits_uhwi_p (destsize))
2925 return false;
2926 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2928 /* Check whether the format is a literal string constant. */
2929 fmt_str = c_getstr (fmt);
2930 if (fmt_str == NULL)
2931 return false;
2933 if (!init_target_chars ())
2934 return false;
2936 /* If the format doesn't contain % args or %%, use strcpy. */
2937 if (strchr (fmt_str, target_percent) == NULL)
2939 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2940 if (!fn)
2941 return false;
2943 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2944 if (orig)
2945 return false;
2947 /* We could expand this as
2948 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2949 or to
2950 memcpy (str, fmt_with_nul_at_cstm1, cst);
2951 but in the former case that might increase code size
2952 and in the latter case grow .rodata section too much.
2953 So punt for now. */
2954 size_t len = strlen (fmt_str);
2955 if (len >= destlen)
2956 return false;
2958 gimple_seq stmts = NULL;
2959 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2960 gimple_seq_add_stmt_without_update (&stmts, repl);
2961 if (gimple_call_lhs (stmt))
2963 repl = gimple_build_assign (gimple_call_lhs (stmt),
2964 build_int_cst (integer_type_node, len));
2965 gimple_seq_add_stmt_without_update (&stmts, repl);
2966 gsi_replace_with_seq_vops (gsi, stmts);
2967 /* gsi now points at the assignment to the lhs, get a
2968 stmt iterator to the memcpy call.
2969 ??? We can't use gsi_for_stmt as that doesn't work when the
2970 CFG isn't built yet. */
2971 gimple_stmt_iterator gsi2 = *gsi;
2972 gsi_prev (&gsi2);
2973 fold_stmt (&gsi2);
2975 else
2977 gsi_replace_with_seq_vops (gsi, stmts);
2978 fold_stmt (gsi);
2980 return true;
2983 /* If the format is "%s", use strcpy if the result isn't used. */
2984 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2986 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2987 if (!fn)
2988 return false;
2990 /* Don't crash on snprintf (str1, cst, "%s"). */
2991 if (!orig)
2992 return false;
2994 tree orig_len = get_maxval_strlen (orig, 0);
2995 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2996 return false;
2998 /* We could expand this as
2999 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3000 or to
3001 memcpy (str1, str2_with_nul_at_cstm1, cst);
3002 but in the former case that might increase code size
3003 and in the latter case grow .rodata section too much.
3004 So punt for now. */
3005 if (compare_tree_int (orig_len, destlen) >= 0)
3006 return false;
3008 /* Convert snprintf (str1, cst, "%s", str2) into
3009 strcpy (str1, str2) if strlen (str2) < cst. */
3010 gimple_seq stmts = NULL;
3011 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3012 gimple_seq_add_stmt_without_update (&stmts, repl);
3013 if (gimple_call_lhs (stmt))
3015 if (!useless_type_conversion_p (integer_type_node,
3016 TREE_TYPE (orig_len)))
3017 orig_len = fold_convert (integer_type_node, orig_len);
3018 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3019 gimple_seq_add_stmt_without_update (&stmts, repl);
3020 gsi_replace_with_seq_vops (gsi, stmts);
3021 /* gsi now points at the assignment to the lhs, get a
3022 stmt iterator to the memcpy call.
3023 ??? We can't use gsi_for_stmt as that doesn't work when the
3024 CFG isn't built yet. */
3025 gimple_stmt_iterator gsi2 = *gsi;
3026 gsi_prev (&gsi2);
3027 fold_stmt (&gsi2);
3029 else
3031 gsi_replace_with_seq_vops (gsi, stmts);
3032 fold_stmt (gsi);
3034 return true;
3036 return false;
3039 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3040 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3041 more than 3 arguments, and ARG may be null in the 2-argument case.
3043 Return NULL_TREE if no simplification was possible, otherwise return the
3044 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3045 code of the function to be simplified. */
3047 static bool
3048 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3049 tree fp, tree fmt, tree arg,
3050 enum built_in_function fcode)
3052 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3053 tree fn_fputc, fn_fputs;
3054 const char *fmt_str = NULL;
3056 /* If the return value is used, don't do the transformation. */
3057 if (gimple_call_lhs (stmt) != NULL_TREE)
3058 return false;
3060 /* Check whether the format is a literal string constant. */
3061 fmt_str = c_getstr (fmt);
3062 if (fmt_str == NULL)
3063 return false;
3065 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3067 /* If we're using an unlocked function, assume the other
3068 unlocked functions exist explicitly. */
3069 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3070 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3072 else
3074 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3075 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3078 if (!init_target_chars ())
3079 return false;
3081 /* If the format doesn't contain % args or %%, use strcpy. */
3082 if (strchr (fmt_str, target_percent) == NULL)
3084 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3085 && arg)
3086 return false;
3088 /* If the format specifier was "", fprintf does nothing. */
3089 if (fmt_str[0] == '\0')
3091 replace_call_with_value (gsi, NULL_TREE);
3092 return true;
3095 /* When "string" doesn't contain %, replace all cases of
3096 fprintf (fp, string) with fputs (string, fp). The fputs
3097 builtin will take care of special cases like length == 1. */
3098 if (fn_fputs)
3100 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3101 replace_call_with_call_and_fold (gsi, repl);
3102 return true;
3106 /* The other optimizations can be done only on the non-va_list variants. */
3107 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3108 return false;
3110 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3111 else if (strcmp (fmt_str, target_percent_s) == 0)
3113 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3114 return false;
3115 if (fn_fputs)
3117 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3118 replace_call_with_call_and_fold (gsi, repl);
3119 return true;
3123 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3124 else if (strcmp (fmt_str, target_percent_c) == 0)
3126 if (!arg
3127 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3128 return false;
3129 if (fn_fputc)
3131 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3132 replace_call_with_call_and_fold (gsi, repl);
3133 return true;
3137 return false;
3140 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3141 FMT and ARG are the arguments to the call; we don't fold cases with
3142 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3144 Return NULL_TREE if no simplification was possible, otherwise return the
3145 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3146 code of the function to be simplified. */
/* Simplify a call to printf/printf_unlocked/vprintf (and their _CHK
   variants, dispatched on FCODE) into putchar or puts when the format
   string FMT is a compile-time constant.  ARG is the single optional
   value argument (NULL_TREE when absent).  Returns true and replaces
   the call at *GSI on success; returns false when no transformation
   applies.  NOTE(review): brace-only lines were lost in this listing;
   structure follows the original gimple-fold.c.  */
3148 static bool
3149 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3150 tree arg, enum built_in_function fcode)
3152 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3153 tree fn_putchar, fn_puts, newarg;
3154 const char *fmt_str = NULL;
3156 /* If the return value is used, don't do the transformation. */
3157 if (gimple_call_lhs (stmt) != NULL_TREE)
3158 return false;
3160 /* Check whether the format is a literal string constant. */
3161 fmt_str = c_getstr (fmt);
3162 if (fmt_str == NULL)
3163 return false;
3165 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3167 /* If we're using an unlocked function, assume the other
3168 unlocked functions exist explicitly. */
3169 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3170 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3172 else
3174 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3175 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
/* Make sure the target's representations of '%', 's', 'c' and '\n'
   are available before comparing against the format string.  */
3178 if (!init_target_chars ())
3179 return false;
/* Case 1: format is exactly "%s", or contains no '%' at all; either
   way the output is a known constant string STR.  */
3181 if (strcmp (fmt_str, target_percent_s) == 0
3182 || strchr (fmt_str, target_percent) == NULL)
3184 const char *str;
3186 if (strcmp (fmt_str, target_percent_s) == 0)
3188 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3189 return false;
3191 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3192 return false;
3194 str = c_getstr (arg);
3195 if (str == NULL)
3196 return false;
3198 else
3200 /* The format specifier doesn't contain any '%' characters. */
3201 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3202 && arg)
3203 return false;
3204 str = fmt_str;
3207 /* If the string was "", printf does nothing. */
3208 if (str[0] == '\0')
3210 replace_call_with_value (gsi, NULL_TREE);
3211 return true;
3214 /* If the string has length of 1, call putchar. */
3215 if (str[1] == '\0')
3217 /* Given printf("c"), (where c is any one character,)
3218 convert "c"[0] to an int and pass that to the replacement
3219 function. */
3220 newarg = build_int_cst (integer_type_node, str[0]);
3221 if (fn_putchar)
3223 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3224 replace_call_with_call_and_fold (gsi, repl);
3225 return true;
3228 else
3230 /* If the string was "string\n", call puts("string"). */
3231 size_t len = strlen (str);
3232 if ((unsigned char)str[len - 1] == target_newline
3233 && (size_t) (int) len == len
3234 && (int) len > 0)
3236 char *newstr;
3237 tree offset_node, string_cst;
3239 /* Create a NUL-terminated string that's one char shorter
3240 than the original, stripping off the trailing '\n'. */
3241 newarg = build_string_literal (len, str);
3242 string_cst = string_constant (newarg, &offset_node);
3243 gcc_checking_assert (string_cst
3244 && (TREE_STRING_LENGTH (string_cst)
3245 == (int) len)
3246 && integer_zerop (offset_node)
3247 && (unsigned char)
3248 TREE_STRING_POINTER (string_cst)[len - 1]
3249 == target_newline);
3250 /* build_string_literal creates a new STRING_CST,
3251 modify it in place to avoid double copying. */
3252 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3253 newstr[len - 1] = '\0';
3254 if (fn_puts)
3256 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3257 replace_call_with_call_and_fold (gsi, repl);
3258 return true;
3261 else
3262 /* We'd like to arrange to call fputs(string,stdout) here,
3263 but we need stdout and don't have a way to get it yet. */
3264 return false;
3268 /* The other optimizations can be done only on the non-va_list variants. */
3269 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3270 return false;
3272 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3273 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3275 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3276 return false;
3277 if (fn_puts)
3279 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3280 replace_call_with_call_and_fold (gsi, repl);
3281 return true;
3285 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3286 else if (strcmp (fmt_str, target_percent_c) == 0)
3288 if (!arg || ! useless_type_conversion_p (integer_type_node,
3289 TREE_TYPE (arg)))
3290 return false;
3291 if (fn_putchar)
3293 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3294 replace_call_with_call_and_fold (gsi, repl);
3295 return true;
3299 return false;
3304 /* Fold a call to __builtin_strlen with known length LEN. */
/* Fold a call to __builtin_strlen at *GSI.  If the maximum string
   length of argument 0 is known (get_maxval_strlen), gimplify that
   value into the IL and replace the call's result with it.  Returns
   true iff the call was replaced.  */
3306 static bool
3307 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3309 gimple *stmt = gsi_stmt (*gsi);
3310 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3311 if (!len)
3312 return false;
/* LEN may be a non-trivial expression; force it into gimple form
   before the current statement so it is a valid replacement value.  */
3313 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3314 replace_call_with_value (gsi, len);
3315 return true;
3318 /* Fold a call to __builtin_acc_on_device. */
/* Fold a call to __builtin_acc_on_device (ARG0) into the boolean
   expression (ARG0 == val_host) | (ARG0 == val_dev), where the two
   constants depend on whether this is the host or the accelerator
   (ACCEL_COMPILER) compiler.  Returns true iff the call was folded.  */
3320 static bool
3321 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3323 /* Defer folding until we know which compiler we're in. */
3324 if (symtab->state != EXPANSION)
3325 return false;
3327 unsigned val_host = GOMP_DEVICE_HOST;
3328 unsigned val_dev = GOMP_DEVICE_NONE;
3330 #ifdef ACCEL_COMPILER
3331 val_host = GOMP_DEVICE_NOT_HOST;
3332 val_dev = ACCEL_COMPILER_acc_device;
3333 #endif
3335 location_t loc = gimple_location (gsi_stmt (*gsi));
/* host_eq = (arg0 == val_host)  */
3337 tree host_eq = make_ssa_name (boolean_type_node);
3338 gimple *host_ass = gimple_build_assign
3339 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3340 gimple_set_location (host_ass, loc);
3341 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
/* dev_eq = (arg0 == val_dev)  */
3343 tree dev_eq = make_ssa_name (boolean_type_node);
3344 gimple *dev_ass = gimple_build_assign
3345 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3346 gimple_set_location (dev_ass, loc);
3347 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
/* result = host_eq | dev_eq  */
3349 tree result = make_ssa_name (boolean_type_node);
3350 gimple *result_ass = gimple_build_assign
3351 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3352 gimple_set_location (result_ass, loc);
3353 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3355 replace_call_with_value (gsi, result);
3357 return true;
3360 /* Fold realloc (0, n) -> malloc (n). */
/* Fold realloc (NULL, size) at *GSI into malloc (size), which has the
   same semantics.  Returns true iff the call was replaced (requires
   an implicit BUILT_IN_MALLOC decl to be available).  */
3362 static bool
3363 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3365 gimple *stmt = gsi_stmt (*gsi);
3366 tree arg = gimple_call_arg (stmt, 0);
3367 tree size = gimple_call_arg (stmt, 1);
3369 if (operand_equal_p (arg, null_pointer_node, 0))
3371 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3372 if (fn_malloc)
3374 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3375 replace_call_with_call_and_fold (gsi, repl);
3376 return true;
3379 return false;
3382 /* Fold the non-target builtin at *GSI and return whether any simplification
3383 was made. */
/* Central dispatcher: fold the normal (non-target) builtin call at
   *GSI.  Routes each BUILT_IN_* code to its dedicated folder above,
   then falls back to the generic tree-level fold_call_stmt.  Returns
   true iff any simplification was made.  */
3385 static bool
3386 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3388 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3389 tree callee = gimple_call_fndecl (stmt);
3391 /* Give up for always_inline inline builtins until they are
3392 inlined. */
3393 if (avoid_folding_inline_builtin (callee))
3394 return false;
3396 unsigned n = gimple_call_num_args (stmt);
3397 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3398 switch (fcode)
3400 case BUILT_IN_BCMP:
3401 return gimple_fold_builtin_bcmp (gsi);
3402 case BUILT_IN_BCOPY:
3403 return gimple_fold_builtin_bcopy (gsi);
3404 case BUILT_IN_BZERO:
3405 return gimple_fold_builtin_bzero (gsi);
3407 case BUILT_IN_MEMSET:
3408 return gimple_fold_builtin_memset (gsi,
3409 gimple_call_arg (stmt, 1),
3410 gimple_call_arg (stmt, 2));
/* The last argument to gimple_fold_builtin_memory_op distinguishes
   memcpy (0) / mempcpy (1) / memmove (3).  */
3411 case BUILT_IN_MEMCPY:
3412 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3413 gimple_call_arg (stmt, 1), 0);
3414 case BUILT_IN_MEMPCPY:
3415 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3416 gimple_call_arg (stmt, 1), 1);
3417 case BUILT_IN_MEMMOVE:
3418 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3419 gimple_call_arg (stmt, 1), 3);
3420 case BUILT_IN_SPRINTF_CHK:
3421 case BUILT_IN_VSPRINTF_CHK:
3422 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3423 case BUILT_IN_STRCAT_CHK:
3424 return gimple_fold_builtin_strcat_chk (gsi);
3425 case BUILT_IN_STRNCAT_CHK:
3426 return gimple_fold_builtin_strncat_chk (gsi);
3427 case BUILT_IN_STRLEN:
3428 return gimple_fold_builtin_strlen (gsi);
3429 case BUILT_IN_STRCPY:
3430 return gimple_fold_builtin_strcpy (gsi,
3431 gimple_call_arg (stmt, 0),
3432 gimple_call_arg (stmt, 1));
3433 case BUILT_IN_STRNCPY:
3434 return gimple_fold_builtin_strncpy (gsi,
3435 gimple_call_arg (stmt, 0),
3436 gimple_call_arg (stmt, 1),
3437 gimple_call_arg (stmt, 2));
3438 case BUILT_IN_STRCAT:
3439 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3440 gimple_call_arg (stmt, 1));
3441 case BUILT_IN_STRNCAT:
3442 return gimple_fold_builtin_strncat (gsi);
/* index/rindex are the BSD spellings of strchr/strrchr; the bool
   argument selects forward (false) vs. reverse (true) search.  */
3443 case BUILT_IN_INDEX:
3444 case BUILT_IN_STRCHR:
3445 return gimple_fold_builtin_strchr (gsi, false);
3446 case BUILT_IN_RINDEX:
3447 case BUILT_IN_STRRCHR:
3448 return gimple_fold_builtin_strchr (gsi, true);
3449 case BUILT_IN_STRSTR:
3450 return gimple_fold_builtin_strstr (gsi);
3451 case BUILT_IN_STRCMP:
3452 case BUILT_IN_STRCASECMP:
3453 case BUILT_IN_STRNCMP:
3454 case BUILT_IN_STRNCASECMP:
3455 return gimple_fold_builtin_string_compare (gsi);
3456 case BUILT_IN_MEMCHR:
3457 return gimple_fold_builtin_memchr (gsi);
3458 case BUILT_IN_FPUTS:
3459 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3460 gimple_call_arg (stmt, 1), false);
3461 case BUILT_IN_FPUTS_UNLOCKED:
3462 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3463 gimple_call_arg (stmt, 1), true);
3464 case BUILT_IN_MEMCPY_CHK:
3465 case BUILT_IN_MEMPCPY_CHK:
3466 case BUILT_IN_MEMMOVE_CHK:
3467 case BUILT_IN_MEMSET_CHK:
3468 return gimple_fold_builtin_memory_chk (gsi,
3469 gimple_call_arg (stmt, 0),
3470 gimple_call_arg (stmt, 1),
3471 gimple_call_arg (stmt, 2),
3472 gimple_call_arg (stmt, 3),
3473 fcode);
3474 case BUILT_IN_STPCPY:
3475 return gimple_fold_builtin_stpcpy (gsi);
3476 case BUILT_IN_STRCPY_CHK:
3477 case BUILT_IN_STPCPY_CHK:
3478 return gimple_fold_builtin_stxcpy_chk (gsi,
3479 gimple_call_arg (stmt, 0),
3480 gimple_call_arg (stmt, 1),
3481 gimple_call_arg (stmt, 2),
3482 fcode);
3483 case BUILT_IN_STRNCPY_CHK:
3484 case BUILT_IN_STPNCPY_CHK:
3485 return gimple_fold_builtin_stxncpy_chk (gsi,
3486 gimple_call_arg (stmt, 0),
3487 gimple_call_arg (stmt, 1),
3488 gimple_call_arg (stmt, 2),
3489 gimple_call_arg (stmt, 3),
3490 fcode);
3491 case BUILT_IN_SNPRINTF_CHK:
3492 case BUILT_IN_VSNPRINTF_CHK:
3493 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
/* The printf-family folders take the format string and the single
   optional value argument explicitly; argument positions differ
   between the plain and _CHK variants (the _CHK forms carry extra
   leading arguments), hence the index shifts below.  */
3495 case BUILT_IN_FPRINTF:
3496 case BUILT_IN_FPRINTF_UNLOCKED:
3497 case BUILT_IN_VFPRINTF:
3498 if (n == 2 || n == 3)
3499 return gimple_fold_builtin_fprintf (gsi,
3500 gimple_call_arg (stmt, 0),
3501 gimple_call_arg (stmt, 1),
3502 n == 3
3503 ? gimple_call_arg (stmt, 2)
3504 : NULL_TREE,
3505 fcode);
3506 break;
3507 case BUILT_IN_FPRINTF_CHK:
3508 case BUILT_IN_VFPRINTF_CHK:
3509 if (n == 3 || n == 4)
3510 return gimple_fold_builtin_fprintf (gsi,
3511 gimple_call_arg (stmt, 0),
3512 gimple_call_arg (stmt, 2),
3513 n == 4
3514 ? gimple_call_arg (stmt, 3)
3515 : NULL_TREE,
3516 fcode);
3517 break;
3518 case BUILT_IN_PRINTF:
3519 case BUILT_IN_PRINTF_UNLOCKED:
3520 case BUILT_IN_VPRINTF:
3521 if (n == 1 || n == 2)
3522 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3523 n == 2
3524 ? gimple_call_arg (stmt, 1)
3525 : NULL_TREE, fcode);
3526 break;
3527 case BUILT_IN_PRINTF_CHK:
3528 case BUILT_IN_VPRINTF_CHK:
3529 if (n == 2 || n == 3)
3530 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3531 n == 3
3532 ? gimple_call_arg (stmt, 2)
3533 : NULL_TREE, fcode);
3534 break;
3535 case BUILT_IN_ACC_ON_DEVICE:
3536 return gimple_fold_builtin_acc_on_device (gsi,
3537 gimple_call_arg (stmt, 0));
3538 case BUILT_IN_REALLOC:
3539 return gimple_fold_builtin_realloc (gsi);
3541 default:;
3544 /* Try the generic builtin folder. */
3545 bool ignore = (gimple_call_lhs (stmt) == NULL);
3546 tree result = fold_call_stmt (stmt, ignore);
3547 if (result)
3549 if (ignore)
3550 STRIP_NOPS (result);
3551 else
3552 result = fold_convert (gimple_call_return_type (stmt), result);
3553 if (!update_call_from_tree (gsi, result))
3554 gimplify_and_update_call_from_tree (gsi, result);
3555 return true;
3558 return false;
3561 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3562 function calls to constants, where possible. */
/* Fold an IFN_GOACC_DIM_SIZE or IFN_GOACC_DIM_POS internal call to a
   constant when the OpenACC dimension size is statically known.
   Returns the constant result, or NULL_TREE if not foldable.  */
3564 static tree
3565 fold_internal_goacc_dim (const gimple *call)
3567 int axis = oacc_get_ifn_dim_arg (call);
3568 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3569 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3570 tree result = NULL_TREE;
3572 /* If the size is 1, or we only want the size and it is not dynamic,
3573 we know the answer. */
3574 if (size == 1 || (!is_pos && size))
3576 tree type = TREE_TYPE (gimple_call_lhs (call));
/* size - is_pos: for DIM_POS with size 1 the only position is 0;
   for DIM_SIZE it is the size itself.  */
3577 result = build_int_cst (type, size - is_pos);
3580 return result;
3583 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3584 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3585 &var where var is only addressable because of such calls. */
/* Predicate: return true if STMT is an __atomic_compare_exchange_N
   call that qualifies for conversion into the IFN_ATOMIC_COMPARE_EXCHANGE
   internal function (see fold_builtin_atomic_compare_exchange below).
   The second argument must be &var of a local scalar variable whose
   type is safe to round-trip through a VIEW_CONVERT_EXPR, and the
   target must provide a compare-and-swap pattern for the mode.  */
3587 bool
3588 optimize_atomic_compare_exchange_p (gimple *stmt)
3590 if (gimple_call_num_args (stmt) != 6
3591 || !flag_inline_atomics
3592 || !optimize
3593 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3594 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3595 || !gimple_vdef (stmt)
3596 || !gimple_vuse (stmt))
3597 return false;
3599 tree fndecl = gimple_call_fndecl (stmt);
3600 switch (DECL_FUNCTION_CODE (fndecl))
3602 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3603 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3604 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3605 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3606 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3607 break;
3608 default:
3609 return false;
/* The "expected" argument must literally be &var of an SSA-able
   local, so the transform can rewrite var as a register.  */
3612 tree expected = gimple_call_arg (stmt, 1);
3613 if (TREE_CODE (expected) != ADDR_EXPR
3614 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3615 return false;
3617 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3618 if (!is_gimple_reg_type (etype)
3619 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3620 || TREE_THIS_VOLATILE (etype)
3621 || VECTOR_TYPE_P (etype)
3622 || TREE_CODE (etype) == COMPLEX_TYPE
3623 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3624 might not preserve all the bits. See PR71716. */
3625 || SCALAR_FLOAT_TYPE_P (etype)
3626 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3627 return false;
/* The "weak" flag must be a literal 0 or 1.  */
3629 tree weak = gimple_call_arg (stmt, 3);
3630 if (!integer_zerop (weak) && !integer_onep (weak))
3631 return false;
/* The third parameter type of the builtin gives the integral mode
   being exchanged; the target must support CAS in that mode.  */
3633 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3634 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3635 machine_mode mode = TYPE_MODE (itype);
3637 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3638 == CODE_FOR_nothing
3639 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3640 return false;
3642 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3643 return false;
3645 return true;
3648 /* Fold
3649 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3650 into
3651 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3652 i = IMAGPART_EXPR <t>;
3653 r = (_Bool) i;
3654 e = REALPART_EXPR <t>; */
/* Rewrite an __atomic_compare_exchange_N call (already validated by
   optimize_atomic_compare_exchange_p) into the internal function form
   documented in the comment above: a _Complex-valued
   IFN_ATOMIC_COMPARE_EXCHANGE whose IMAGPART is the success flag and
   whose REALPART is stored back into the "expected" variable.  If the
   original call could throw, the follow-up statements must be placed
   on the fallthru edge instead of after the call.  */
3656 void
3657 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3659 gimple *stmt = gsi_stmt (*gsi);
3660 tree fndecl = gimple_call_fndecl (stmt);
3661 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3662 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3663 tree ctype = build_complex_type (itype);
3664 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3665 bool throws = false;
3666 edge e = NULL;
/* Load the current value of the "expected" variable into a register.  */
3667 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3668 expected);
3669 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3670 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3671 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3673 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3674 build1 (VIEW_CONVERT_EXPR, itype,
3675 gimple_assign_lhs (g)));
3676 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode the weak flag and the operand size as w * 256 + N, matching
   the IFN_ATOMIC_COMPARE_EXCHANGE contract.  */
3678 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3679 + int_size_in_bytes (itype);
3680 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3681 gimple_call_arg (stmt, 0),
3682 gimple_assign_lhs (g),
3683 gimple_call_arg (stmt, 2),
3684 build_int_cst (integer_type_node, flag),
3685 gimple_call_arg (stmt, 4),
3686 gimple_call_arg (stmt, 5));
3687 tree lhs = make_ssa_name (ctype);
3688 gimple_call_set_lhs (g, lhs);
/* Transfer the virtual operands from the old call to the new one.  */
3689 gimple_set_vdef (g, gimple_vdef (stmt));
3690 gimple_set_vuse (g, gimple_vuse (stmt));
3691 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3692 tree oldlhs = gimple_call_lhs (stmt);
3693 if (stmt_can_throw_internal (stmt))
3695 throws = true;
3696 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3698 gimple_call_set_nothrow (as_a <gcall *> (g),
3699 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3700 gimple_call_set_lhs (stmt, NULL_TREE);
3701 gsi_replace (gsi, g, true);
3702 if (oldlhs)
/* r = (_Bool) IMAGPART_EXPR <t>;  */
3704 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3705 build1 (IMAGPART_EXPR, itype, lhs));
3706 if (throws)
3708 gsi_insert_on_edge_immediate (e, g);
3709 *gsi = gsi_for_stmt (g);
3711 else
3712 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3713 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3714 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>, converted back to the expected var's type.  */
3716 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3717 build1 (REALPART_EXPR, itype, lhs));
3718 if (throws && oldlhs == NULL_TREE)
3720 gsi_insert_on_edge_immediate (e, g);
3721 *gsi = gsi_for_stmt (g);
3723 else
3724 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3725 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3727 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3728 VIEW_CONVERT_EXPR,
3729 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3730 gimple_assign_lhs (g)));
3731 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3733 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3734 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave *GSI at the initial load of "expected" so callers revisit the
   whole replaced sequence.  */
3735 *gsi = gsiret;
3738 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3739 doesn't fit into TYPE. The test for overflow should be regardless of
3740 -fwrapv, and even for unsigned types. */
/* Return true if ARG0 CODE ARG1, computed in infinite signed
   precision, does not fit in TYPE.  CODE is PLUS_EXPR, MINUS_EXPR or
   MULT_EXPR.  Works regardless of -fwrapv and for unsigned types:
   the operation is carried out in a double-width wide-int so it can
   never wrap, then the exact result is tested against TYPE's range.  */
3742 bool
3743 arith_overflowed_p (enum tree_code code, const_tree type,
3744 const_tree arg0, const_tree arg1)
/* Twice WIDE_INT_MAX_PRECISION bits is enough to hold the exact
   product of two values of any supported type.  */
3746 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3747 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3748 widest2_int_cst;
3749 widest2_int warg0 = widest2_int_cst (arg0);
3750 widest2_int warg1 = widest2_int_cst (arg1);
3751 widest2_int wres;
3752 switch (code)
3754 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3755 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3756 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3757 default: gcc_unreachable ();
3759 signop sign = TYPE_SIGN (type);
/* A negative exact result can never fit in an unsigned type.  */
3760 if (sign == UNSIGNED && wi::neg_p (wres))
3761 return true;
3762 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3765 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3766 The statement may be replaced by another statement, e.g., if the call
3767 simplifies to a constant value. Return true if any changes were made.
3768 It is assumed that the operands have been previously folded. */
/* Attempt to fold the call statement at *GSI: simplify *& references
   in arguments and the static chain, devirtualize OBJ_TYPE_REF calls
   when the target set is known, then (unless INPLACE) fold builtins
   and internal functions.  Returns true if any change was made.  */
3770 static bool
3771 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3773 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3774 tree callee;
3775 bool changed = false;
3776 unsigned i;
3778 /* Fold *& in call arguments. */
3779 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3780 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3782 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3783 if (tmp)
3785 gimple_call_set_arg (stmt, i, tmp);
3786 changed = true;
3790 /* Check for virtual calls that became direct calls. */
3791 callee = gimple_call_fn (stmt);
3792 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3794 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* Diagnose (dump only) devirtualizations that contradict the type
   inheritance graph before committing to the direct call.  */
3796 if (dump_file && virtual_method_call_p (callee)
3797 && !possible_polymorphic_call_target_p
3798 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3799 (OBJ_TYPE_REF_EXPR (callee)))))
3801 fprintf (dump_file,
3802 "Type inheritance inconsistent devirtualization of ");
3803 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3804 fprintf (dump_file, " to ");
3805 print_generic_expr (dump_file, callee, TDF_SLIM);
3806 fprintf (dump_file, "\n");
3809 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3810 changed = true;
3812 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3814 bool final;
3815 vec <cgraph_node *>targets
3816 = possible_polymorphic_call_targets (callee, stmt, &final);
/* A complete target list of size <= 1 lets us either call the single
   target directly or replace an impossible call with unreachable.  */
3817 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3819 tree lhs = gimple_call_lhs (stmt);
3820 if (dump_enabled_p ())
3822 location_t loc = gimple_location_safe (stmt);
3823 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3824 "folding virtual function call to %s\n",
3825 targets.length () == 1
3826 ? targets[0]->name ()
3827 : "__builtin_unreachable");
3829 if (targets.length () == 1)
3831 tree fndecl = targets[0]->decl;
3832 gimple_call_set_fndecl (stmt, fndecl);
3833 changed = true;
3834 /* If changing the call to __cxa_pure_virtual
3835 or similar noreturn function, adjust gimple_call_fntype
3836 too. */
3837 if (gimple_call_noreturn_p (stmt)
3838 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3839 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3840 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3841 == void_type_node))
3842 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3843 /* If the call becomes noreturn, remove the lhs. */
3844 if (lhs
3845 && gimple_call_noreturn_p (stmt)
3846 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3847 || should_remove_lhs_p (lhs)))
3849 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep the SSA lhs defined by assigning it a default def.  */
3851 tree var = create_tmp_var (TREE_TYPE (lhs));
3852 tree def = get_or_create_ssa_default_def (cfun, var);
3853 gimple *new_stmt = gimple_build_assign (lhs, def);
3854 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3856 gimple_call_set_lhs (stmt, NULL_TREE);
3858 maybe_remove_unused_call_args (cfun, stmt);
3860 else
/* No possible target: the call can never execute; replace it with
   __builtin_unreachable.  */
3862 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3863 gimple *new_stmt = gimple_build_call (fndecl, 0);
3864 gimple_set_location (new_stmt, gimple_location (stmt));
3865 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3867 tree var = create_tmp_var (TREE_TYPE (lhs));
3868 tree def = get_or_create_ssa_default_def (cfun, var);
3870 /* To satisfy condition for
3871 cgraph_update_edges_for_call_stmt_node,
3872 we need to preserve GIMPLE_CALL statement
3873 at position of GSI iterator. */
3874 update_call_from_tree (gsi, def);
3875 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3877 else
3879 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3880 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3881 gsi_replace (gsi, new_stmt, false);
3883 return true;
3889 /* Check for indirect calls that became direct calls, and then
3890 no longer require a static chain. */
3891 if (gimple_call_chain (stmt))
3893 tree fn = gimple_call_fndecl (stmt);
3894 if (fn && !DECL_STATIC_CHAIN (fn))
3896 gimple_call_set_chain (stmt, NULL);
3897 changed = true;
3899 else
3901 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3902 if (tmp)
3904 gimple_call_set_chain (stmt, tmp);
3905 changed = true;
3910 if (inplace)
3911 return changed;
3913 /* Check for builtins that CCP can handle using information not
3914 available in the generic fold routines. */
3915 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3917 if (gimple_fold_builtin (gsi))
3918 changed = true;
3920 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3922 changed |= targetm.gimple_fold_builtin (gsi);
3924 else if (gimple_call_internal_p (stmt))
/* Internal functions: fold UBSAN checks and *_OVERFLOW internal fns
   whose arguments are (partially) constant.  SUBCODE holds the
   arithmetic operation; CPLX_RESULT is true for the *_OVERFLOW fns,
   whose result is a complex value of (result, overflow-flag).  */
3926 enum tree_code subcode = ERROR_MARK;
3927 tree result = NULL_TREE;
3928 bool cplx_result = false;
3929 tree overflow = NULL_TREE;
3930 switch (gimple_call_internal_fn (stmt))
3932 case IFN_BUILTIN_EXPECT:
3933 result = fold_builtin_expect (gimple_location (stmt),
3934 gimple_call_arg (stmt, 0),
3935 gimple_call_arg (stmt, 1),
3936 gimple_call_arg (stmt, 2));
3937 break;
3938 case IFN_UBSAN_OBJECT_SIZE:
3939 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3940 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3941 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3942 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3943 gimple_call_arg (stmt, 2))))
3945 gsi_replace (gsi, gimple_build_nop (), false);
3946 unlink_stmt_vdef (stmt);
3947 release_defs (stmt);
3948 return true;
3950 break;
3951 case IFN_GOACC_DIM_SIZE:
3952 case IFN_GOACC_DIM_POS:
3953 result = fold_internal_goacc_dim (stmt);
3954 break;
3955 case IFN_UBSAN_CHECK_ADD:
3956 subcode = PLUS_EXPR;
3957 break;
3958 case IFN_UBSAN_CHECK_SUB:
3959 subcode = MINUS_EXPR;
3960 break;
3961 case IFN_UBSAN_CHECK_MUL:
3962 subcode = MULT_EXPR;
3963 break;
3964 case IFN_ADD_OVERFLOW:
3965 subcode = PLUS_EXPR;
3966 cplx_result = true;
3967 break;
3968 case IFN_SUB_OVERFLOW:
3969 subcode = MINUS_EXPR;
3970 cplx_result = true;
3971 break;
3972 case IFN_MUL_OVERFLOW:
3973 subcode = MULT_EXPR;
3974 cplx_result = true;
3975 break;
3976 default:
3977 break;
3979 if (subcode != ERROR_MARK)
3981 tree arg0 = gimple_call_arg (stmt, 0);
3982 tree arg1 = gimple_call_arg (stmt, 1);
3983 tree type = TREE_TYPE (arg0);
3984 if (cplx_result)
/* For *_OVERFLOW the result type is the element type of the
   complex lhs, which may differ from the argument type.  */
3986 tree lhs = gimple_call_lhs (stmt);
3987 if (lhs == NULL_TREE)
3988 type = NULL_TREE;
3989 else
3990 type = TREE_TYPE (TREE_TYPE (lhs));
3992 if (type == NULL_TREE)
3994 /* x = y + 0; x = y - 0; x = y * 0; */
3995 else if (integer_zerop (arg1))
3996 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3997 /* x = 0 + y; x = 0 * y; */
3998 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3999 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4000 /* x = y - y; */
4001 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4002 result = integer_zero_node;
4003 /* x = y * 1; x = 1 * y; */
4004 else if (subcode == MULT_EXPR && integer_onep (arg1))
4005 result = arg0;
4006 else if (subcode == MULT_EXPR && integer_onep (arg0))
4007 result = arg1;
4008 else if (TREE_CODE (arg0) == INTEGER_CST
4009 && TREE_CODE (arg1) == INTEGER_CST)
4011 if (cplx_result)
4012 result = int_const_binop (subcode, fold_convert (type, arg0),
4013 fold_convert (type, arg1));
4014 else
4015 result = int_const_binop (subcode, arg0, arg1);
4016 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4018 if (cplx_result)
4019 overflow = build_one_cst (type);
4020 else
4021 result = NULL_TREE;
4024 if (result)
4026 if (result == integer_zero_node)
4027 result = build_zero_cst (type);
4028 else if (cplx_result && TREE_TYPE (result) != type)
4030 if (TREE_CODE (result) == INTEGER_CST)
4032 if (arith_overflowed_p (PLUS_EXPR, type, result,
4033 integer_zero_node))
4034 overflow = build_one_cst (type);
/* Give up if converting RESULT to TYPE could change its value
   (sign change to unsigned, or insufficient precision).  */
4036 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4037 && TYPE_UNSIGNED (type))
4038 || (TYPE_PRECISION (type)
4039 < (TYPE_PRECISION (TREE_TYPE (result))
4040 + (TYPE_UNSIGNED (TREE_TYPE (result))
4041 && !TYPE_UNSIGNED (type)))))
4042 result = NULL_TREE;
4043 if (result)
4044 result = fold_convert (type, result);
4049 if (result)
4051 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4052 result = drop_tree_overflow (result);
4053 if (cplx_result)
/* Package (result, overflow-flag) into a complex value for the
   *_OVERFLOW internal functions.  */
4055 if (overflow == NULL_TREE)
4056 overflow = build_zero_cst (TREE_TYPE (result));
4057 tree ctype = build_complex_type (TREE_TYPE (result));
4058 if (TREE_CODE (result) == INTEGER_CST
4059 && TREE_CODE (overflow) == INTEGER_CST)
4060 result = build_complex (ctype, result, overflow);
4061 else
4062 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4063 ctype, result, overflow);
4065 if (!update_call_from_tree (gsi, result))
4066 gimplify_and_update_call_from_tree (gsi, result);
4067 changed = true;
4071 return changed;
4075 /* Return true whether NAME has a use on STMT. */
/* Return true if the SSA name NAME has at least one use on STMT,
   scanning NAME's immediate-use list.  */
4077 static bool
4078 has_use_on_stmt (tree name, gimple *stmt)
4080 imm_use_iterator iter;
4081 use_operand_p use_p;
4082 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4083 if (USE_STMT (use_p) == stmt)
4084 return true;
4085 return false;
4088 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4089 gimple_simplify.
4091 Replaces *GSI with the simplification result in RCODE and OPS
4092 and the associated statements in *SEQ. Does the replacement
4093 according to INPLACE and returns true if the operation succeeded. */
/* Worker for fold_stmt_1: commit a gimple_simplify result.  RCODE and
   OPS describe the simplified form (a tree code or combined-fn code
   plus up to three operands) and *SEQ holds any auxiliary statements.
   Replaces *GSI accordingly, honoring INPLACE (no new statements may
   be inserted when INPLACE is true).  Returns true on success.  */
4096 static bool
4097 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4098 code_helper rcode, tree *ops,
4099 gimple_seq *seq, bool inplace)
4101 gimple *stmt = gsi_stmt (*gsi);
4103 /* Play safe and do not allow abnormals to be mentioned in
4104 newly created statements. See also maybe_push_res_to_seq.
4105 As an exception allow such uses if there was a use of the
4106 same SSA name on the old stmt. */
4107 if ((TREE_CODE (ops[0]) == SSA_NAME
4108 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4109 && !has_use_on_stmt (ops[0], stmt))
4110 || (ops[1]
4111 && TREE_CODE (ops[1]) == SSA_NAME
4112 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4113 && !has_use_on_stmt (ops[1], stmt))
4114 || (ops[2]
4115 && TREE_CODE (ops[2]) == SSA_NAME
4116 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4117 && !has_use_on_stmt (ops[2], stmt))
4118 || (COMPARISON_CLASS_P (ops[0])
4119 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4120 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4121 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4122 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4123 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4124 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4125 return false;
4127 /* Don't insert new statements when INPLACE is true, even if we could
4128 reuse STMT for the final statement. */
4129 if (inplace && !gimple_seq_empty_p (*seq))
4130 return false;
/* Case 1: the statement is a GIMPLE_COND; rewrite its condition.  */
4132 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4134 gcc_assert (rcode.is_tree_code ());
4135 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4136 /* GIMPLE_CONDs condition may not throw. */
4137 && (!flag_exceptions
4138 || !cfun->can_throw_non_call_exceptions
4139 || !operation_could_trap_p (rcode,
4140 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4141 false, NULL_TREE)))
4142 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4143 else if (rcode == SSA_NAME)
4144 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4145 build_zero_cst (TREE_TYPE (ops[0])));
4146 else if (rcode == INTEGER_CST)
4148 if (integer_zerop (ops[0]))
4149 gimple_cond_make_false (cond_stmt);
4150 else
4151 gimple_cond_make_true (cond_stmt);
4153 else if (!inplace)
4155 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4156 ops, seq);
4157 if (!res)
4158 return false;
4159 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4160 build_zero_cst (TREE_TYPE (res)));
4162 else
4163 return false;
4164 if (dump_file && (dump_flags & TDF_DETAILS))
4166 fprintf (dump_file, "gimple_simplified to ");
4167 if (!gimple_seq_empty_p (*seq))
4168 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4169 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4170 0, TDF_SLIM);
4172 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4173 return true;
/* Case 2: an assignment simplified to a tree-code RHS.  */
4175 else if (is_gimple_assign (stmt)
4176 && rcode.is_tree_code ())
4178 if (!inplace
4179 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4181 maybe_build_generic_op (rcode,
4182 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4183 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4184 if (dump_file && (dump_flags & TDF_DETAILS))
4186 fprintf (dump_file, "gimple_simplified to ");
4187 if (!gimple_seq_empty_p (*seq))
4188 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4189 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4190 0, TDF_SLIM);
4192 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4193 return true;
/* Case 3: a call simplified to the same combined function with
   (possibly) new arguments; update the arguments in place.  */
4196 else if (rcode.is_fn_code ()
4197 && gimple_call_combined_fn (stmt) == rcode)
4199 unsigned i;
4200 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4202 gcc_assert (ops[i] != NULL_TREE);
4203 gimple_call_set_arg (stmt, i, ops[i]);
4205 if (i < 3)
4206 gcc_assert (ops[i] == NULL_TREE);
4207 if (dump_file && (dump_flags & TDF_DETAILS))
4209 fprintf (dump_file, "gimple_simplified to ");
4210 if (!gimple_seq_empty_p (*seq))
4211 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4212 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4214 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4215 return true;
/* Case 4: anything else with an lhs, when insertion is allowed:
   materialize the result into *SEQ and replace the statement.  */
4217 else if (!inplace)
4219 if (gimple_has_lhs (stmt))
4221 tree lhs = gimple_get_lhs (stmt);
4222 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4223 ops, seq, lhs))
4224 return false;
4225 if (dump_file && (dump_flags & TDF_DETAILS))
4227 fprintf (dump_file, "gimple_simplified to ");
4228 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4230 gsi_replace_with_seq_vops (gsi, *seq);
4231 return true;
4233 else
4234 gcc_unreachable ();
4237 return false;
4239 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4241 static bool
4242 maybe_canonicalize_mem_ref_addr (tree *t)
4244 bool res = false;
/* For &X canonicalize the referenced object X itself.  */
4246 if (TREE_CODE (*t) == ADDR_EXPR)
4247 t = &TREE_OPERAND (*t, 0);
4249 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4250 generic vector extension. The actual vector referenced is
4251 view-converted to an array type for this purpose. If the index
4252 is constant the canonical representation in the middle-end is a
4253 BIT_FIELD_REF so re-write the former to the latter here. */
4254 if (TREE_CODE (*t) == ARRAY_REF
4255 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4256 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4257 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4259 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4260 if (VECTOR_TYPE_P (vtype))
4262 tree low = array_ref_low_bound (*t);
4263 if (TREE_CODE (low) == INTEGER_CST)
4265 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of the indexed element relative to the
   array's low bound, and the extent of the access, so we can
   verify the access stays within the vector.  */
4267 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4268 wi::to_widest (low));
4269 idx = wi::mul (idx, wi::to_widest
4270 (TYPE_SIZE (TREE_TYPE (*t))));
4271 widest_int ext
4272 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4273 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4275 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4276 TREE_TYPE (*t),
4277 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4278 TYPE_SIZE (TREE_TYPE (*t)),
4279 wide_int_to_tree (bitsizetype, idx));
4280 res = true;
/* Strip the handled-component chain to get at the base object.  */
4287 while (handled_component_p (*t))
4288 t = &TREE_OPERAND (*t, 0);
4290 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4291 of invariant addresses into a SSA name MEM_REF address. */
4292 if (TREE_CODE (*t) == MEM_REF
4293 || TREE_CODE (*t) == TARGET_MEM_REF)
4295 tree addr = TREE_OPERAND (*t, 0);
4296 if (TREE_CODE (addr) == ADDR_EXPR
4297 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4298 || handled_component_p (TREE_OPERAND (addr, 0))))
4300 tree base;
4301 HOST_WIDE_INT coffset;
4302 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4303 &coffset);
4304 if (!base)
4305 gcc_unreachable ();
/* Fold the component reference into the MEM_REF's constant offset.  */
4307 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4308 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4309 TREE_OPERAND (*t, 1),
4310 size_int (coffset));
4311 res = true;
4313 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4314 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4317 /* Canonicalize back MEM_REFs to plain reference trees if the object
4318 accessed is a decl that has the same access semantics as the MEM_REF. */
4319 if (TREE_CODE (*t) == MEM_REF
4320 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4321 && integer_zerop (TREE_OPERAND (*t, 1))
4322 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4324 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4325 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4326 if (/* Same volatile qualification. */
4327 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4328 /* Same TBAA behavior with -fstrict-aliasing. */
4329 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4330 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4331 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4332 /* Same alignment. */
4333 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4334 /* We have to look out here to not drop a required conversion
4335 from the rhs to the lhs if *t appears on the lhs or vice-versa
4336 if it appears on the rhs. Thus require strict type
4337 compatibility. */
4338 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4340 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4341 res = true;
4345 /* Canonicalize TARGET_MEM_REF in particular with respect to
4346 the indexes becoming constant. */
4347 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4349 tree tem = maybe_fold_tmr (*t);
4350 if (tem)
4352 *t = tem;
4353 res = true;
/* True iff any canonicalization was applied to *T.  */
4357 return res;
4360 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4361 distinguishes both cases. */
/* VALUEIZE maps an SSA name to a value to substitute (or NULL_TREE to
   refuse); returns true iff the statement or its operands changed.  */
4363 static bool
4364 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4366 bool changed = false;
4367 gimple *stmt = gsi_stmt (*gsi);
4368 bool nowarning = gimple_no_warning_p (stmt);
4369 unsigned i;
/* Defer overflow warnings; they are flushed at the end depending on
   whether folding actually changed anything.  */
4370 fold_defer_overflow_warnings ();
4372 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4373 after propagation.
4374 ??? This shouldn't be done in generic folding but in the
4375 propagation helpers which also know whether an address was
4376 propagated.
4377 Also canonicalize operand order. */
4378 switch (gimple_code (stmt))
4380 case GIMPLE_ASSIGN:
4381 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4383 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4384 if ((REFERENCE_CLASS_P (*rhs)
4385 || TREE_CODE (*rhs) == ADDR_EXPR)
4386 && maybe_canonicalize_mem_ref_addr (rhs))
4387 changed = true;
4388 tree *lhs = gimple_assign_lhs_ptr (stmt);
4389 if (REFERENCE_CLASS_P (*lhs)
4390 && maybe_canonicalize_mem_ref_addr (lhs))
4391 changed = true;
4393 else
4395 /* Canonicalize operand order. */
4396 enum tree_code code = gimple_assign_rhs_code (stmt);
4397 if (TREE_CODE_CLASS (code) == tcc_comparison
4398 || commutative_tree_code (code)
4399 || commutative_ternary_tree_code (code))
4401 tree rhs1 = gimple_assign_rhs1 (stmt);
4402 tree rhs2 = gimple_assign_rhs2 (stmt);
4403 if (tree_swap_operands_p (rhs1, rhs2))
4405 gimple_assign_set_rhs1 (stmt, rhs2);
4406 gimple_assign_set_rhs2 (stmt, rhs1);
/* Swapping comparison operands requires swapping the comparison code
   as well to preserve semantics.  */
4407 if (TREE_CODE_CLASS (code) == tcc_comparison)
4408 gimple_assign_set_rhs_code (stmt,
4409 swap_tree_comparison (code));
4410 changed = true;
4414 break;
4415 case GIMPLE_CALL:
4417 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4419 tree *arg = gimple_call_arg_ptr (stmt, i);
4420 if (REFERENCE_CLASS_P (*arg)
4421 && maybe_canonicalize_mem_ref_addr (arg))
4422 changed = true;
4424 tree *lhs = gimple_call_lhs_ptr (stmt);
4425 if (*lhs
4426 && REFERENCE_CLASS_P (*lhs)
4427 && maybe_canonicalize_mem_ref_addr (lhs))
4428 changed = true;
4429 break;
4431 case GIMPLE_ASM:
4433 gasm *asm_stmt = as_a <gasm *> (stmt);
4434 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4436 tree link = gimple_asm_output_op (asm_stmt, i);
4437 tree op = TREE_VALUE (link);
4438 if (REFERENCE_CLASS_P (op)
4439 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4440 changed = true;
4442 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4444 tree link = gimple_asm_input_op (asm_stmt, i);
4445 tree op = TREE_VALUE (link);
4446 if ((REFERENCE_CLASS_P (op)
4447 || TREE_CODE (op) == ADDR_EXPR)
4448 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4449 changed = true;
4452 break;
4453 case GIMPLE_DEBUG:
4454 if (gimple_debug_bind_p (stmt))
4456 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4457 if (*val
4458 && (REFERENCE_CLASS_P (*val)
4459 || TREE_CODE (*val) == ADDR_EXPR)
4460 && maybe_canonicalize_mem_ref_addr (val))
4461 changed = true;
4463 break;
4464 case GIMPLE_COND:
4466 /* Canonicalize operand order. */
4467 tree lhs = gimple_cond_lhs (stmt);
4468 tree rhs = gimple_cond_rhs (stmt);
4469 if (tree_swap_operands_p (lhs, rhs))
4471 gcond *gc = as_a <gcond *> (stmt);
4472 gimple_cond_set_lhs (gc, rhs);
4473 gimple_cond_set_rhs (gc, lhs);
4474 gimple_cond_set_code (gc,
4475 swap_tree_comparison (gimple_cond_code (gc)));
4476 changed = true;
4479 default:;
4482 /* Dispatch to pattern-based folding. */
/* In-place folding is only attempted for statement kinds that can be
   updated without changing their shape.  */
4483 if (!inplace
4484 || is_gimple_assign (stmt)
4485 || gimple_code (stmt) == GIMPLE_COND)
4487 gimple_seq seq = NULL;
4488 code_helper rcode;
4489 tree ops[3] = {};
4490 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4491 valueize, valueize))
4493 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4494 changed = true;
4495 else
4496 gimple_seq_discard (seq);
/* Re-fetch STMT; the simplification above may have replaced it.  */
4500 stmt = gsi_stmt (*gsi);
4502 /* Fold the main computation performed by the statement. */
4503 switch (gimple_code (stmt))
4505 case GIMPLE_ASSIGN:
4507 /* Try to canonicalize for boolean-typed X the comparisons
4508 X == 0, X == 1, X != 0, and X != 1. */
4509 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4510 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4512 tree lhs = gimple_assign_lhs (stmt);
4513 tree op1 = gimple_assign_rhs1 (stmt);
4514 tree op2 = gimple_assign_rhs2 (stmt);
4515 tree type = TREE_TYPE (op1);
4517 /* Check whether the comparison operands are of the same boolean
4518 type as the result type is.
4519 Check that second operand is an integer-constant with value
4520 one or zero. */
4521 if (TREE_CODE (op2) == INTEGER_CST
4522 && (integer_zerop (op2) || integer_onep (op2))
4523 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4525 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4526 bool is_logical_not = false;
4528 /* X == 0 and X != 1 is a logical-not.of X
4529 X == 1 and X != 0 is X */
4530 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4531 || (cmp_code == NE_EXPR && integer_onep (op2)))
4532 is_logical_not = true;
4534 if (is_logical_not == false)
4535 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4536 /* Only for one-bit precision typed X the transformation
4537 !X -> ~X is valied. */
4538 else if (TYPE_PRECISION (type) == 1)
4539 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4540 /* Otherwise we use !X -> X ^ 1. */
4541 else
4542 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4543 build_int_cst (type, 1));
4544 changed = true;
4545 break;
/* Generic RHS folding via fold_gimple_assign for the remaining
   assignments.  */
4549 unsigned old_num_ops = gimple_num_ops (stmt);
4550 tree lhs = gimple_assign_lhs (stmt);
4551 tree new_rhs = fold_gimple_assign (gsi);
4552 if (new_rhs
4553 && !useless_type_conversion_p (TREE_TYPE (lhs),
4554 TREE_TYPE (new_rhs)))
4555 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place we may only shrink the number of operands, never grow it.  */
4556 if (new_rhs
4557 && (!inplace
4558 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4560 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4561 changed = true;
4563 break;
4566 case GIMPLE_CALL:
4567 changed |= gimple_fold_call (gsi, inplace);
4568 break;
4570 case GIMPLE_ASM:
4571 /* Fold *& in asm operands. */
4573 gasm *asm_stmt = as_a <gasm *> (stmt);
4574 size_t noutputs;
4575 const char **oconstraints;
4576 const char *constraint;
4577 bool allows_mem, allows_reg;
4579 noutputs = gimple_asm_noutputs (asm_stmt);
4580 oconstraints = XALLOCAVEC (const char *, noutputs);
4582 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4584 tree link = gimple_asm_output_op (asm_stmt, i);
4585 tree op = TREE_VALUE (link);
4586 oconstraints[i]
4587 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4588 if (REFERENCE_CLASS_P (op)
4589 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4591 TREE_VALUE (link) = op;
4592 changed = true;
4595 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4597 tree link = gimple_asm_input_op (asm_stmt, i);
4598 tree op = TREE_VALUE (link);
4599 constraint
4600 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4601 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4602 oconstraints, &allows_mem, &allows_reg);
4603 if (REFERENCE_CLASS_P (op)
4604 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4605 != NULL_TREE)
4607 TREE_VALUE (link) = op;
4608 changed = true;
4612 break;
4614 case GIMPLE_DEBUG:
4615 if (gimple_debug_bind_p (stmt))
4617 tree val = gimple_debug_bind_get_value (stmt);
4618 if (val
4619 && REFERENCE_CLASS_P (val))
4621 tree tem = maybe_fold_reference (val, false);
4622 if (tem)
4624 gimple_debug_bind_set_value (stmt, tem);
4625 changed = true;
4628 else if (val
4629 && TREE_CODE (val) == ADDR_EXPR)
4631 tree ref = TREE_OPERAND (val, 0);
4632 tree tem = maybe_fold_reference (ref, false);
4633 if (tem)
4635 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4636 gimple_debug_bind_set_value (stmt, tem);
4637 changed = true;
4641 break;
4643 case GIMPLE_RETURN:
4645 greturn *ret_stmt = as_a<greturn *> (stmt);
4646 tree ret = gimple_return_retval(ret_stmt);
4648 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4650 tree val = valueize (ret);
4651 if (val && val != ret
4652 && may_propagate_copy (ret, val))
4654 gimple_return_set_retval (ret_stmt, val);
4655 changed = true;
4659 break;
4661 default:;
/* The folding above may have replaced the statement; re-fetch it.  */
4664 stmt = gsi_stmt (*gsi);
4666 /* Fold *& on the lhs. */
4667 if (gimple_has_lhs (stmt))
4669 tree lhs = gimple_get_lhs (stmt);
4670 if (lhs && REFERENCE_CLASS_P (lhs))
4672 tree new_lhs = maybe_fold_reference (lhs, true);
4673 if (new_lhs)
4675 gimple_set_lhs (stmt, new_lhs);
4676 changed = true;
/* Emit (or drop) the deferred overflow warnings now that we know
   whether anything changed.  */
4681 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4682 return changed;
4685 /* Valueization callback that ends up not following SSA edges. */
4687 tree
4688 no_follow_ssa_edges (tree)
/* Returning NULL_TREE refuses to look through the SSA definition, so
   folding uses only the statement's own operands.  */
4690 return NULL_TREE;
4693 /* Valueization callback that ends up following single-use SSA edges only. */
4695 tree
4696 follow_single_use_edges (tree val)
4698 if (TREE_CODE (val) == SSA_NAME
4699 && !has_single_use (val))
4700 return NULL_TREE;
4701 return val;
4704 /* Fold the statement pointed to by GSI. In some cases, this function may
4705 replace the whole statement with a new one. Returns true iff folding
4706 makes any changes.
4707 The statement pointed to by GSI should be in valid gimple form but may
4708 be in unfolded state as resulting from for example constant propagation
4709 which can produce *&x = 0. */
4711 bool
4712 fold_stmt (gimple_stmt_iterator *gsi)
4714 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4717 bool
4718 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4720 return fold_stmt_1 (gsi, false, valueize);
4723 /* Perform the minimal folding on statement *GSI. Only operations like
4724 *&x created by constant propagation are handled. The statement cannot
4725 be replaced with a new one. Return true if the statement was
4726 changed, false otherwise.
4727 The statement *GSI should be in valid gimple form but may
4728 be in unfolded state as resulting from for example constant propagation
4729 which can produce *&x = 0. */
4731 bool
4732 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4734 gimple *stmt = gsi_stmt (*gsi);
4735 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4736 gcc_assert (gsi_stmt (*gsi) == stmt);
4737 return changed;
4740 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4741 if EXPR is null or we don't know how.
4742 If non-null, the result always has boolean type. */
4744 static tree
4745 canonicalize_bool (tree expr, bool invert)
4747 if (!expr)
4748 return NULL_TREE;
4749 else if (invert)
4751 if (integer_nonzerop (expr))
4752 return boolean_false_node;
4753 else if (integer_zerop (expr))
4754 return boolean_true_node;
4755 else if (TREE_CODE (expr) == SSA_NAME)
4756 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4757 build_int_cst (TREE_TYPE (expr), 0));
4758 else if (COMPARISON_CLASS_P (expr))
4759 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4760 boolean_type_node,
4761 TREE_OPERAND (expr, 0),
4762 TREE_OPERAND (expr, 1));
4763 else
4764 return NULL_TREE;
4766 else
4768 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4769 return expr;
4770 if (integer_nonzerop (expr))
4771 return boolean_true_node;
4772 else if (integer_zerop (expr))
4773 return boolean_false_node;
4774 else if (TREE_CODE (expr) == SSA_NAME)
4775 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4776 build_int_cst (TREE_TYPE (expr), 0));
4777 else if (COMPARISON_CLASS_P (expr))
4778 return fold_build2 (TREE_CODE (expr),
4779 boolean_type_node,
4780 TREE_OPERAND (expr, 0),
4781 TREE_OPERAND (expr, 1));
4782 else
4783 return NULL_TREE;
4787 /* Check to see if a boolean expression EXPR is logically equivalent to the
4788 comparison (OP1 CODE OP2). Check for various identities involving
4789 SSA_NAMEs. */
4791 static bool
4792 same_bool_comparison_p (const_tree expr, enum tree_code code,
4793 const_tree op1, const_tree op2)
4795 gimple *s;
4797 /* The obvious case. */
4798 if (TREE_CODE (expr) == code
4799 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4800 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4801 return true;
4803 /* Check for comparing (name, name != 0) and the case where expr
4804 is an SSA_NAME with a definition matching the comparison. */
4805 if (TREE_CODE (expr) == SSA_NAME
4806 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME is equivalent to (NAME != 0) and to (NAME == 1).  */
4808 if (operand_equal_p (expr, op1, 0))
4809 return ((code == NE_EXPR && integer_zerop (op2))
4810 || (code == EQ_EXPR && integer_nonzerop (op2)));
4811 s = SSA_NAME_DEF_STMT (expr);
4812 if (is_gimple_assign (s)
4813 && gimple_assign_rhs_code (s) == code
4814 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4815 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4816 return true;
4819 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4820 of name is a comparison, recurse. */
4821 if (TREE_CODE (op1) == SSA_NAME
4822 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4824 s = SSA_NAME_DEF_STMT (op1);
4825 if (is_gimple_assign (s)
4826 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4828 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): compare EXPR with the definition itself.  */
4829 if ((c == NE_EXPR && integer_zerop (op2))
4830 || (c == EQ_EXPR && integer_nonzerop (op2)))
4831 return same_bool_comparison_p (expr, c,
4832 gimple_assign_rhs1 (s),
4833 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): compare EXPR with the inverted definition.  */
4834 if ((c == EQ_EXPR && integer_zerop (op2))
4835 || (c == NE_EXPR && integer_nonzerop (op2)))
4836 return same_bool_comparison_p (expr,
4837 invert_tree_comparison (c, false),
4838 gimple_assign_rhs1 (s),
4839 gimple_assign_rhs2 (s));
4842 return false;
4845 /* Check to see if two boolean expressions OP1 and OP2 are logically
4846 equivalent. */
4848 static bool
4849 same_bool_result_p (const_tree op1, const_tree op2)
4851 /* Simple cases first. */
4852 if (operand_equal_p (op1, op2, 0))
4853 return true;
4855 /* Check the cases where at least one of the operands is a comparison.
4856 These are a bit smarter than operand_equal_p in that they apply some
4857 identifies on SSA_NAMEs. */
4858 if (COMPARISON_CLASS_P (op2)
4859 && same_bool_comparison_p (op1, TREE_CODE (op2),
4860 TREE_OPERAND (op2, 0),
4861 TREE_OPERAND (op2, 1)))
4862 return true;
4863 if (COMPARISON_CLASS_P (op1)
4864 && same_bool_comparison_p (op2, TREE_CODE (op1),
4865 TREE_OPERAND (op1, 0),
4866 TREE_OPERAND (op1, 1)))
4867 return true;
4869 /* Default case. */
4870 return false;
4873 /* Forward declarations for some mutually recursive functions. */
4875 static tree
4876 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4877 enum tree_code code2, tree op2a, tree op2b);
4878 static tree
4879 and_var_with_comparison (tree var, bool invert,
4880 enum tree_code code2, tree op2a, tree op2b);
4881 static tree
4882 and_var_with_comparison_1 (gimple *stmt,
4883 enum tree_code code2, tree op2a, tree op2b);
4884 static tree
4885 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4886 enum tree_code code2, tree op2a, tree op2b);
4887 static tree
4888 or_var_with_comparison (tree var, bool invert,
4889 enum tree_code code2, tree op2a, tree op2b);
4890 static tree
4891 or_var_with_comparison_1 (gimple *stmt,
4892 enum tree_code code2, tree op2a, tree op2b);
4894 /* Helper function for and_comparisons_1: try to simplify the AND of the
4895 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4896 If INVERT is true, invert the value of the VAR before doing the AND.
4897 Return NULL_EXPR if we can't simplify this to a single expression. */
4899 static tree
4900 and_var_with_comparison (tree var, bool invert,
4901 enum tree_code code2, tree op2a, tree op2b)
4903 tree t;
4904 gimple *stmt = SSA_NAME_DEF_STMT (var);
4906 /* We can only deal with variables whose definitions are assignments. */
4907 if (!is_gimple_assign (stmt))
4908 return NULL_TREE;
4910 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4911 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4912 Then we only have to consider the simpler non-inverted cases. */
4913 if (invert)
4914 t = or_var_with_comparison_1 (stmt,
4915 invert_tree_comparison (code2, false),
4916 op2a, op2b);
4917 else
4918 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4919 return canonicalize_bool (t, invert);
4922 /* Try to simplify the AND of the ssa variable defined by the assignment
4923 STMT with the comparison specified by (OP2A CODE2 OP2B).
4924 Return NULL_EXPR if we can't simplify this to a single expression. */
4926 static tree
4927 and_var_with_comparison_1 (gimple *stmt,
4928 enum tree_code code2, tree op2a, tree op2b)
4930 tree var = gimple_assign_lhs (stmt);
4931 tree true_test_var = NULL_TREE;
4932 tree false_test_var = NULL_TREE;
4933 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4935 /* Check for identities like (var AND (var == 0)) => false. */
4936 if (TREE_CODE (op2a) == SSA_NAME
4937 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
4939 if ((code2 == NE_EXPR && integer_zerop (op2b))
4940 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4942 true_test_var = op2a;
4943 if (var == true_test_var)
4944 return var;
/* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
4946 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4947 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4949 false_test_var = op2a;
4950 if (var == false_test_var)
4951 return boolean_false_node;
4955 /* If the definition is a comparison, recurse on it. */
4956 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4958 tree t = and_comparisons_1 (innercode,
4959 gimple_assign_rhs1 (stmt),
4960 gimple_assign_rhs2 (stmt),
4961 code2,
4962 op2a,
4963 op2b)
4964 if (t)
4965 return t;
4968 /* If the definition is an AND or OR expression, we may be able to
4969 simplify by reassociating. */
4970 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4971 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4973 tree inner1 = gimple_assign_rhs1 (stmt);
4974 tree inner2 = gimple_assign_rhs2 (stmt);
4975 gimple *s;
4976 tree t;
/* PARTIAL caches the simplification obtained from the first inner
   operand, for combining with the second.  */
4977 tree partial = NULL_TREE;
4978 bool is_and = (innercode == BIT_AND_EXPR);
4980 /* Check for boolean identities that don't require recursive examination
4981 of inner1/inner2:
4982 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4983 inner1 AND (inner1 OR inner2) => inner1
4984 !inner1 AND (inner1 AND inner2) => false
4985 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4986 Likewise for similar cases involving inner2. */
4987 if (inner1 == true_test_var)
4988 return (is_and ? var : inner1);
4989 else if (inner2 == true_test_var)
4990 return (is_and ? var : inner2);
4991 else if (inner1 == false_test_var)
4992 return (is_and
4993 ? boolean_false_node
4994 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4995 else if (inner2 == false_test_var)
4996 return (is_and
4997 ? boolean_false_node
4998 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5000 /* Next, redistribute/reassociate the AND across the inner tests.
5001 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5002 if (TREE_CODE (inner1) == SSA_NAME
5003 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5004 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5005 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5006 gimple_assign_rhs1 (s),
5007 gimple_assign_rhs2 (s),
5008 code2, op2a, op2b)))
5010 /* Handle the AND case, where we are reassociating:
5011 (inner1 AND inner2) AND (op2a code2 op2b)
5012 => (t AND inner2)
5013 If the partial result t is a constant, we win. Otherwise
5014 continue on to try reassociating with the other inner test. */
5015 if (is_and)
5017 if (integer_onep (t))
5018 return inner2;
5019 else if (integer_zerop (t))
5020 return boolean_false_node;
5023 /* Handle the OR case, where we are redistributing:
5024 (inner1 OR inner2) AND (op2a code2 op2b)
5025 => (t OR (inner2 AND (op2a code2 op2b))) */
5026 else if (integer_onep (t))
5027 return boolean_true_node;
5029 /* Save partial result for later. */
5030 partial = t;
5033 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5034 if (TREE_CODE (inner2) == SSA_NAME
5035 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5036 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5037 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5038 gimple_assign_rhs1 (s),
5039 gimple_assign_rhs2 (s),
5040 code2, op2a, op2b)))
5042 /* Handle the AND case, where we are reassociating:
5043 (inner1 AND inner2) AND (op2a code2 op2b)
5044 => (inner1 AND t) */
5045 if (is_and)
5047 if (integer_onep (t))
5048 return inner1;
5049 else if (integer_zerop (t))
5050 return boolean_false_node;
5051 /* If both are the same, we can apply the identity
5052 (x AND x) == x. */
5053 else if (partial && same_bool_result_p (t, partial))
5054 return t;
5057 /* Handle the OR case. where we are redistributing:
5058 (inner1 OR inner2) AND (op2a code2 op2b)
5059 => (t OR (inner1 AND (op2a code2 op2b)))
5060 => (t OR partial) */
5061 else
5063 if (integer_onep (t))
5064 return boolean_true_node;
5065 else if (partial)
5067 /* We already got a simplification for the other
5068 operand to the redistributed OR expression. The
5069 interesting case is when at least one is false.
5070 Or, if both are the same, we can apply the identity
5071 (x OR x) == x. */
5072 if (integer_zerop (partial))
5073 return t;
5074 else if (integer_zerop (t))
5075 return partial;
5076 else if (same_bool_result_p (t, partial))
5077 return t;
5082 return NULL_TREE;
5085 /* Try to simplify the AND of two comparisons defined by
5086 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5087 If this can be done without constructing an intermediate value,
5088 return the resulting tree; otherwise NULL_TREE is returned.
5089 This function is deliberately asymmetric as it recurses on SSA_DEFs
5090 in the first comparison but not the second. */
5092 static tree
5093 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5094 enum tree_code code2, tree op2a, tree op2b)
5096 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5098 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5099 if (operand_equal_p (op1a, op2a, 0)
5100 && operand_equal_p (op1b, op2b, 0))
5102 /* Result will be either NULL_TREE, or a combined comparison. */
5103 tree t = combine_comparisons (UNKNOWN_LOCATION,
5104 TRUTH_ANDIF_EXPR, code1, code2,
5105 truth_type, op1a, op1b);
5106 if (t)
5107 return t;
5110 /* Likewise the swapped case of the above. */
5111 if (operand_equal_p (op1a, op2b, 0)
5112 && operand_equal_p (op1b, op2a, 0))
5114 /* Result will be either NULL_TREE, or a combined comparison. */
5115 tree t = combine_comparisons (UNKNOWN_LOCATION,
5116 TRUTH_ANDIF_EXPR, code1,
5117 swap_tree_comparison (code2),
5118 truth_type, op1a, op1b);
5119 if (t)
5120 return t;
5123 /* If both comparisons are of the same value against constants, we might
5124 be able to merge them. */
5125 if (operand_equal_p (op1a, op2a, 0)
5126 && TREE_CODE (op1b) == INTEGER_CST
5127 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is the three-way ordering of the two constant bounds.  */
5129 int cmp = tree_int_cst_compare (op1b, op2b);
5131 /* If we have (op1a == op1b), we should either be able to
5132 return that or FALSE, depending on whether the constant op1b
5133 also satisfies the other comparison against op2b. */
5134 if (code1 == EQ_EXPR)
5136 bool done = true;
5137 bool val;
5138 switch (code2)
5140 case EQ_EXPR: val = (cmp == 0); break;
5141 case NE_EXPR: val = (cmp != 0); break;
5142 case LT_EXPR: val = (cmp < 0); break;
5143 case GT_EXPR: val = (cmp > 0); break;
5144 case LE_EXPR: val = (cmp <= 0); break;
5145 case GE_EXPR: val = (cmp >= 0); break;
5146 default: done = false;
5148 if (done)
5150 if (val)
5151 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5152 else
5153 return boolean_false_node;
5156 /* Likewise if the second comparison is an == comparison. */
5157 else if (code2 == EQ_EXPR)
5159 bool done = true;
5160 bool val;
5161 switch (code1)
5163 case EQ_EXPR: val = (cmp == 0); break;
5164 case NE_EXPR: val = (cmp != 0); break;
5165 case LT_EXPR: val = (cmp > 0); break;
5166 case GT_EXPR: val = (cmp < 0); break;
5167 case LE_EXPR: val = (cmp >= 0); break;
5168 case GE_EXPR: val = (cmp <= 0); break;
5169 default: done = false;
5171 if (done)
5173 if (val)
5174 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5175 else
5176 return boolean_false_node;
5180 /* Same business with inequality tests. */
5181 else if (code1 == NE_EXPR)
5183 bool val;
5184 switch (code2)
5186 case EQ_EXPR: val = (cmp != 0); break;
5187 case NE_EXPR: val = (cmp == 0); break;
5188 case LT_EXPR: val = (cmp >= 0); break;
5189 case GT_EXPR: val = (cmp <= 0); break;
5190 case LE_EXPR: val = (cmp > 0); break;
5191 case GE_EXPR: val = (cmp < 0); break;
5192 default:
5193 val = false;
/* When VAL, the second comparison already implies the first.  */
5195 if (val)
5196 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5198 else if (code2 == NE_EXPR)
5200 bool val;
5201 switch (code1)
5203 case EQ_EXPR: val = (cmp == 0); break;
5204 case NE_EXPR: val = (cmp != 0); break;
5205 case LT_EXPR: val = (cmp <= 0); break;
5206 case GT_EXPR: val = (cmp >= 0); break;
5207 case LE_EXPR: val = (cmp < 0); break;
5208 case GE_EXPR: val = (cmp > 0); break;
5209 default:
5210 val = false;
5212 if (val)
5213 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5216 /* Chose the more restrictive of two < or <= comparisons. */
5217 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5218 && (code2 == LT_EXPR || code2 == LE_EXPR))
5220 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5221 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5222 else
5223 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5226 /* Likewise chose the more restrictive of two > or >= comparisons. */
5227 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5228 && (code2 == GT_EXPR || code2 == GE_EXPR))
5230 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5231 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5232 else
5233 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5236 /* Check for singleton ranges. */
5237 else if (cmp == 0
5238 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5239 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5240 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5242 /* Check for disjoint ranges. */
5243 else if (cmp <= 0
5244 && (code1 == LT_EXPR || code1 == LE_EXPR)
5245 && (code2 == GT_EXPR || code2 == GE_EXPR))
5246 return boolean_false_node;
5247 else if (cmp >= 0
5248 && (code1 == GT_EXPR || code1 == GE_EXPR)
5249 && (code2 == LT_EXPR || code2 == LE_EXPR))
5250 return boolean_false_node;
5253 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5254 NAME's definition is a truth value. See if there are any simplifications
5255 that can be done against the NAME's definition. */
5256 if (TREE_CODE (op1a) == SSA_NAME
5257 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5258 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests NAME for falsehood.  */
5260 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5261 || (code1 == NE_EXPR && integer_onep (op1b)));
5262 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5263 switch (gimple_code (stmt))
5265 case GIMPLE_ASSIGN:
5266 /* Try to simplify by copy-propagating the definition. */
5267 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5269 case GIMPLE_PHI:
5270 /* If every argument to the PHI produces the same result when
5271 ANDed with the second comparison, we win.
5272 Do not do this unless the type is bool since we need a bool
5273 result here anyway. */
5274 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5276 tree result = NULL_TREE;
5277 unsigned i;
5278 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5280 tree arg = gimple_phi_arg_def (stmt, i);
5282 /* If this PHI has itself as an argument, ignore it.
5283 If all the other args produce the same result,
5284 we're still OK. */
5285 if (arg == gimple_phi_result (stmt))
5286 continue;
5287 else if (TREE_CODE (arg) == INTEGER_CST)
5289 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5291 if (!result)
5292 result = boolean_false_node;
5293 else if (!integer_zerop (result))
5294 return NULL_TREE;
5296 else if (!result)
5297 result = fold_build2 (code2, boolean_type_node,
5298 op2a, op2b);
5299 else if (!same_bool_comparison_p (result,
5300 code2, op2a, op2b))
5301 return NULL_TREE;
5303 else if (TREE_CODE (arg) == SSA_NAME
5304 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5306 tree temp;
5307 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5308 /* In simple cases we can look through PHI nodes,
5309 but we have to be careful with loops.
5310 See PR49073. */
5311 if (! dom_info_available_p (CDI_DOMINATORS)
5312 || gimple_bb (def_stmt) == gimple_bb (stmt)
5313 || dominated_by_p (CDI_DOMINATORS,
5314 gimple_bb (def_stmt),
5315 gimple_bb (stmt)))
5316 return NULL_TREE;
5317 temp = and_var_with_comparison (arg, invert, code2,
5318 op2a, op2b);
5319 if (!temp)
5320 return NULL_TREE;
5321 else if (!result)
5322 result = temp;
5323 else if (!same_bool_result_p (result, temp))
5324 return NULL_TREE;
5326 else
5327 return NULL_TREE;
5329 return result;
5332 default:
5333 break;
5336 return NULL_TREE;
5339 /* Try to simplify the AND of two comparisons, specified by
5340 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5341 If this can be simplified to a single expression (without requiring
5342 introducing more SSA variables to hold intermediate values),
5343 return the resulting tree. Otherwise return NULL_TREE.
5344 If the result expression is non-null, it has boolean type. */
5346 tree
5347 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5348 enum tree_code code2, tree op2a, tree op2b)
5350 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5351 if (t)
5352 return t;
5353 else
5354 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5357 /* Helper function for or_comparisons_1: try to simplify the OR of the
5358 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5359 If INVERT is true, invert the value of VAR before doing the OR.
5360 Return NULL_EXPR if we can't simplify this to a single expression. */
5362 static tree
5363 or_var_with_comparison (tree var, bool invert,
5364 enum tree_code code2, tree op2a, tree op2b)
5366 tree t;
5367 gimple *stmt = SSA_NAME_DEF_STMT (var);
5369 /* We can only deal with variables whose definitions are assignments. */
5370 if (!is_gimple_assign (stmt))
5371 return NULL_TREE;
5373 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5374 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5375 Then we only have to consider the simpler non-inverted cases. */
5376 if (invert)
5377 t = and_var_with_comparison_1 (stmt,
5378 invert_tree_comparison (code2, false),
5379 op2a, op2b);
5380 else
5381 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5382 return canonicalize_bool (t, invert);
5385 /* Try to simplify the OR of the ssa variable defined by the assignment
5386 STMT with the comparison specified by (OP2A CODE2 OP2B).
5387 Return NULL_EXPR if we can't simplify this to a single expression. */
5389 static tree
5390 or_var_with_comparison_1 (gimple *stmt,
5391 enum tree_code code2, tree op2a, tree op2b)
5393 tree var = gimple_assign_lhs (stmt);
5394 tree true_test_var = NULL_TREE;
5395 tree false_test_var = NULL_TREE;
5396 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5398 /* Check for identities like (var OR (var != 0)) => true . */
5399 if (TREE_CODE (op2a) == SSA_NAME
5400 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5402 if ((code2 == NE_EXPR && integer_zerop (op2b))
5403 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5405 true_test_var = op2a;
5406 if (var == true_test_var)
5407 return var;
5409 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5410 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5412 false_test_var = op2a;
5413 if (var == false_test_var)
5414 return boolean_true_node;
5418 /* If the definition is a comparison, recurse on it. */
5419 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5421 tree t = or_comparisons_1 (innercode,
5422 gimple_assign_rhs1 (stmt),
5423 gimple_assign_rhs2 (stmt),
5424 code2,
5425 op2a,
5426 op2b);
5427 if (t)
5428 return t;
/* NOTE(review): if the recursion failed, fall through and try the
   reassociation strategies below.  */
5431 /* If the definition is an AND or OR expression, we may be able to
5432 simplify by reassociating. */
5433 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5434 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5436 tree inner1 = gimple_assign_rhs1 (stmt);
5437 tree inner2 = gimple_assign_rhs2 (stmt);
5438 gimple *s;
5439 tree t;
5440 tree partial = NULL_TREE;
5441 bool is_or = (innercode == BIT_IOR_EXPR);
5443 /* Check for boolean identities that don't require recursive examination
5444 of inner1/inner2:
5445 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5446 inner1 OR (inner1 AND inner2) => inner1
5447 !inner1 OR (inner1 OR inner2) => true
5448 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5450 if (inner1 == true_test_var)
5451 return (is_or ? var : inner1);
5452 else if (inner2 == true_test_var)
5453 return (is_or ? var : inner2);
5454 else if (inner1 == false_test_var)
5455 return (is_or
5456 ? boolean_true_node
5457 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5458 else if (inner2 == false_test_var)
5459 return (is_or
5460 ? boolean_true_node
5461 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5463 /* Next, redistribute/reassociate the OR across the inner tests.
5464 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5465 if (TREE_CODE (inner1) == SSA_NAME
5466 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5467 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5468 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5469 gimple_assign_rhs1 (s),
5470 gimple_assign_rhs2 (s),
5471 code2, op2a, op2b)))
5473 /* Handle the OR case, where we are reassociating:
5474 (inner1 OR inner2) OR (op2a code2 op2b)
5475 => (t OR inner2)
5476 If the partial result t is a constant, we win. Otherwise
5477 continue on to try reassociating with the other inner test. */
5478 if (is_or)
5480 if (integer_onep (t))
5481 return boolean_true_node;
5482 else if (integer_zerop (t))
5483 return inner2;
5486 /* Handle the AND case, where we are redistributing:
5487 (inner1 AND inner2) OR (op2a code2 op2b)
5488 => (t AND (inner2 OR (op2a code op2b))) */
5489 else if (integer_zerop (t))
5490 return boolean_false_node;
5492 /* Save partial result for later. */
5493 partial = t;
5496 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5497 if (TREE_CODE (inner2) == SSA_NAME
5498 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5499 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5500 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5501 gimple_assign_rhs1 (s),
5502 gimple_assign_rhs2 (s),
5503 code2, op2a, op2b)))
5505 /* Handle the OR case, where we are reassociating:
5506 (inner1 OR inner2) OR (op2a code2 op2b)
5507 => (inner1 OR t)
5508 => (t OR partial) */
5509 if (is_or)
5511 if (integer_zerop (t))
5512 return inner1;
5513 else if (integer_onep (t))
5514 return boolean_true_node;
5515 /* If both are the same, we can apply the identity
5516 (x OR x) == x. */
5517 else if (partial && same_bool_result_p (t, partial))
5518 return t;
5521 /* Handle the AND case, where we are redistributing:
5522 (inner1 AND inner2) OR (op2a code2 op2b)
5523 => (t AND (inner1 OR (op2a code2 op2b)))
5524 => (t AND partial) */
5525 else
5527 if (integer_zerop (t))
5528 return boolean_false_node;
5529 else if (partial)
5531 /* We already got a simplification for the other
5532 operand to the redistributed AND expression. The
5533 interesting case is when at least one is true.
5534 Or, if both are the same, we can apply the identity
5535 (x AND x) == x. */
5536 if (integer_onep (partial))
5537 return t;
5538 else if (integer_onep (t))
5539 return partial;
5540 else if (same_bool_result_p (t, partial))
5541 return t;
/* No single-expression simplification was found.  */
5546 return NULL_TREE;
5549 /* Try to simplify the OR of two comparisons defined by
5550 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5551 If this can be done without constructing an intermediate value,
5552 return the resulting tree; otherwise NULL_TREE is returned.
5553 This function is deliberately asymmetric as it recurses on SSA_DEFs
5554 in the first comparison but not the second. */
5556 static tree
5557 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5558 enum tree_code code2, tree op2a, tree op2b)
5560 tree truth_type = truth_type_for (TREE_TYPE (op1a));
/* NOTE(review): combine_comparisons below operates in the boolean truth
   type derived from OP1A's type.  */
5562 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5563 if (operand_equal_p (op1a, op2a, 0)
5564 && operand_equal_p (op1b, op2b, 0))
5566 /* Result will be either NULL_TREE, or a combined comparison. */
5567 tree t = combine_comparisons (UNKNOWN_LOCATION,
5568 TRUTH_ORIF_EXPR, code1, code2,
5569 truth_type, op1a, op1b);
5570 if (t)
5571 return t;
5574 /* Likewise the swapped case of the above. */
5575 if (operand_equal_p (op1a, op2b, 0)
5576 && operand_equal_p (op1b, op2a, 0))
5578 /* Result will be either NULL_TREE, or a combined comparison. */
5579 tree t = combine_comparisons (UNKNOWN_LOCATION,
5580 TRUTH_ORIF_EXPR, code1,
5581 swap_tree_comparison (code2),
5582 truth_type, op1a, op1b);
5583 if (t)
5584 return t;
5587 /* If both comparisons are of the same value against constants, we might
5588 be able to merge them. */
5589 if (operand_equal_p (op1a, op2a, 0)
5590 && TREE_CODE (op1b) == INTEGER_CST
5591 && TREE_CODE (op2b) == INTEGER_CST)
5593 int cmp = tree_int_cst_compare (op1b, op2b);
5595 /* If we have (op1a != op1b), we should either be able to
5596 return that or TRUE, depending on whether the constant op1b
5597 also satisfies the other comparison against op2b. */
5598 if (code1 == NE_EXPR)
5600 bool done = true;
5601 bool val;
5602 switch (code2)
5604 case EQ_EXPR: val = (cmp == 0); break;
5605 case NE_EXPR: val = (cmp != 0); break;
5606 case LT_EXPR: val = (cmp < 0); break;
5607 case GT_EXPR: val = (cmp > 0); break;
5608 case LE_EXPR: val = (cmp <= 0); break;
5609 case GE_EXPR: val = (cmp >= 0); break;
5610 default: done = false;
5612 if (done)
5614 if (val)
5615 return boolean_true_node;
5616 else
5617 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5620 /* Likewise if the second comparison is a != comparison. */
5621 else if (code2 == NE_EXPR)
5623 bool done = true;
5624 bool val;
5625 switch (code1)
5627 case EQ_EXPR: val = (cmp == 0); break;
5628 case NE_EXPR: val = (cmp != 0); break;
5629 case LT_EXPR: val = (cmp > 0); break;
5630 case GT_EXPR: val = (cmp < 0); break;
5631 case LE_EXPR: val = (cmp >= 0); break;
5632 case GE_EXPR: val = (cmp <= 0); break;
5633 default: done = false;
5635 if (done)
5637 if (val)
5638 return boolean_true_node;
5639 else
5640 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5644 /* See if an equality test is redundant with the other comparison. */
5645 else if (code1 == EQ_EXPR)
5647 bool val;
5648 switch (code2)
5650 case EQ_EXPR: val = (cmp == 0); break;
5651 case NE_EXPR: val = (cmp != 0); break;
5652 case LT_EXPR: val = (cmp < 0); break;
5653 case GT_EXPR: val = (cmp > 0); break;
5654 case LE_EXPR: val = (cmp <= 0); break;
5655 case GE_EXPR: val = (cmp >= 0); break;
5656 default:
5657 val = false;
5659 if (val)
5660 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5662 else if (code2 == EQ_EXPR)
5664 bool val;
5665 switch (code1)
5667 case EQ_EXPR: val = (cmp == 0); break;
5668 case NE_EXPR: val = (cmp != 0); break;
5669 case LT_EXPR: val = (cmp > 0); break;
5670 case GT_EXPR: val = (cmp < 0); break;
5671 case LE_EXPR: val = (cmp >= 0); break;
5672 case GE_EXPR: val = (cmp <= 0); break;
5673 default:
5674 val = false;
5676 if (val)
5677 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5680 /* Chose the less restrictive of two < or <= comparisons. */
5681 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5682 && (code2 == LT_EXPR || code2 == LE_EXPR))
5684 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5685 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5686 else
5687 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5690 /* Likewise chose the less restrictive of two > or >= comparisons. */
5691 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5692 && (code2 == GT_EXPR || code2 == GE_EXPR))
5694 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5695 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5696 else
5697 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5700 /* Check for singleton ranges. */
5701 else if (cmp == 0
5702 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5703 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5704 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5706 /* Check for less/greater pairs that don't restrict the range at all. */
5707 else if (cmp >= 0
5708 && (code1 == LT_EXPR || code1 == LE_EXPR)
5709 && (code2 == GT_EXPR || code2 == GE_EXPR))
5710 return boolean_true_node;
5711 else if (cmp <= 0
5712 && (code1 == GT_EXPR || code1 == GE_EXPR)
5713 && (code2 == LT_EXPR || code2 == LE_EXPR))
5714 return boolean_true_node;
5717 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5718 NAME's definition is a truth value. See if there are any simplifications
5719 that can be done against the NAME's definition. */
5720 if (TREE_CODE (op1a) == SSA_NAME
5721 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5722 && (integer_zerop (op1b) || integer_onep (op1b)))
5724 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5725 || (code1 == NE_EXPR && integer_onep (op1b)));
5726 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5727 switch (gimple_code (stmt))
5729 case GIMPLE_ASSIGN:
5730 /* Try to simplify by copy-propagating the definition. */
5731 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5733 case GIMPLE_PHI:
5734 /* If every argument to the PHI produces the same result when
5735 ORed with the second comparison, we win.
5736 Do not do this unless the type is bool since we need a bool
5737 result here anyway. */
5738 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5740 tree result = NULL_TREE;
5741 unsigned i;
5742 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5744 tree arg = gimple_phi_arg_def (stmt, i);
5746 /* If this PHI has itself as an argument, ignore it.
5747 If all the other args produce the same result,
5748 we're still OK. */
5749 if (arg == gimple_phi_result (stmt))
5750 continue;
5751 else if (TREE_CODE (arg) == INTEGER_CST)
5753 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5755 if (!result)
5756 result = boolean_true_node;
5757 else if (!integer_onep (result))
5758 return NULL_TREE;
5760 else if (!result)
5761 result = fold_build2 (code2, boolean_type_node,
5762 op2a, op2b);
5763 else if (!same_bool_comparison_p (result,
5764 code2, op2a, op2b))
5765 return NULL_TREE;
5767 else if (TREE_CODE (arg) == SSA_NAME
5768 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5770 tree temp;
5771 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5772 /* In simple cases we can look through PHI nodes,
5773 but we have to be careful with loops.
5774 See PR49073. */
5775 if (! dom_info_available_p (CDI_DOMINATORS)
5776 || gimple_bb (def_stmt) == gimple_bb (stmt)
5777 || dominated_by_p (CDI_DOMINATORS,
5778 gimple_bb (def_stmt),
5779 gimple_bb (stmt)))
5780 return NULL_TREE;
5781 temp = or_var_with_comparison (arg, invert, code2,
5782 op2a, op2b);
5783 if (!temp)
5784 return NULL_TREE;
5785 else if (!result)
5786 result = temp;
5787 else if (!same_bool_result_p (result, temp))
5788 return NULL_TREE;
5790 else
5791 return NULL_TREE;
5793 return result;
5796 default:
5797 break;
/* Nothing simplified.  */
5800 return NULL_TREE;
5803 /* Try to simplify the OR of two comparisons, specified by
5804 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5805 If this can be simplified to a single expression (without requiring
5806 introducing more SSA variables to hold intermediate values),
5807 return the resulting tree. Otherwise return NULL_TREE.
5808 If the result expression is non-null, it has boolean type. */
5810 tree
5811 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5812 enum tree_code code2, tree op2a, tree op2b)
5814 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5815 if (t)
5816 return t;
5817 else
5818 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5822 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5824 Either NULL_TREE, a simplified but non-constant or a constant
5825 is returned.
5827 ??? This should go into a gimple-fold-inline.h file to be eventually
5828 privatized with the single valueize function used in the various TUs
5829 to avoid the indirect function call overhead. */
5831 tree
5832 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5833 tree (*gvalueize) (tree))
5835 code_helper rcode;
5836 tree ops[3] = {};
/* First give the generic match-and-simplify machinery a chance.  */
5837 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5838 edges if there are intermediate VARYING defs. For this reason
5839 do not follow SSA edges here even though SCCVN can technically
5840 just deal fine with that. */
5841 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5843 tree res = NULL_TREE;
5844 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5845 res = ops[0];
5846 else if (mprts_hook)
5847 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5848 if (res)
5850 if (dump_file && dump_flags & TDF_DETAILS)
5852 fprintf (dump_file, "Match-and-simplified ");
5853 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5854 fprintf (dump_file, " to ");
5855 print_generic_expr (dump_file, res);
5856 fprintf (dump_file, "\n");
5858 return res;
/* Match-and-simplify did not apply; fall back to statement-kind
   specific constant folding below.  */
5862 location_t loc = gimple_location (stmt);
5863 switch (gimple_code (stmt))
5865 case GIMPLE_ASSIGN:
5867 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5869 switch (get_gimple_rhs_class (subcode))
5871 case GIMPLE_SINGLE_RHS:
5873 tree rhs = gimple_assign_rhs1 (stmt);
5874 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5876 if (TREE_CODE (rhs) == SSA_NAME)
5878 /* If the RHS is an SSA_NAME, return its known constant value,
5879 if any. */
5880 return (*valueize) (rhs);
5882 /* Handle propagating invariant addresses into address
5883 operations. */
5884 else if (TREE_CODE (rhs) == ADDR_EXPR
5885 && !is_gimple_min_invariant (rhs))
5887 HOST_WIDE_INT offset = 0;
5888 tree base;
5889 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5890 &offset,
5891 valueize);
5892 if (base
5893 && (CONSTANT_CLASS_P (base)
5894 || decl_address_invariant_p (base)))
5895 return build_invariant_address (TREE_TYPE (rhs),
5896 base, offset);
5898 else if (TREE_CODE (rhs) == CONSTRUCTOR
5899 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5900 && (CONSTRUCTOR_NELTS (rhs)
5901 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5903 unsigned i;
5904 tree val, *vec;
5906 vec = XALLOCAVEC (tree,
5907 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5908 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5910 val = (*valueize) (val);
5911 if (TREE_CODE (val) == INTEGER_CST
5912 || TREE_CODE (val) == REAL_CST
5913 || TREE_CODE (val) == FIXED_CST)
5914 vec[i] = val;
5915 else
5916 return NULL_TREE;
5919 return build_vector (TREE_TYPE (rhs), vec);
5921 if (subcode == OBJ_TYPE_REF)
5923 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5924 /* If callee is constant, we can fold away the wrapper. */
5925 if (is_gimple_min_invariant (val))
5926 return val;
5929 if (kind == tcc_reference)
5931 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5932 || TREE_CODE (rhs) == REALPART_EXPR
5933 || TREE_CODE (rhs) == IMAGPART_EXPR)
5934 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5936 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5937 return fold_unary_loc (EXPR_LOCATION (rhs),
5938 TREE_CODE (rhs),
5939 TREE_TYPE (rhs), val);
5941 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5942 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5944 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5945 return fold_ternary_loc (EXPR_LOCATION (rhs),
5946 TREE_CODE (rhs),
5947 TREE_TYPE (rhs), val,
5948 TREE_OPERAND (rhs, 1),
5949 TREE_OPERAND (rhs, 2));
5951 else if (TREE_CODE (rhs) == MEM_REF
5952 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5954 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5955 if (TREE_CODE (val) == ADDR_EXPR
5956 && is_gimple_min_invariant (val))
5958 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5959 unshare_expr (val),
5960 TREE_OPERAND (rhs, 1));
5961 if (tem)
5962 rhs = tem;
5965 return fold_const_aggregate_ref_1 (rhs, valueize);
5967 else if (kind == tcc_declaration)
5968 return get_symbol_constant_value (rhs);
5969 return rhs;
5972 case GIMPLE_UNARY_RHS:
5973 return NULL_TREE;
5975 case GIMPLE_BINARY_RHS:
5976 /* Translate &x + CST into an invariant form suitable for
5977 further propagation. */
5978 if (subcode == POINTER_PLUS_EXPR)
5980 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5981 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5982 if (TREE_CODE (op0) == ADDR_EXPR
5983 && TREE_CODE (op1) == INTEGER_CST)
5985 tree off = fold_convert (ptr_type_node, op1);
5986 return build_fold_addr_expr_loc
5987 (loc,
5988 fold_build2 (MEM_REF,
5989 TREE_TYPE (TREE_TYPE (op0)),
5990 unshare_expr (op0), off));
5993 /* Canonicalize bool != 0 and bool == 0 appearing after
5994 valueization. While gimple_simplify handles this
5995 it can get confused by the ~X == 1 -> X == 0 transform
5996 which we cant reduce to a SSA name or a constant
5997 (and we have no way to tell gimple_simplify to not
5998 consider those transforms in the first place). */
5999 else if (subcode == EQ_EXPR
6000 || subcode == NE_EXPR)
6002 tree lhs = gimple_assign_lhs (stmt);
6003 tree op0 = gimple_assign_rhs1 (stmt);
6004 if (useless_type_conversion_p (TREE_TYPE (lhs),
6005 TREE_TYPE (op0)))
6007 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6008 op0 = (*valueize) (op0);
6009 if (TREE_CODE (op0) == INTEGER_CST)
6010 std::swap (op0, op1);
6011 if (TREE_CODE (op1) == INTEGER_CST
6012 && ((subcode == NE_EXPR && integer_zerop (op1))
6013 || (subcode == EQ_EXPR && integer_onep (op1))))
6014 return op0;
6017 return NULL_TREE;
6019 case GIMPLE_TERNARY_RHS:
6021 /* Handle ternary operators that can appear in GIMPLE form. */
6022 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6023 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6024 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6025 return fold_ternary_loc (loc, subcode,
6026 gimple_expr_type (stmt), op0, op1, op2);
6029 default:
6030 gcc_unreachable ();
6034 case GIMPLE_CALL:
6036 tree fn;
6037 gcall *call_stmt = as_a <gcall *> (stmt);
6039 if (gimple_call_internal_p (stmt))
6041 enum tree_code subcode = ERROR_MARK;
6042 switch (gimple_call_internal_fn (stmt))
6044 case IFN_UBSAN_CHECK_ADD:
6045 subcode = PLUS_EXPR;
6046 break;
6047 case IFN_UBSAN_CHECK_SUB:
6048 subcode = MINUS_EXPR;
6049 break;
6050 case IFN_UBSAN_CHECK_MUL:
6051 subcode = MULT_EXPR;
6052 break;
6053 case IFN_BUILTIN_EXPECT:
6055 tree arg0 = gimple_call_arg (stmt, 0);
6056 tree op0 = (*valueize) (arg0);
6057 if (TREE_CODE (op0) == INTEGER_CST)
6058 return op0;
6059 return NULL_TREE;
6061 default:
6062 return NULL_TREE;
6064 tree arg0 = gimple_call_arg (stmt, 0);
6065 tree arg1 = gimple_call_arg (stmt, 1);
6066 tree op0 = (*valueize) (arg0);
6067 tree op1 = (*valueize) (arg1);
6069 if (TREE_CODE (op0) != INTEGER_CST
6070 || TREE_CODE (op1) != INTEGER_CST)
6072 switch (subcode)
6074 case MULT_EXPR:
6075 /* x * 0 = 0 * x = 0 without overflow. */
6076 if (integer_zerop (op0) || integer_zerop (op1))
6077 return build_zero_cst (TREE_TYPE (arg0));
6078 break;
6079 case MINUS_EXPR:
6080 /* y - y = 0 without overflow. */
6081 if (operand_equal_p (op0, op1, 0))
6082 return build_zero_cst (TREE_TYPE (arg0));
6083 break;
6084 default:
6085 break;
6088 tree res
6089 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6090 if (res
6091 && TREE_CODE (res) == INTEGER_CST
6092 && !TREE_OVERFLOW (res))
6093 return res;
6094 return NULL_TREE;
6097 fn = (*valueize) (gimple_call_fn (stmt));
6098 if (TREE_CODE (fn) == ADDR_EXPR
6099 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6100 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6101 && gimple_builtin_call_types_compatible_p (stmt,
6102 TREE_OPERAND (fn, 0)))
6104 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6105 tree retval;
6106 unsigned i;
6107 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6108 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6109 retval = fold_builtin_call_array (loc,
6110 gimple_call_return_type (call_stmt),
6111 fn, gimple_call_num_args (stmt), args);
6112 if (retval)
6114 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6115 STRIP_NOPS (retval);
6116 retval = fold_convert (gimple_call_return_type (call_stmt),
6117 retval);
6119 return retval;
6121 return NULL_TREE;
6124 default:
6125 return NULL_TREE;
6129 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6130 Returns NULL_TREE if folding to a constant is not possible, otherwise
6131 returns a constant according to is_gimple_min_invariant. */
6133 tree
6134 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6136 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6137 if (res && is_gimple_min_invariant (res))
6138 return res;
6139 return NULL_TREE;
6143 /* The following set of functions are supposed to fold references using
6144 their constant initializers. */
6146 /* See if we can find constructor defining value of BASE.
6147 When we know the consructor with constant offset (such as
6148 base is array[40] and we do know constructor of array), then
6149 BIT_OFFSET is adjusted accordingly.
6151 As a special case, return error_mark_node when constructor
6152 is not explicitly available, but it is known to be zero
6153 such as 'static const int a;'. */
6154 static tree
6155 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6156 tree (*valueize)(tree))
6158 HOST_WIDE_INT bit_offset2, size, max_size;
6159 bool reverse;
/* Strip an outer MEM_REF, folding its constant byte offset into
   *BIT_OFFSET and valueizing the address it dereferences.  */
6161 if (TREE_CODE (base) == MEM_REF)
6163 if (!integer_zerop (TREE_OPERAND (base, 1)))
6165 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6166 return NULL_TREE;
6167 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6168 * BITS_PER_UNIT);
6171 if (valueize
6172 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6173 base = valueize (TREE_OPERAND (base, 0));
6174 if (!base || TREE_CODE (base) != ADDR_EXPR)
6175 return NULL_TREE;
6176 base = TREE_OPERAND (base, 0);
/* A bare SSA name base may itself valueize to something useful.  */
6178 else if (valueize
6179 && TREE_CODE (base) == SSA_NAME)
6180 base = valueize (base);
6182 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6183 DECL_INITIAL. If BASE is a nested reference into another
6184 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6185 the inner reference. */
6186 switch (TREE_CODE (base))
6188 case VAR_DECL:
6189 case CONST_DECL:
6191 tree init = ctor_for_folding (base);
6193 /* Our semantic is exact opposite of ctor_for_folding;
6194 NULL means unknown, while error_mark_node is 0. */
6195 if (init == error_mark_node)
6196 return NULL_TREE;
6197 if (!init)
6198 return error_mark_node;
6199 return init;
6202 case VIEW_CONVERT_EXPR:
6203 return get_base_constructor (TREE_OPERAND (base, 0),
6204 bit_offset, valueize)
6206 case ARRAY_REF:
6207 case COMPONENT_REF:
6208 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6209 &reverse);
6210 if (max_size == -1 || size != max_size)
6211 return NULL_TREE;
6212 *bit_offset += bit_offset2;
6213 return get_base_constructor (base, bit_offset, valueize);
6215 case CONSTRUCTOR:
6216 return base;
6218 default:
6219 if (CONSTANT_CLASS_P (base))
6220 return base;
6222 return NULL_TREE;
6226 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6227 SIZE to the memory at bit OFFSET. */
6229 static tree
6230 fold_array_ctor_reference (tree type, tree ctor,
6231 unsigned HOST_WIDE_INT offset,
6232 unsigned HOST_WIDE_INT size,
6233 tree from_decl)
6235 offset_int low_bound;
6236 offset_int elt_size;
6237 offset_int access_index;
6238 tree domain_type = NULL_TREE;
6239 HOST_WIDE_INT inner_offset;
6241 /* Compute low bound and elt size. */
6242 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6243 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6244 if (domain_type && TYPE_MIN_VALUE (domain_type))
6246 /* Static constructors for variably sized objects makes no sense. */
6247 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6248 return NULL_TREE;
6249 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6251 else
6252 low_bound = 0;
6253 /* Static constructors for variably sized objects makes no sense. */
6254 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6255 return NULL_TREE;
6256 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6258 /* We can handle only constantly sized accesses that are known to not
6259 be larger than size of array element. */
6260 if (!TYPE_SIZE_UNIT (type)
6261 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6262 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6263 || elt_size == 0)
6264 return NULL_TREE;
6266 /* Compute the array index we look for. */
6267 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6268 elt_size);
6269 access_index += low_bound;
6271 /* And offset within the access. */
6272 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6274 /* See if the array field is large enough to span whole access. We do not
6275 care to fold accesses spanning multiple array indexes. */
6276 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6277 return NULL_TREE;
/* Recurse into the constructor element covering the access, if one
   is recorded.  */
6278 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6279 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6281 /* When memory is not explicitely mentioned in constructor,
6282 it is 0 (or out of range). */
6283 return build_zero_cst (type);
6286 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6287 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6289 static tree
6290 fold_nonarray_ctor_reference (tree type, tree ctor,
6291 unsigned HOST_WIDE_INT offset,
6292 unsigned HOST_WIDE_INT size,
6293 tree from_decl)
6295 unsigned HOST_WIDE_INT cnt;
6296 tree cfield, cval;
/* Walk all fields recorded in the constructor, looking for one that
   fully covers the [OFFSET, OFFSET+SIZE) bit range.  */
6298 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6299 cval)
6301 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6302 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6303 tree field_size = DECL_SIZE (cfield);
6304 offset_int bitoffset;
6305 offset_int bitoffset_end, access_end;
6307 /* Variable sized objects in static constructors makes no sense,
6308 but field_size can be NULL for flexible array members. */
6309 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6310 && TREE_CODE (byte_offset) == INTEGER_CST
6311 && (field_size != NULL_TREE
6312 ? TREE_CODE (field_size) == INTEGER_CST
6313 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6315 /* Compute bit offset of the field. */
6316 bitoffset = (wi::to_offset (field_offset)
6317 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT))
6318 /* Compute bit offset where the field ends. */
6319 if (field_size != NULL_TREE)
6320 bitoffset_end = bitoffset + wi::to_offset (field_size);
6321 else
6322 bitoffset_end = 0;
6324 access_end = offset_int (offset) + size;
6326 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6327 [BITOFFSET, BITOFFSET_END)? */
6328 if (wi::cmps (access_end, bitoffset) > 0
6329 && (field_size == NULL_TREE
6330 || wi::lts_p (offset, bitoffset_end)))
6332 offset_int inner_offset = offset_int (offset) - bitoffset;
6333 /* We do have overlap. Now see if field is large enough to
6334 cover the access. Give up for accesses spanning multiple
6335 fields. */
6336 if (wi::cmps (access_end, bitoffset_end) > 0)
6337 return NULL_TREE;
6338 if (offset < bitoffset)
6339 return NULL_TREE;
6340 return fold_ctor_reference (type, cval,
6341 inner_offset.to_uhwi (), size,
6342 from_decl);
6345 /* When memory is not explicitely mentioned in constructor, it is 0. */
6346 return build_zero_cst (type);
6349 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6350 to the memory at bit OFFSET. */
6352 tree
6353 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6354 unsigned HOST_WIDE_INT size, tree from_decl)
6356 tree ret;
6358 /* We found the field with exact match. */
6359 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6360 && !offset)
6361 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6363 /* We are at the end of walk, see if we can view convert the
6364 result. */
6365 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6366 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6367 && !compare_tree_int (TYPE_SIZE (type), size)
6368 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6370 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6371 if (ret)
6373 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6374 if (ret)
6375 STRIP_USELESS_TYPE_CONVERSION (ret);
6377 return ret;
6379 /* For constants and byte-aligned/sized reads try to go through
6380 native_encode/interpret. */
6381 if (CONSTANT_CLASS_P (ctor)
6382 && BITS_PER_UNIT == 8
6383 && offset % BITS_PER_UNIT == 0
6384 && size % BITS_PER_UNIT == 0
6385 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6387 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6388 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6389 offset / BITS_PER_UNIT);
6390 if (len > 0)
6391 return native_interpret_expr (type, buf, len);
6393 if (TREE_CODE (ctor) == CONSTRUCTOR)
6396 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6397 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6398 return fold_array_ctor_reference (type, ctor, offset, size,
6399 from_decl);
6400 else
6401 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6402 from_decl);
6405 return NULL_TREE;
6408 /* Return the tree representing the element referenced by T if T is an
6409 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6410 names using VALUEIZE. Return NULL_TREE otherwise. */
6412 tree
6413 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6415 tree ctor, idx, base;
6416 HOST_WIDE_INT offset, size, max_size;
6417 tree tem;
6418 bool reverse;
6420 if (TREE_THIS_VOLATILE (t))
6421 return NULL_TREE;
6423 if (DECL_P (t))
6424 return get_symbol_constant_value (t);
6426 tem = fold_read_from_constant_string (t);
6427 if (tem)
6428 return tem;
6430 switch (TREE_CODE (t))
6432 case ARRAY_REF:
6433 case ARRAY_RANGE_REF:
6434 /* Constant indexes are handled well by get_base_constructor.
6435 Only special case variable offsets.
6436 FIXME: This code can't handle nested references with variable indexes
6437 (they will be handled only by iteration of ccp). Perhaps we can bring
6438 get_ref_base_and_extent here and make it use a valueize callback. */
6439 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6440 && valueize
6441 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6442 && TREE_CODE (idx) == INTEGER_CST)
6444 tree low_bound, unit_size;
6446 /* If the resulting bit-offset is constant, track it. */
6447 if ((low_bound = array_ref_low_bound (t),
6448 TREE_CODE (low_bound) == INTEGER_CST)
6449 && (unit_size = array_ref_element_size (t),
6450 tree_fits_uhwi_p (unit_size)))
6452 offset_int woffset
6453 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6454 TYPE_PRECISION (TREE_TYPE (idx)));
6456 if (wi::fits_shwi_p (woffset))
6458 offset = woffset.to_shwi ();
6459 /* TODO: This code seems wrong, multiply then check
6460 to see if it fits. */
6461 offset *= tree_to_uhwi (unit_size);
6462 offset *= BITS_PER_UNIT;
6464 base = TREE_OPERAND (t, 0);
6465 ctor = get_base_constructor (base, &offset, valueize);
6466 /* Empty constructor. Always fold to 0. */
6467 if (ctor == error_mark_node)
6468 return build_zero_cst (TREE_TYPE (t));
6469 /* Out of bound array access. Value is undefined,
6470 but don't fold. */
6471 if (offset < 0)
6472 return NULL_TREE;
6473 /* We can not determine ctor. */
6474 if (!ctor)
6475 return NULL_TREE;
6476 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6477 tree_to_uhwi (unit_size)
6478 * BITS_PER_UNIT,
6479 base);
6483 /* Fallthru. */
6485 case COMPONENT_REF:
6486 case BIT_FIELD_REF:
6487 case TARGET_MEM_REF:
6488 case MEM_REF:
6489 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6490 ctor = get_base_constructor (base, &offset, valueize);
6492 /* Empty constructor. Always fold to 0. */
6493 if (ctor == error_mark_node)
6494 return build_zero_cst (TREE_TYPE (t));
6495 /* We do not know precise address. */
6496 if (max_size == -1 || max_size != size)
6497 return NULL_TREE;
6498 /* We can not determine ctor. */
6499 if (!ctor)
6500 return NULL_TREE;
6502 /* Out of bound array access. Value is undefined, but don't fold. */
6503 if (offset < 0)
6504 return NULL_TREE;
6506 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6507 base);
6509 case REALPART_EXPR:
6510 case IMAGPART_EXPR:
6512 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6513 if (c && TREE_CODE (c) == COMPLEX_CST)
6514 return fold_build1_loc (EXPR_LOCATION (t),
6515 TREE_CODE (t), TREE_TYPE (t), c);
6516 break;
6519 default:
6520 break;
6523 return NULL_TREE;
6526 tree
6527 fold_const_aggregate_ref (tree t)
6529 return fold_const_aggregate_ref_1 (t, NULL);
6532 /* Lookup virtual method with index TOKEN in a virtual table V
6533 at OFFSET.
6534 Set CAN_REFER if non-NULL to false if method
6535 is not referable or if the virtual table is ill-formed (such as rewriten
6536 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6538 tree
6539 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6540 tree v,
6541 unsigned HOST_WIDE_INT offset,
6542 bool *can_refer)
6544 tree vtable = v, init, fn;
6545 unsigned HOST_WIDE_INT size;
6546 unsigned HOST_WIDE_INT elt_size, access_index;
6547 tree domain_type;
6549 if (can_refer)
6550 *can_refer = true;
6552 /* First of all double check we have virtual table. */
6553 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6555 /* Pass down that we lost track of the target. */
6556 if (can_refer)
6557 *can_refer = false;
6558 return NULL_TREE;
6561 init = ctor_for_folding (v);
6563 /* The virtual tables should always be born with constructors
6564 and we always should assume that they are avaialble for
6565 folding. At the moment we do not stream them in all cases,
6566 but it should never happen that ctor seem unreachable. */
6567 gcc_assert (init);
6568 if (init == error_mark_node)
6570 gcc_assert (in_lto_p);
6571 /* Pass down that we lost track of the target. */
6572 if (can_refer)
6573 *can_refer = false;
6574 return NULL_TREE;
6576 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6577 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6578 offset *= BITS_PER_UNIT;
6579 offset += token * size;
6581 /* Lookup the value in the constructor that is assumed to be array.
6582 This is equivalent to
6583 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6584 offset, size, NULL);
6585 but in a constant time. We expect that frontend produced a simple
6586 array without indexed initializers. */
6588 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6589 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6590 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6591 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6593 access_index = offset / BITS_PER_UNIT / elt_size;
6594 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6596 /* This code makes an assumption that there are no
6597 indexed fileds produced by C++ FE, so we can directly index the array. */
6598 if (access_index < CONSTRUCTOR_NELTS (init))
6600 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6601 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6602 STRIP_NOPS (fn);
6604 else
6605 fn = NULL;
6607 /* For type inconsistent program we may end up looking up virtual method
6608 in virtual table that does not contain TOKEN entries. We may overrun
6609 the virtual table and pick up a constant or RTTI info pointer.
6610 In any case the call is undefined. */
6611 if (!fn
6612 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6613 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6614 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6615 else
6617 fn = TREE_OPERAND (fn, 0);
6619 /* When cgraph node is missing and function is not public, we cannot
6620 devirtualize. This can happen in WHOPR when the actual method
6621 ends up in other partition, because we found devirtualization
6622 possibility too late. */
6623 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6625 if (can_refer)
6627 *can_refer = false;
6628 return fn;
6630 return NULL_TREE;
6634 /* Make sure we create a cgraph node for functions we'll reference.
6635 They can be non-existent if the reference comes from an entry
6636 of an external vtable for example. */
6637 cgraph_node::get_create (fn);
6639 return fn;
6642 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6643 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6644 KNOWN_BINFO carries the binfo describing the true type of
6645 OBJ_TYPE_REF_OBJECT(REF).
6646 Set CAN_REFER if non-NULL to false if method
6647 is not referable or if the virtual table is ill-formed (such as rewriten
6648 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6650 tree
6651 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6652 bool *can_refer)
6654 unsigned HOST_WIDE_INT offset;
6655 tree v;
6657 v = BINFO_VTABLE (known_binfo);
6658 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6659 if (!v)
6660 return NULL_TREE;
6662 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6664 if (can_refer)
6665 *can_refer = false;
6666 return NULL_TREE;
6668 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6671 /* Given a pointer value T, return a simplified version of an
6672 indirection through T, or NULL_TREE if no simplification is
6673 possible. Note that the resulting type may be different from
6674 the type pointed to in the sense that it is still compatible
6675 from the langhooks point of view. */
6677 tree
6678 gimple_fold_indirect_ref (tree t)
6680 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6681 tree sub = t;
6682 tree subtype;
6684 STRIP_NOPS (sub);
6685 subtype = TREE_TYPE (sub);
6686 if (!POINTER_TYPE_P (subtype)
6687 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6688 return NULL_TREE;
6690 if (TREE_CODE (sub) == ADDR_EXPR)
6692 tree op = TREE_OPERAND (sub, 0);
6693 tree optype = TREE_TYPE (op);
6694 /* *&p => p */
6695 if (useless_type_conversion_p (type, optype))
6696 return op;
6698 /* *(foo *)&fooarray => fooarray[0] */
6699 if (TREE_CODE (optype) == ARRAY_TYPE
6700 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6701 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6703 tree type_domain = TYPE_DOMAIN (optype);
6704 tree min_val = size_zero_node;
6705 if (type_domain && TYPE_MIN_VALUE (type_domain))
6706 min_val = TYPE_MIN_VALUE (type_domain);
6707 if (TREE_CODE (min_val) == INTEGER_CST)
6708 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6710 /* *(foo *)&complexfoo => __real__ complexfoo */
6711 else if (TREE_CODE (optype) == COMPLEX_TYPE
6712 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6713 return fold_build1 (REALPART_EXPR, type, op);
6714 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6715 else if (TREE_CODE (optype) == VECTOR_TYPE
6716 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6718 tree part_width = TYPE_SIZE (type);
6719 tree index = bitsize_int (0);
6720 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6724 /* *(p + CST) -> ... */
6725 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6726 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6728 tree addr = TREE_OPERAND (sub, 0);
6729 tree off = TREE_OPERAND (sub, 1);
6730 tree addrtype;
6732 STRIP_NOPS (addr);
6733 addrtype = TREE_TYPE (addr);
6735 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6736 if (TREE_CODE (addr) == ADDR_EXPR
6737 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6738 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6739 && tree_fits_uhwi_p (off))
6741 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6742 tree part_width = TYPE_SIZE (type);
6743 unsigned HOST_WIDE_INT part_widthi
6744 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6745 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6746 tree index = bitsize_int (indexi);
6747 if (offset / part_widthi
6748 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6749 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6750 part_width, index);
6753 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6754 if (TREE_CODE (addr) == ADDR_EXPR
6755 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6756 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6758 tree size = TYPE_SIZE_UNIT (type);
6759 if (tree_int_cst_equal (size, off))
6760 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6763 /* *(p + CST) -> MEM_REF <p, CST>. */
6764 if (TREE_CODE (addr) != ADDR_EXPR
6765 || DECL_P (TREE_OPERAND (addr, 0)))
6766 return fold_build2 (MEM_REF, type,
6767 addr,
6768 wide_int_to_tree (ptype, off));
6771 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6772 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6773 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6774 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6776 tree type_domain;
6777 tree min_val = size_zero_node;
6778 tree osub = sub;
6779 sub = gimple_fold_indirect_ref (sub);
6780 if (! sub)
6781 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6782 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6783 if (type_domain && TYPE_MIN_VALUE (type_domain))
6784 min_val = TYPE_MIN_VALUE (type_domain);
6785 if (TREE_CODE (min_val) == INTEGER_CST)
6786 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6789 return NULL_TREE;
6792 /* Return true if CODE is an operation that when operating on signed
6793 integer types involves undefined behavior on overflow and the
6794 operation can be expressed with unsigned arithmetic. */
6796 bool
6797 arith_code_with_undefined_signed_overflow (tree_code code)
6799 switch (code)
6801 case PLUS_EXPR:
6802 case MINUS_EXPR:
6803 case MULT_EXPR:
6804 case NEGATE_EXPR:
6805 case POINTER_PLUS_EXPR:
6806 return true;
6807 default:
6808 return false;
6812 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6813 operation that can be transformed to unsigned arithmetic by converting
6814 its operand, carrying out the operation in the corresponding unsigned
6815 type and converting the result back to the original type.
6817 Returns a sequence of statements that replace STMT and also contain
6818 a modified form of STMT itself. */
6820 gimple_seq
6821 rewrite_to_defined_overflow (gimple *stmt)
6823 if (dump_file && (dump_flags & TDF_DETAILS))
6825 fprintf (dump_file, "rewriting stmt with undefined signed "
6826 "overflow ");
6827 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6830 tree lhs = gimple_assign_lhs (stmt);
6831 tree type = unsigned_type_for (TREE_TYPE (lhs));
6832 gimple_seq stmts = NULL;
6833 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6835 tree op = gimple_op (stmt, i);
6836 op = gimple_convert (&stmts, type, op);
6837 gimple_set_op (stmt, i, op);
6839 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6840 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6841 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6842 gimple_seq_add_stmt (&stmts, stmt);
6843 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6844 gimple_seq_add_stmt (&stmts, cvt);
6846 return stmts;
6850 /* The valueization hook we use for the gimple_build API simplification.
6851 This makes us match fold_buildN behavior by only combining with
6852 statements in the sequence(s) we are currently building. */
6854 static tree
6855 gimple_build_valueize (tree op)
6857 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6858 return op;
6859 return NULL_TREE;
6862 /* Build the expression CODE OP0 of type TYPE with location LOC,
6863 simplifying it first if possible. Returns the built
6864 expression value and appends statements possibly defining it
6865 to SEQ. */
6867 tree
6868 gimple_build (gimple_seq *seq, location_t loc,
6869 enum tree_code code, tree type, tree op0)
6871 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6872 if (!res)
6874 res = create_tmp_reg_or_ssa_name (type);
6875 gimple *stmt;
6876 if (code == REALPART_EXPR
6877 || code == IMAGPART_EXPR
6878 || code == VIEW_CONVERT_EXPR)
6879 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6880 else
6881 stmt = gimple_build_assign (res, code, op0);
6882 gimple_set_location (stmt, loc);
6883 gimple_seq_add_stmt_without_update (seq, stmt);
6885 return res;
6888 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6889 simplifying it first if possible. Returns the built
6890 expression value and appends statements possibly defining it
6891 to SEQ. */
6893 tree
6894 gimple_build (gimple_seq *seq, location_t loc,
6895 enum tree_code code, tree type, tree op0, tree op1)
6897 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6898 if (!res)
6900 res = create_tmp_reg_or_ssa_name (type);
6901 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6902 gimple_set_location (stmt, loc);
6903 gimple_seq_add_stmt_without_update (seq, stmt);
6905 return res;
6908 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6909 simplifying it first if possible. Returns the built
6910 expression value and appends statements possibly defining it
6911 to SEQ. */
6913 tree
6914 gimple_build (gimple_seq *seq, location_t loc,
6915 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6917 tree res = gimple_simplify (code, type, op0, op1, op2,
6918 seq, gimple_build_valueize);
6919 if (!res)
6921 res = create_tmp_reg_or_ssa_name (type);
6922 gimple *stmt;
6923 if (code == BIT_FIELD_REF)
6924 stmt = gimple_build_assign (res, code,
6925 build3 (code, type, op0, op1, op2));
6926 else
6927 stmt = gimple_build_assign (res, code, op0, op1, op2);
6928 gimple_set_location (stmt, loc);
6929 gimple_seq_add_stmt_without_update (seq, stmt);
6931 return res;
6934 /* Build the call FN (ARG0) with a result of type TYPE
6935 (or no result if TYPE is void) with location LOC,
6936 simplifying it first if possible. Returns the built
6937 expression value (or NULL_TREE if TYPE is void) and appends
6938 statements possibly defining it to SEQ. */
6940 tree
6941 gimple_build (gimple_seq *seq, location_t loc,
6942 enum built_in_function fn, tree type, tree arg0)
6944 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6945 if (!res)
6947 tree decl = builtin_decl_implicit (fn);
6948 gimple *stmt = gimple_build_call (decl, 1, arg0);
6949 if (!VOID_TYPE_P (type))
6951 res = create_tmp_reg_or_ssa_name (type);
6952 gimple_call_set_lhs (stmt, res);
6954 gimple_set_location (stmt, loc);
6955 gimple_seq_add_stmt_without_update (seq, stmt);
6957 return res;
6960 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6961 (or no result if TYPE is void) with location LOC,
6962 simplifying it first if possible. Returns the built
6963 expression value (or NULL_TREE if TYPE is void) and appends
6964 statements possibly defining it to SEQ. */
6966 tree
6967 gimple_build (gimple_seq *seq, location_t loc,
6968 enum built_in_function fn, tree type, tree arg0, tree arg1)
6970 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6971 if (!res)
6973 tree decl = builtin_decl_implicit (fn);
6974 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6975 if (!VOID_TYPE_P (type))
6977 res = create_tmp_reg_or_ssa_name (type);
6978 gimple_call_set_lhs (stmt, res);
6980 gimple_set_location (stmt, loc);
6981 gimple_seq_add_stmt_without_update (seq, stmt);
6983 return res;
6986 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6987 (or no result if TYPE is void) with location LOC,
6988 simplifying it first if possible. Returns the built
6989 expression value (or NULL_TREE if TYPE is void) and appends
6990 statements possibly defining it to SEQ. */
6992 tree
6993 gimple_build (gimple_seq *seq, location_t loc,
6994 enum built_in_function fn, tree type,
6995 tree arg0, tree arg1, tree arg2)
6997 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6998 seq, gimple_build_valueize);
6999 if (!res)
7001 tree decl = builtin_decl_implicit (fn);
7002 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7003 if (!VOID_TYPE_P (type))
7005 res = create_tmp_reg_or_ssa_name (type);
7006 gimple_call_set_lhs (stmt, res);
7008 gimple_set_location (stmt, loc);
7009 gimple_seq_add_stmt_without_update (seq, stmt);
7011 return res;
7014 /* Build the conversion (TYPE) OP with a result of type TYPE
7015 with location LOC if such conversion is neccesary in GIMPLE,
7016 simplifying it first.
7017 Returns the built expression value and appends
7018 statements possibly defining it to SEQ. */
7020 tree
7021 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7023 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7024 return op;
7025 return gimple_build (seq, loc, NOP_EXPR, type, op);
7028 /* Build the conversion (ptrofftype) OP with a result of a type
7029 compatible with ptrofftype with location LOC if such conversion
7030 is neccesary in GIMPLE, simplifying it first.
7031 Returns the built expression value and appends
7032 statements possibly defining it to SEQ. */
7034 tree
7035 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7037 if (ptrofftype_p (TREE_TYPE (op)))
7038 return op;
7039 return gimple_convert (seq, loc, sizetype, op);
7042 /* Return true if the result of assignment STMT is known to be non-negative.
7043 If the return value is based on the assumption that signed overflow is
7044 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7045 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7047 static bool
7048 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7049 int depth)
7051 enum tree_code code = gimple_assign_rhs_code (stmt);
7052 switch (get_gimple_rhs_class (code))
7054 case GIMPLE_UNARY_RHS:
7055 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7056 gimple_expr_type (stmt),
7057 gimple_assign_rhs1 (stmt),
7058 strict_overflow_p, depth);
7059 case GIMPLE_BINARY_RHS:
7060 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7061 gimple_expr_type (stmt),
7062 gimple_assign_rhs1 (stmt),
7063 gimple_assign_rhs2 (stmt),
7064 strict_overflow_p, depth);
7065 case GIMPLE_TERNARY_RHS:
7066 return false;
7067 case GIMPLE_SINGLE_RHS:
7068 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7069 strict_overflow_p, depth);
7070 case GIMPLE_INVALID_RHS:
7071 break;
7073 gcc_unreachable ();
7076 /* Return true if return value of call STMT is known to be non-negative.
7077 If the return value is based on the assumption that signed overflow is
7078 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7079 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7081 static bool
7082 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7083 int depth)
7085 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7086 gimple_call_arg (stmt, 0) : NULL_TREE;
7087 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7088 gimple_call_arg (stmt, 1) : NULL_TREE;
7090 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7091 gimple_call_combined_fn (stmt),
7092 arg0,
7093 arg1,
7094 strict_overflow_p, depth);
7097 /* Return true if return value of call STMT is known to be non-negative.
7098 If the return value is based on the assumption that signed overflow is
7099 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7100 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7102 static bool
7103 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7104 int depth)
7106 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7108 tree arg = gimple_phi_arg_def (stmt, i);
7109 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7110 return false;
7112 return true;
7115 /* Return true if STMT is known to compute a non-negative value.
7116 If the return value is based on the assumption that signed overflow is
7117 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7118 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7120 bool
7121 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7122 int depth)
7124 switch (gimple_code (stmt))
7126 case GIMPLE_ASSIGN:
7127 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7128 depth);
7129 case GIMPLE_CALL:
7130 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7131 depth);
7132 case GIMPLE_PHI:
7133 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7134 depth);
7135 default:
7136 return false;
7140 /* Return true if the floating-point value computed by assignment STMT
7141 is known to have an integer value. We also allow +Inf, -Inf and NaN
7142 to be considered integer values. Return false for signaling NaN.
7144 DEPTH is the current nesting depth of the query. */
7146 static bool
7147 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7149 enum tree_code code = gimple_assign_rhs_code (stmt);
7150 switch (get_gimple_rhs_class (code))
7152 case GIMPLE_UNARY_RHS:
7153 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7154 gimple_assign_rhs1 (stmt), depth);
7155 case GIMPLE_BINARY_RHS:
7156 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7157 gimple_assign_rhs1 (stmt),
7158 gimple_assign_rhs2 (stmt), depth);
7159 case GIMPLE_TERNARY_RHS:
7160 return false;
7161 case GIMPLE_SINGLE_RHS:
7162 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7163 case GIMPLE_INVALID_RHS:
7164 break;
7166 gcc_unreachable ();
7169 /* Return true if the floating-point value computed by call STMT is known
7170 to have an integer value. We also allow +Inf, -Inf and NaN to be
7171 considered integer values. Return false for signaling NaN.
7173 DEPTH is the current nesting depth of the query. */
7175 static bool
7176 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7178 tree arg0 = (gimple_call_num_args (stmt) > 0
7179 ? gimple_call_arg (stmt, 0)
7180 : NULL_TREE);
7181 tree arg1 = (gimple_call_num_args (stmt) > 1
7182 ? gimple_call_arg (stmt, 1)
7183 : NULL_TREE);
7184 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7185 arg0, arg1, depth);
7188 /* Return true if the floating-point result of phi STMT is known to have
7189 an integer value. We also allow +Inf, -Inf and NaN to be considered
7190 integer values. Return false for signaling NaN.
7192 DEPTH is the current nesting depth of the query. */
7194 static bool
7195 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7197 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7199 tree arg = gimple_phi_arg_def (stmt, i);
7200 if (!integer_valued_real_single_p (arg, depth + 1))
7201 return false;
7203 return true;
7206 /* Return true if the floating-point value computed by STMT is known
7207 to have an integer value. We also allow +Inf, -Inf and NaN to be
7208 considered integer values. Return false for signaling NaN.
7210 DEPTH is the current nesting depth of the query. */
7212 bool
7213 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7215 switch (gimple_code (stmt))
7217 case GIMPLE_ASSIGN:
7218 return gimple_assign_integer_valued_real_p (stmt, depth);
7219 case GIMPLE_CALL:
7220 return gimple_call_integer_valued_real_p (stmt, depth);
7221 case GIMPLE_PHI:
7222 return gimple_phi_integer_valued_real_p (stmt, depth);
7223 default:
7224 return false;