[10/77] Make assemble_real take a scalar_float_mode
[official-gcc.git] / gcc / gimple-fold.c
blobbf39f283ad93c2cd836a1fc4b82c5c171d2f0979
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-general.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
58 #include "fold-const-call.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "asan.h"
63 /* Return true when DECL can be referenced from current unit.
64 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
65 We can get declarations that are not possible to reference for various
66 reasons:
68 1) When analyzing C++ virtual tables.
69 C++ virtual tables do have known constructors even
70 when they are keyed to other compilation unit.
71 Those tables can contain pointers to methods and vars
72 in other units. Those methods have both STATIC and EXTERNAL
73 set.
74 2) In WHOPR mode devirtualization might lead to reference
75 to method that was partitioned elsewhere.
76 In this case we have static VAR_DECL or FUNCTION_DECL
77 that has no corresponding callgraph/varpool node
78 declaring the body.
79 3) COMDAT functions referred by external vtables that
80 we devirtualize only during final compilation stage.
81 At this time we already decided that we will not output
82 the function body and thus we can't reference the symbol
83 directly. */
85 static bool
86 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
88 varpool_node *vnode;
89 struct cgraph_node *node;
90 symtab_node *snode;
/* Abstract declarations exist for debug info only and never have a
   body or storage we could reference.  */
92 if (DECL_ABSTRACT_P (decl))
93 return false;
95 /* We are concerned only about static/external vars and functions. */
96 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
97 || !VAR_OR_FUNCTION_DECL_P (decl))
98 return true;
100 /* Static objects can be referred only if they were not optimized out yet. */
101 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
103 /* Before we start optimizing unreachable code we can be sure all
104 static objects are defined. */
105 if (symtab->function_flags_ready)
106 return true;
107 snode = symtab_node::get (decl);
108 if (!snode || !snode->definition)
109 return false;
110 node = dyn_cast <cgraph_node *> (snode);
/* A function body fully inlined into its callers may already be gone;
   such a symbol can no longer be referenced directly.  */
111 return !node || !node->global.inlined_to;
114 /* We will later output the initializer, so we can refer to it.
115 So we are concerned only when DECL comes from initializer of
116 external var or var that has been optimized out. */
117 if (!from_decl
118 || !VAR_P (from_decl)
119 || (!DECL_EXTERNAL (from_decl)
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->definition)
122 || (flag_ltrans
123 && (vnode = varpool_node::get (from_decl)) != NULL
124 && vnode->in_other_partition))
125 return true;
126 /* We are folding reference from external vtable. The vtable may refer
127 to a symbol keyed to other compilation unit. The other compilation
128 unit may be in separate DSO and the symbol may be hidden. */
129 if (DECL_VISIBILITY_SPECIFIED (decl)
130 && DECL_EXTERNAL (decl)
131 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
132 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
133 return false;
134 /* When function is public, we always can introduce new reference.
135 Exception are the COMDAT functions where introducing a direct
136 reference imply need to include function body in the current unit. */
137 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
138 return true;
139 /* We have COMDAT. We are going to check if we still have definition
140 or if the definition is going to be output in other partition.
141 Bypass this when gimplifying; all needed functions will be produced.
143 As observed in PR20991 for already optimized out comdat virtual functions
144 it may be tempting to not necessarily give up because the copy will be
145 output elsewhere when corresponding vtable is output.
146 This is however not possible - ABI specify that COMDATs are output in
147 units where they are used and when the other unit was compiled with LTO
148 it is possible that vtable was kept public while the function itself
149 was privatized. */
150 if (!symtab->function_flags_ready)
151 return true;
153 snode = symtab_node::get (decl);
154 if (!snode
155 || ((!snode->definition || DECL_EXTERNAL (decl))
156 && (!snode->in_other_partition
157 || (!snode->forced_by_abi && !snode->force_output))))
158 return false;
/* As above: an inlined-away body cannot be referenced.  */
159 node = dyn_cast <cgraph_node *> (snode);
160 return !node || !node->global.inlined_to;
163 /* Create a temporary for TYPE for a statement STMT. If the current function
164 is in SSA form, a SSA name is created. Otherwise a temporary register
165 is made. */
167 tree
168 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
170 if (gimple_in_ssa_p (cfun))
171 return make_ssa_name (type, stmt);
172 else
173 return create_tmp_reg (type);
176 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
177 acceptable form for is_gimple_min_invariant.
178 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
180 tree
181 canonicalize_constructor_val (tree cval, tree from_decl)
183 tree orig_cval = cval;
184 STRIP_NOPS (cval);
/* Canonicalize &p + CST into &MEM[p, CST] so the whole expression is a
   single invariant address.  */
185 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
186 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
188 tree ptr = TREE_OPERAND (cval, 0);
189 if (is_gimple_min_invariant (ptr))
190 cval = build1_loc (EXPR_LOCATION (cval),
191 ADDR_EXPR, TREE_TYPE (ptr),
192 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
193 ptr,
194 fold_convert (ptr_type_node,
195 TREE_OPERAND (cval, 1))));
197 if (TREE_CODE (cval) == ADDR_EXPR)
199 tree base = NULL_TREE;
/* Compound literals are replaced by their anonymous backing decl.  */
200 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
202 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
203 if (base)
204 TREE_OPERAND (cval, 0) = base;
206 else
207 base = get_base_address (TREE_OPERAND (cval, 0));
208 if (!base)
209 return NULL_TREE;
/* Refuse addresses of symbols the current unit is not allowed to
   reference (see can_refer_decl_in_current_unit_p) or erroneous decls.  */
211 if (VAR_OR_FUNCTION_DECL_P (base)
212 && !can_refer_decl_in_current_unit_p (base, from_decl))
213 return NULL_TREE;
214 if (TREE_TYPE (base) == error_mark_node)
215 return NULL_TREE;
/* Taking the address of a variable makes it addressable.  */
216 if (VAR_P (base))
217 TREE_ADDRESSABLE (base) = 1;
218 else if (TREE_CODE (base) == FUNCTION_DECL)
220 /* Make sure we create a cgraph node for functions we'll reference.
221 They can be non-existent if the reference comes from an entry
222 of an external vtable for example. */
223 cgraph_node::get_create (base);
225 /* Fixup types in global initializers. */
226 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
227 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
/* Keep the type the caller saw; only strip/convert when needed.  */
229 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
230 cval = fold_convert (TREE_TYPE (orig_cval), cval);
231 return cval;
/* Constants with TREE_OVERFLOW set are not valid gimple invariants.  */
233 if (TREE_OVERFLOW_P (cval))
234 return drop_tree_overflow (cval);
235 return orig_cval;
238 /* If SYM is a constant variable with known value, return the value.
239 NULL_TREE is returned otherwise. */
241 tree
242 get_symbol_constant_value (tree sym)
244 tree val = ctor_for_folding (sym);
245 if (val != error_mark_node)
247 if (val)
249 val = canonicalize_constructor_val (unshare_expr (val), sym);
250 if (val && is_gimple_min_invariant (val))
251 return val;
252 else
253 return NULL_TREE;
255 /* Variables declared 'const' without an initializer
256 have zero as the initializer if they may not be
257 overridden at link or run time. */
258 if (!val
259 && is_gimple_reg_type (TREE_TYPE (sym)))
260 return build_zero_cst (TREE_TYPE (sym));
263 return NULL_TREE;
268 /* Subroutine of fold_stmt. We perform several simplifications of the
269 memory reference tree EXPR and make sure to re-gimplify them properly
270 after propagation of constant addresses. IS_LHS is true if the
271 reference is supposed to be an lvalue. */
273 static tree
274 maybe_fold_reference (tree expr, bool is_lhs)
276 tree result;
278 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
279 || TREE_CODE (expr) == REALPART_EXPR
280 || TREE_CODE (expr) == IMAGPART_EXPR)
281 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
282 return fold_unary_loc (EXPR_LOCATION (expr),
283 TREE_CODE (expr),
284 TREE_TYPE (expr),
285 TREE_OPERAND (expr, 0));
286 else if (TREE_CODE (expr) == BIT_FIELD_REF
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_ternary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0),
292 TREE_OPERAND (expr, 1),
293 TREE_OPERAND (expr, 2));
295 if (!is_lhs
296 && (result = fold_const_aggregate_ref (expr))
297 && is_gimple_min_invariant (result))
298 return result;
300 return NULL_TREE;
304 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
305 replacement rhs for the statement or NULL_TREE if no simplification
306 could be made. It is assumed that the operands have been previously
307 folded. */
309 static tree
310 fold_gimple_assign (gimple_stmt_iterator *si)
312 gimple *stmt = gsi_stmt (*si);
313 enum tree_code subcode = gimple_assign_rhs_code (stmt);
314 location_t loc = gimple_location (stmt);
316 tree result = NULL_TREE;
/* Dispatch on the shape of the right-hand side.  */
318 switch (get_gimple_rhs_class (subcode))
320 case GIMPLE_SINGLE_RHS:
322 tree rhs = gimple_assign_rhs1 (stmt);
/* Clobbers are markers, not values; leave them alone.  */
324 if (TREE_CLOBBER_P (rhs))
325 return NULL_TREE;
327 if (REFERENCE_CLASS_P (rhs))
328 return maybe_fold_reference (rhs, false);
330 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
332 tree val = OBJ_TYPE_REF_EXPR (rhs);
333 if (is_gimple_min_invariant (val))
334 return val;
/* Try devirtualization: if the set of possible polymorphic call
   targets is known and small, replace the vtable load by a direct
   function address (or 0 when no target is possible).  */
335 else if (flag_devirtualize && virtual_method_call_p (rhs))
337 bool final;
338 vec <cgraph_node *>targets
339 = possible_polymorphic_call_targets (rhs, stmt, &final);
340 if (final && targets.length () <= 1 && dbg_cnt (devirt))
342 if (dump_enabled_p ())
344 location_t loc = gimple_location_safe (stmt);
345 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
346 "resolving virtual function address "
347 "reference to function %s\n",
348 targets.length () == 1
349 ? targets[0]->name ()
350 : "NULL");
352 if (targets.length () == 1)
354 val = fold_convert (TREE_TYPE (val),
355 build_fold_addr_expr_loc
356 (loc, targets[0]->decl));
357 STRIP_USELESS_TYPE_CONVERSION (val);
359 else
360 /* We can not use __builtin_unreachable here because it
361 can not have address taken. */
362 val = build_int_cst (TREE_TYPE (val), 0);
363 return val;
/* Try to simplify the object whose address is taken.  */
368 else if (TREE_CODE (rhs) == ADDR_EXPR)
370 tree ref = TREE_OPERAND (rhs, 0);
371 tree tem = maybe_fold_reference (ref, true);
372 if (tem
373 && TREE_CODE (tem) == MEM_REF
374 && integer_zerop (TREE_OPERAND (tem, 1)))
375 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
376 else if (tem)
377 result = fold_convert (TREE_TYPE (rhs),
378 build_fold_addr_expr_loc (loc, tem));
379 else if (TREE_CODE (ref) == MEM_REF
380 && integer_zerop (TREE_OPERAND (ref, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
383 if (result)
385 /* Strip away useless type conversions. Both the
386 NON_LVALUE_EXPR that may have been added by fold, and
387 "useless" type conversions that might now be apparent
388 due to propagation. */
389 STRIP_USELESS_TYPE_CONVERSION (result);
391 if (result != rhs && valid_gimple_rhs_p (result))
392 return result;
396 else if (TREE_CODE (rhs) == CONSTRUCTOR
397 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
399 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
400 unsigned i;
401 tree val;
403 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
404 if (! CONSTANT_CLASS_P (val))
405 return NULL_TREE;
407 return build_vector_from_ctor (TREE_TYPE (rhs),
408 CONSTRUCTOR_ELTS (rhs));
/* A bare decl may have a known constant value.  */
411 else if (DECL_P (rhs))
412 return get_symbol_constant_value (rhs);
414 break;
/* Unary and binary RHSes are handled by the match-and-simplify
   machinery elsewhere; nothing to do here.  */
416 case GIMPLE_UNARY_RHS:
417 break;
419 case GIMPLE_BINARY_RHS:
420 break;
422 case GIMPLE_TERNARY_RHS:
423 result = fold_ternary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt),
427 gimple_assign_rhs3 (stmt));
429 if (result)
431 STRIP_USELESS_TYPE_CONVERSION (result);
432 if (valid_gimple_rhs_p (result))
433 return result;
435 break;
437 case GIMPLE_INVALID_RHS:
438 gcc_unreachable ();
441 return NULL_TREE;
445 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
446 adjusting the replacement stmts location and virtual operands.
447 If the statement has a lhs the last stmt in the sequence is expected
448 to assign to that lhs. */
450 static void
451 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
453 gimple *stmt = gsi_stmt (*si_p);
455 if (gimple_has_location (stmt))
456 annotate_all_with_location (stmts, gimple_location (stmt));
458 /* First iterate over the replacement statements backward, assigning
459 virtual operands to their defining statements. */
460 gimple *laststore = NULL;
461 for (gimple_stmt_iterator i = gsi_last (stmts);
462 !gsi_end_p (i); gsi_prev (&i))
464 gimple *new_stmt = gsi_stmt (i);
/* Only statements that may store need a VDEF: aggregate assignments
   and calls that are not novops/pure/const/noreturn.  */
465 if ((gimple_assign_single_p (new_stmt)
466 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
467 || (is_gimple_call (new_stmt)
468 && (gimple_call_flags (new_stmt)
469 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
471 tree vdef;
/* The last store in the sequence inherits the replaced statement's
   VDEF; earlier stores get fresh virtual SSA names.  */
472 if (!laststore)
473 vdef = gimple_vdef (stmt);
474 else
475 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
476 gimple_set_vdef (new_stmt, vdef);
477 if (vdef && TREE_CODE (vdef) == SSA_NAME)
478 SSA_NAME_DEF_STMT (vdef) = new_stmt;
479 laststore = new_stmt;
483 /* Second iterate over the statements forward, assigning virtual
484 operands to their uses. */
485 tree reaching_vuse = gimple_vuse (stmt);
486 for (gimple_stmt_iterator i = gsi_start (stmts);
487 !gsi_end_p (i); gsi_next (&i))
489 gimple *new_stmt = gsi_stmt (i);
490 /* If the new statement possibly has a VUSE, update it with exact SSA
491 name we know will reach this one. */
492 if (gimple_has_mem_ops (new_stmt))
493 gimple_set_vuse (new_stmt, reaching_vuse);
494 gimple_set_modified (new_stmt, true);
495 if (gimple_vdef (new_stmt))
496 reaching_vuse = gimple_vdef (new_stmt);
499 /* If the new sequence does not do a store release the virtual
500 definition of the original statement. */
501 if (reaching_vuse
502 && reaching_vuse == gimple_vuse (stmt))
504 tree vdef = gimple_vdef (stmt);
505 if (vdef
506 && TREE_CODE (vdef) == SSA_NAME)
508 unlink_stmt_vdef (stmt);
509 release_ssa_name (vdef);
513 /* Finally replace the original statement with the sequence. */
514 gsi_replace_with_seq (si_p, stmts, false);
517 /* Convert EXPR into a GIMPLE value suitable for substitution on the
518 RHS of an assignment. Insert the necessary statements before
519 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
520 is replaced. If the call is expected to produces a result, then it
521 is replaced by an assignment of the new RHS to the result variable.
522 If the result is to be ignored, then the call is replaced by a
523 GIMPLE_NOP. A proper VDEF chain is retained by making the first
524 VUSE and the last VDEF of the whole sequence be the same as the replaced
525 statement and using new SSA names for stores in between. */
527 void
528 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
530 tree lhs;
531 gimple *stmt, *new_stmt;
532 gimple_stmt_iterator i;
533 gimple_seq stmts = NULL;
535 stmt = gsi_stmt (*si_p);
537 gcc_assert (is_gimple_call (stmt));
/* Gimplification below must happen inside a gimplify context.  */
539 push_gimplify_context (gimple_in_ssa_p (cfun));
541 lhs = gimple_call_lhs (stmt);
542 if (lhs == NULL_TREE)
/* Result is ignored: gimplify EXPR purely for its side effects.  */
544 gimplify_and_add (expr, &stmts);
545 /* We can end up with folding a memcpy of an empty class assignment
546 which gets optimized away by C++ gimplification. */
547 if (gimple_seq_empty_p (stmts))
549 pop_gimplify_context (NULL);
550 if (gimple_in_ssa_p (cfun))
552 unlink_stmt_vdef (stmt);
553 release_defs (stmt);
555 gsi_replace (si_p, gimple_build_nop (), false);
556 return;
559 else
/* Result is used: force EXPR to a gimple operand and assign it to
   the original call's lhs as the last statement of the sequence.  */
561 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
562 new_stmt = gimple_build_assign (lhs, tmp);
563 i = gsi_last (stmts);
564 gsi_insert_after_without_update (&i, new_stmt,
565 GSI_CONTINUE_LINKING);
568 pop_gimplify_context (NULL);
/* Splice the sequence in place of the call, fixing up virtual
   operands (see gsi_replace_with_seq_vops).  */
570 gsi_replace_with_seq_vops (si_p, stmts);
574 /* Replace the call at *GSI with the gimple value VAL. */
576 void
577 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
579 gimple *stmt = gsi_stmt (*gsi);
580 tree lhs = gimple_call_lhs (stmt);
581 gimple *repl;
582 if (lhs)
584 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
585 val = fold_convert (TREE_TYPE (lhs), val);
586 repl = gimple_build_assign (lhs, val);
588 else
589 repl = gimple_build_nop ();
590 tree vdef = gimple_vdef (stmt);
591 if (vdef && TREE_CODE (vdef) == SSA_NAME)
593 unlink_stmt_vdef (stmt);
594 release_ssa_name (vdef);
596 gsi_replace (gsi, repl, false);
599 /* Replace the call at *GSI with the new call REPL and fold that
600 again. */
602 static void
603 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
605 gimple *stmt = gsi_stmt (*gsi);
606 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
607 gimple_set_location (repl, gimple_location (stmt));
608 if (gimple_vdef (stmt)
609 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
611 gimple_set_vdef (repl, gimple_vdef (stmt));
612 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
614 if (gimple_vuse (stmt))
615 gimple_set_vuse (repl, gimple_vuse (stmt));
616 gsi_replace (gsi, repl, false);
617 fold_stmt (gsi);
620 /* Return true if VAR is a VAR_DECL or a component thereof. */
622 static bool
623 var_decl_component_p (tree var)
625 tree inner = var;
626 while (handled_component_p (inner))
627 inner = TREE_OPERAND (inner, 0);
628 return SSA_VAR_P (inner);
631 /* If the SIZE argument representing the size of an object is in a range
632 of values of which exactly one is valid (and that is zero), return
633 true, otherwise false. */
635 static bool
636 size_must_be_zero_p (tree size)
638 if (integer_zerop (size))
639 return true;
641 if (TREE_CODE (size) != SSA_NAME)
642 return false;
644 wide_int min, max;
645 enum value_range_type rtype = get_range_info (size, &min, &max);
646 if (rtype != VR_ANTI_RANGE)
647 return false;
649 tree type = TREE_TYPE (size);
650 int prec = TYPE_PRECISION (type);
652 wide_int wone = wi::one (prec);
654 /* Compute the value of SSIZE_MAX, the largest positive value that
655 can be stored in ssize_t, the signed counterpart of size_t. */
656 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
658 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
661 /* Fold function call to builtin mem{{,p}cpy,move}. Return
662 false if no simplification can be made.
663 If ENDP is 0, return DEST (like memcpy).
664 If ENDP is 1, return DEST+LEN (like mempcpy).
665 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
666 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
667 (memmove). */
669 static bool
670 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
671 tree dest, tree src, int endp)
673 gimple *stmt = gsi_stmt (*gsi);
674 tree lhs = gimple_call_lhs (stmt);
675 tree len = gimple_call_arg (stmt, 2);
676 tree destvar, srcvar;
677 location_t loc = gimple_location (stmt);
679 /* If the LEN parameter is a constant zero or in range where
680 the only valid value is zero, return DEST. */
681 if (size_must_be_zero_p (len))
683 gimple *repl;
684 if (gimple_call_lhs (stmt))
685 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
686 else
687 repl = gimple_build_nop ();
688 tree vdef = gimple_vdef (stmt);
689 if (vdef && TREE_CODE (vdef) == SSA_NAME)
691 unlink_stmt_vdef (stmt);
692 release_ssa_name (vdef);
694 gsi_replace (gsi, repl, false);
695 return true;
698 /* If SRC and DEST are the same (and not volatile), return
699 DEST{,+LEN,+LEN-1}. */
700 if (operand_equal_p (src, dest, 0))
702 unlink_stmt_vdef (stmt);
703 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
704 release_ssa_name (gimple_vdef (stmt));
705 if (!lhs)
707 gsi_replace (gsi, gimple_build_nop (), false);
708 return true;
710 goto done;
712 else
714 tree srctype, desttype;
715 unsigned int src_align, dest_align;
716 tree off0;
718 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
719 pointers as wide integer) and also may result in huge function
720 size because of inlined bounds copy. Thus don't inline for
721 functions we want to instrument. */
722 if (flag_check_pointer_bounds
723 && chkp_instrumentable_p (cfun->decl)
724 /* Even if data may contain pointers we can inline if copy
725 less than a pointer size. */
726 && (!tree_fits_uhwi_p (len)
727 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
728 return false;
730 /* Build accesses at offset zero with a ref-all character type. */
731 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
732 ptr_mode, true), 0);
734 /* If we can perform the copy efficiently with first doing all loads
735 and then all stores inline it that way. Currently efficiently
736 means that we can load all the memory into a single integer
737 register which is what MOVE_MAX gives us. */
738 src_align = get_pointer_alignment (src);
739 dest_align = get_pointer_alignment (dest);
740 if (tree_fits_uhwi_p (len)
741 && compare_tree_int (len, MOVE_MAX) <= 0
742 /* ??? Don't transform copies from strings with known length this
743 confuses the tree-ssa-strlen.c. This doesn't handle
744 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
745 reason. */
746 && !c_strlen (src, 2))
748 unsigned ilen = tree_to_uhwi (len);
/* Power-of-two sizes can be copied as a single integer-mode load
   followed by one store.  */
749 if (pow2p_hwi (ilen))
751 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
752 if (type
753 && TYPE_MODE (type) != BLKmode
754 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
755 == ilen * 8)
756 /* If the destination pointer is not aligned we must be able
757 to emit an unaligned store. */
758 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
759 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
760 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
761 != CODE_FOR_nothing)))
763 tree srctype = type;
764 tree desttype = type;
765 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
766 srctype = build_aligned_type (type, src_align);
767 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
768 tree tem = fold_const_aggregate_ref (srcmem);
769 if (tem)
770 srcmem = tem;
/* Give up when the source needs an unaligned load the target
   cannot do.  */
771 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
772 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
773 src_align)
774 && (optab_handler (movmisalign_optab,
775 TYPE_MODE (type))
776 == CODE_FOR_nothing))
777 srcmem = NULL_TREE;
778 if (srcmem)
780 gimple *new_stmt;
/* Load into a temporary first when the value fits a register,
   then store the temporary.  */
781 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
783 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
784 srcmem
785 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
786 new_stmt);
787 gimple_assign_set_lhs (new_stmt, srcmem);
788 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
789 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
791 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
792 desttype = build_aligned_type (type, dest_align);
793 new_stmt
794 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
795 dest, off0),
796 srcmem);
797 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
798 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
799 if (gimple_vdef (new_stmt)
800 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
801 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
802 if (!lhs)
804 gsi_replace (gsi, new_stmt, false);
805 return true;
807 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
808 goto done;
/* ENDP == 3 is the memmove case: DEST and SRC may overlap.  */
814 if (endp == 3)
816 /* Both DEST and SRC must be pointer types.
817 ??? This is what old code did. Is the testing for pointer types
818 really mandatory?
820 If either SRC is readonly or length is 1, we can use memcpy. */
821 if (!dest_align || !src_align)
822 return false;
823 if (readonly_data_expr (src)
824 || (tree_fits_uhwi_p (len)
825 && (MIN (src_align, dest_align) / BITS_PER_UNIT
826 >= tree_to_uhwi (len))))
828 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
829 if (!fn)
830 return false;
831 gimple_call_set_fndecl (stmt, fn);
832 gimple_call_set_arg (stmt, 0, dest);
833 gimple_call_set_arg (stmt, 1, src);
834 fold_stmt (gsi);
835 return true;
838 /* If *src and *dest can't overlap, optimize into memcpy as well. */
839 if (TREE_CODE (src) == ADDR_EXPR
840 && TREE_CODE (dest) == ADDR_EXPR)
842 tree src_base, dest_base, fn;
843 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
844 HOST_WIDE_INT maxsize;
846 srcvar = TREE_OPERAND (src, 0);
847 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
848 if (src_base == NULL)
849 src_base = srcvar;
850 destvar = TREE_OPERAND (dest, 0);
851 dest_base = get_addr_base_and_unit_offset (destvar,
852 &dest_offset);
853 if (dest_base == NULL)
854 dest_base = destvar;
855 if (tree_fits_uhwi_p (len))
856 maxsize = tree_to_uhwi (len);
857 else
858 maxsize = -1;
859 if (SSA_VAR_P (src_base)
860 && SSA_VAR_P (dest_base))
/* Same decl with overlapping ranges: cannot use memcpy.  */
862 if (operand_equal_p (src_base, dest_base, 0)
863 && ranges_overlap_p (src_offset, maxsize,
864 dest_offset, maxsize))
865 return false;
867 else if (TREE_CODE (src_base) == MEM_REF
868 && TREE_CODE (dest_base) == MEM_REF)
870 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
871 TREE_OPERAND (dest_base, 0), 0))
872 return false;
873 offset_int off = mem_ref_offset (src_base) + src_offset;
874 if (!wi::fits_shwi_p (off))
875 return false;
876 src_offset = off.to_shwi ();
878 off = mem_ref_offset (dest_base) + dest_offset;
879 if (!wi::fits_shwi_p (off))
880 return false;
881 dest_offset = off.to_shwi ();
882 if (ranges_overlap_p (src_offset, maxsize,
883 dest_offset, maxsize))
884 return false;
886 else
887 return false;
889 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
890 if (!fn)
891 return false;
892 gimple_call_set_fndecl (stmt, fn);
893 gimple_call_set_arg (stmt, 0, dest);
894 gimple_call_set_arg (stmt, 1, src);
895 fold_stmt (gsi);
896 return true;
899 /* If the destination and source do not alias optimize into
900 memcpy as well. */
901 if ((is_gimple_min_invariant (dest)
902 || TREE_CODE (dest) == SSA_NAME)
903 && (is_gimple_min_invariant (src)
904 || TREE_CODE (src) == SSA_NAME))
906 ao_ref destr, srcr;
907 ao_ref_init_from_ptr_and_size (&destr, dest, len);
908 ao_ref_init_from_ptr_and_size (&srcr, src, len);
909 if (!refs_may_alias_p_1 (&destr, &srcr, false))
911 tree fn;
912 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
913 if (!fn)
914 return false;
915 gimple_call_set_fndecl (stmt, fn);
916 gimple_call_set_arg (stmt, 0, dest);
917 gimple_call_set_arg (stmt, 1, src);
918 fold_stmt (gsi);
919 return true;
923 return false;
/* From here on try to turn the call into a plain aggregate
   assignment of a suitable type.  */
926 if (!tree_fits_shwi_p (len))
927 return false;
928 /* FIXME:
929 This logic lose for arguments like (type *)malloc (sizeof (type)),
930 since we strip the casts of up to VOID return value from malloc.
931 Perhaps we ought to inherit type from non-VOID argument here? */
932 STRIP_NOPS (src);
933 STRIP_NOPS (dest);
934 if (!POINTER_TYPE_P (TREE_TYPE (src))
935 || !POINTER_TYPE_P (TREE_TYPE (dest)))
936 return false;
937 /* In the following try to find a type that is most natural to be
938 used for the memcpy source and destination and that allows
939 the most optimization when memcpy is turned into a plain assignment
940 using that type. In theory we could always use a char[len] type
941 but that only gains us that the destination and source possibly
942 no longer will have their address taken. */
943 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
944 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
946 tree tem = TREE_OPERAND (src, 0);
947 STRIP_NOPS (tem);
948 if (tem != TREE_OPERAND (src, 0))
949 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
951 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
953 tree tem = TREE_OPERAND (dest, 0);
954 STRIP_NOPS (tem);
955 if (tem != TREE_OPERAND (dest, 0))
956 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* If the pointee is an array whose size differs from LEN, use the
   element type instead.  */
958 srctype = TREE_TYPE (TREE_TYPE (src));
959 if (TREE_CODE (srctype) == ARRAY_TYPE
960 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
962 srctype = TREE_TYPE (srctype);
963 STRIP_NOPS (src);
964 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
966 desttype = TREE_TYPE (TREE_TYPE (dest));
967 if (TREE_CODE (desttype) == ARRAY_TYPE
968 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
970 desttype = TREE_TYPE (desttype);
971 STRIP_NOPS (dest);
972 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
974 if (TREE_ADDRESSABLE (srctype)
975 || TREE_ADDRESSABLE (desttype))
976 return false;
978 /* Make sure we are not copying using a floating-point mode or
979 a type whose size possibly does not match its precision. */
980 if (FLOAT_MODE_P (TYPE_MODE (desttype))
981 || TREE_CODE (desttype) == BOOLEAN_TYPE
982 || TREE_CODE (desttype) == ENUMERAL_TYPE)
983 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
984 if (FLOAT_MODE_P (TYPE_MODE (srctype))
985 || TREE_CODE (srctype) == BOOLEAN_TYPE
986 || TREE_CODE (srctype) == ENUMERAL_TYPE)
987 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
988 if (!srctype)
989 srctype = desttype;
990 if (!desttype)
991 desttype = srctype;
992 if (!srctype)
993 return false;
995 src_align = get_pointer_alignment (src);
996 dest_align = get_pointer_alignment (dest);
997 if (dest_align < TYPE_ALIGN (desttype)
998 || src_align < TYPE_ALIGN (srctype))
999 return false;
/* Prefer accesses based directly on the addressed decls when the
   full object of the right size is copied.  */
1001 destvar = dest;
1002 STRIP_NOPS (destvar);
1003 if (TREE_CODE (destvar) == ADDR_EXPR
1004 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1005 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1006 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1007 else
1008 destvar = NULL_TREE;
1010 srcvar = src;
1011 STRIP_NOPS (srcvar);
1012 if (TREE_CODE (srcvar) == ADDR_EXPR
1013 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1014 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1016 if (!destvar
1017 || src_align >= TYPE_ALIGN (desttype))
1018 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1019 srcvar, off0);
1020 else if (!STRICT_ALIGNMENT)
1022 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1023 src_align);
1024 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1026 else
1027 srcvar = NULL_TREE;
1029 else
1030 srcvar = NULL_TREE;
1032 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1033 return false;
/* One side is decl-based; synthesize the other side as a MEM_REF of
   the matching type, respecting alignment constraints.  */
1035 if (srcvar == NULL_TREE)
1037 STRIP_NOPS (src);
1038 if (src_align >= TYPE_ALIGN (desttype))
1039 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1040 else
1042 if (STRICT_ALIGNMENT)
1043 return false;
1044 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1045 src_align);
1046 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1049 else if (destvar == NULL_TREE)
1051 STRIP_NOPS (dest);
1052 if (dest_align >= TYPE_ALIGN (srctype))
1053 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1054 else
1056 if (STRICT_ALIGNMENT)
1057 return false;
1058 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1059 dest_align);
1060 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
/* Emit the load (into a temporary if needed) and the store that
   replace the library call.  */
1064 gimple *new_stmt;
1065 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1067 tree tem = fold_const_aggregate_ref (srcvar);
1068 if (tem)
1069 srcvar = tem;
1070 if (! is_gimple_min_invariant (srcvar))
1072 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1073 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1074 new_stmt);
1075 gimple_assign_set_lhs (new_stmt, srcvar);
1076 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1077 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1080 new_stmt = gimple_build_assign (destvar, srcvar);
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
/* Compute the value for the lhs: DEST, DEST+LEN or DEST+LEN-1
   depending on ENDP (see the function comment).  */
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1122 if (!fn)
1123 return false;
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1135 return true;
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1146 if (!fn)
1147 return false;
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1162 return true;
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1173 if (!fn)
1174 return false;
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1188 return true;
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1271 else
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1278 return true;
1282 /* Obtain the minimum and maximum string length or minimum and maximum
1283 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1284 If ARG is an SSA name variable, follow its use-def chains. When
1285 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1286 if we are unable to determine the length or value, return False.
1287 VISITED is a bitmap of visited variables.
1288 TYPE is 0 if string length should be obtained, 1 for maximum string
1289 length and 2 for maximum value ARG can have.
1290 When FUZZY is set and the length of a string cannot be determined,
1291 the function instead considers as the maximum possible length the
1292 size of a character array it may refer to.
1293 Set *FLEXP to true if the range of the string lengths has been
1294 obtained from the upper bound of an array at the end of a struct.
1295 Such an array may hold a string that's longer than its upper bound
1296 due to it being used as a poor-man's flexible array member. */
1298 static bool
1299 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1300 bool fuzzy, bool *flexp)
1302 tree var, val;
1303 gimple *def_stmt;
1305 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1306 but MINLEN may be cleared during the execution of the function. */
1307 tree *minlen = length;
1308 tree *const maxlen = length + 1;
1310 if (TREE_CODE (arg) != SSA_NAME)
1312 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1313 if (TREE_CODE (arg) == ADDR_EXPR
1314 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1315 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1317 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1318 if (TREE_CODE (aop0) == INDIRECT_REF
1319 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1320 return get_range_strlen (TREE_OPERAND (aop0, 0),
1321 length, visited, type, fuzzy, flexp);
1324 if (type == 2)
1326 val = arg;
1327 if (TREE_CODE (val) != INTEGER_CST
1328 || tree_int_cst_sgn (val) < 0)
1329 return false;
1331 else
1332 val = c_strlen (arg, 1);
1334 if (!val && fuzzy)
1336 if (TREE_CODE (arg) == ADDR_EXPR)
1337 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1338 visited, type, fuzzy, flexp);
1340 if (TREE_CODE (arg) == COMPONENT_REF
1341 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1343 /* Use the type of the member array to determine the upper
1344 bound on the length of the array. This may be overly
1345 optimistic if the array itself isn't NUL-terminated and
1346 the caller relies on the subsequent member to contain
1347 the NUL.
1348 Set *FLEXP to true if the array whose bound is being
1349 used is at the end of a struct. */
1350 if (array_at_struct_end_p (arg))
1351 *flexp = true;
1353 arg = TREE_OPERAND (arg, 1);
1354 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1355 if (!val || integer_zerop (val))
1356 return false;
1357 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1358 integer_one_node);
1359 /* Set the minimum size to zero since the string in
1360 the array could have zero length. */
1361 *minlen = ssize_int (0);
1365 if (!val)
1366 return false;
1368 if (minlen
1369 && (!*minlen
1370 || (type > 0
1371 && TREE_CODE (*minlen) == INTEGER_CST
1372 && TREE_CODE (val) == INTEGER_CST
1373 && tree_int_cst_lt (val, *minlen))))
1374 *minlen = val;
1376 if (*maxlen)
1378 if (type > 0)
1380 if (TREE_CODE (*maxlen) != INTEGER_CST
1381 || TREE_CODE (val) != INTEGER_CST)
1382 return false;
1384 if (tree_int_cst_lt (*maxlen, val))
1385 *maxlen = val;
1386 return true;
1388 else if (simple_cst_equal (val, *maxlen) != 1)
1389 return false;
1392 *maxlen = val;
1393 return true;
1396 /* If ARG is registered for SSA update we cannot look at its defining
1397 statement. */
1398 if (name_registered_for_update_p (arg))
1399 return false;
1401 /* If we were already here, break the infinite cycle. */
1402 if (!*visited)
1403 *visited = BITMAP_ALLOC (NULL);
1404 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1405 return true;
1407 var = arg;
1408 def_stmt = SSA_NAME_DEF_STMT (var);
1410 switch (gimple_code (def_stmt))
1412 case GIMPLE_ASSIGN:
1413 /* The RHS of the statement defining VAR must either have a
1414 constant length or come from another SSA_NAME with a constant
1415 length. */
1416 if (gimple_assign_single_p (def_stmt)
1417 || gimple_assign_unary_nop_p (def_stmt))
1419 tree rhs = gimple_assign_rhs1 (def_stmt);
1420 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1422 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1424 tree op2 = gimple_assign_rhs2 (def_stmt);
1425 tree op3 = gimple_assign_rhs3 (def_stmt);
1426 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1427 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1429 return false;
1431 case GIMPLE_PHI:
1433 /* All the arguments of the PHI node must have the same constant
1434 length. */
1435 unsigned i;
1437 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1439 tree arg = gimple_phi_arg (def_stmt, i)->def;
1441 /* If this PHI has itself as an argument, we cannot
1442 determine the string length of this argument. However,
1443 if we can find a constant string length for the other
1444 PHI args then we can still be sure that this is a
1445 constant string length. So be optimistic and just
1446 continue with the next argument. */
1447 if (arg == gimple_phi_result (def_stmt))
1448 continue;
1450 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1452 if (fuzzy)
1453 *maxlen = build_all_ones_cst (size_type_node);
1454 else
1455 return false;
1459 return true;
1461 default:
1462 return false;
1466 /* Determine the minimum and maximum value or string length that ARG
1467 refers to and store each in the first two elements of MINMAXLEN.
1468 For expressions that point to strings of unknown lengths that are
1469 character arrays, use the upper bound of the array as the maximum
1470 length. For example, given an expression like 'x ? array : "xyz"'
1471 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1472 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1473 stored in array.
1474 Return true if the range of the string lengths has been obtained
1475 from the upper bound of an array at the end of a struct. Such
1476 an array may hold a string that's longer than its upper bound
1477 due to it being used as a poor-man's flexible array member. */
1479 bool
1480 get_range_strlen (tree arg, tree minmaxlen[2])
1482 bitmap visited = NULL;
1484 minmaxlen[0] = NULL_TREE;
1485 minmaxlen[1] = NULL_TREE;
1487 bool flexarray = false;
1488 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1490 if (visited)
1491 BITMAP_FREE (visited);
1493 return flexarray;
1496 tree
1497 get_maxval_strlen (tree arg, int type)
1499 bitmap visited = NULL;
1500 tree len[2] = { NULL_TREE, NULL_TREE };
1502 bool dummy;
1503 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1504 len[1] = NULL_TREE;
1505 if (visited)
1506 BITMAP_FREE (visited);
1508 return len[1];
1512 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1513 If LEN is not NULL, it represents the length of the string to be
1514 copied. Return NULL_TREE if no simplification can be made. */
1516 static bool
1517 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1518 tree dest, tree src)
1520 location_t loc = gimple_location (gsi_stmt (*gsi));
1521 tree fn;
1523 /* If SRC and DEST are the same (and not volatile), return DEST. */
1524 if (operand_equal_p (src, dest, 0))
1526 replace_call_with_value (gsi, dest);
1527 return true;
1530 if (optimize_function_for_size_p (cfun))
1531 return false;
1533 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1534 if (!fn)
1535 return false;
1537 tree len = get_maxval_strlen (src, 0);
1538 if (!len)
1539 return false;
1541 len = fold_convert_loc (loc, size_type_node, len);
1542 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1543 len = force_gimple_operand_gsi (gsi, len, true,
1544 NULL_TREE, true, GSI_SAME_STMT);
1545 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1546 replace_call_with_call_and_fold (gsi, repl);
1547 return true;
1550 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1551 If SLEN is not NULL, it represents the length of the source string.
1552 Return NULL_TREE if no simplification can be made. */
1554 static bool
1555 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1556 tree dest, tree src, tree len)
1558 location_t loc = gimple_location (gsi_stmt (*gsi));
1559 tree fn;
1561 /* If the LEN parameter is zero, return DEST. */
1562 if (integer_zerop (len))
1564 replace_call_with_value (gsi, dest);
1565 return true;
1568 /* We can't compare slen with len as constants below if len is not a
1569 constant. */
1570 if (TREE_CODE (len) != INTEGER_CST)
1571 return false;
1573 /* Now, we must be passed a constant src ptr parameter. */
1574 tree slen = get_maxval_strlen (src, 0);
1575 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1576 return false;
1578 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1580 /* We do not support simplification of this case, though we do
1581 support it when expanding trees into RTL. */
1582 /* FIXME: generate a call to __builtin_memset. */
1583 if (tree_int_cst_lt (slen, len))
1584 return false;
1586 /* OK transform into builtin memcpy. */
1587 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1588 if (!fn)
1589 return false;
1591 len = fold_convert_loc (loc, size_type_node, len);
1592 len = force_gimple_operand_gsi (gsi, len, true,
1593 NULL_TREE, true, GSI_SAME_STMT);
1594 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1595 replace_call_with_call_and_fold (gsi, repl);
1596 return true;
1599 /* Fold function call to builtin strchr or strrchr.
1600 If both arguments are constant, evaluate and fold the result,
1601 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1602 In general strlen is significantly faster than strchr
1603 due to being a simpler operation. */
1604 static bool
1605 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1607 gimple *stmt = gsi_stmt (*gsi);
1608 tree str = gimple_call_arg (stmt, 0);
1609 tree c = gimple_call_arg (stmt, 1);
1610 location_t loc = gimple_location (stmt);
1611 const char *p;
1612 char ch;
1614 if (!gimple_call_lhs (stmt))
1615 return false;
1617 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1619 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1621 if (p1 == NULL)
1623 replace_call_with_value (gsi, integer_zero_node);
1624 return true;
1627 tree len = build_int_cst (size_type_node, p1 - p);
1628 gimple_seq stmts = NULL;
1629 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1630 POINTER_PLUS_EXPR, str, len);
1631 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1632 gsi_replace_with_seq_vops (gsi, stmts);
1633 return true;
1636 if (!integer_zerop (c))
1637 return false;
1639 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1640 if (is_strrchr && optimize_function_for_size_p (cfun))
1642 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1644 if (strchr_fn)
1646 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1647 replace_call_with_call_and_fold (gsi, repl);
1648 return true;
1651 return false;
1654 tree len;
1655 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1657 if (!strlen_fn)
1658 return false;
1660 /* Create newstr = strlen (str). */
1661 gimple_seq stmts = NULL;
1662 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1663 gimple_set_location (new_stmt, loc);
1664 len = create_tmp_reg_or_ssa_name (size_type_node);
1665 gimple_call_set_lhs (new_stmt, len);
1666 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1668 /* Create (str p+ strlen (str)). */
1669 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1670 POINTER_PLUS_EXPR, str, len);
1671 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1672 gsi_replace_with_seq_vops (gsi, stmts);
1673 /* gsi now points at the assignment to the lhs, get a
1674 stmt iterator to the strlen.
1675 ??? We can't use gsi_for_stmt as that doesn't work when the
1676 CFG isn't built yet. */
1677 gimple_stmt_iterator gsi2 = *gsi;
1678 gsi_prev (&gsi2);
1679 fold_stmt (&gsi2);
1680 return true;
1683 /* Fold function call to builtin strstr.
1684 If both arguments are constant, evaluate and fold the result,
1685 additionally fold strstr (x, "") into x and strstr (x, "c")
1686 into strchr (x, 'c'). */
1687 static bool
1688 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1690 gimple *stmt = gsi_stmt (*gsi);
1691 tree haystack = gimple_call_arg (stmt, 0);
1692 tree needle = gimple_call_arg (stmt, 1);
1693 const char *p, *q;
1695 if (!gimple_call_lhs (stmt))
1696 return false;
1698 q = c_getstr (needle);
1699 if (q == NULL)
1700 return false;
1702 if ((p = c_getstr (haystack)))
1704 const char *r = strstr (p, q);
1706 if (r == NULL)
1708 replace_call_with_value (gsi, integer_zero_node);
1709 return true;
1712 tree len = build_int_cst (size_type_node, r - p);
1713 gimple_seq stmts = NULL;
1714 gimple *new_stmt
1715 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1716 haystack, len);
1717 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1718 gsi_replace_with_seq_vops (gsi, stmts);
1719 return true;
1722 /* For strstr (x, "") return x. */
1723 if (q[0] == '\0')
1725 replace_call_with_value (gsi, haystack);
1726 return true;
1729 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1730 if (q[1] == '\0')
1732 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1733 if (strchr_fn)
1735 tree c = build_int_cst (integer_type_node, q[0]);
1736 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1737 replace_call_with_call_and_fold (gsi, repl);
1738 return true;
1742 return false;
1745 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1746 to the call.
1748 Return NULL_TREE if no simplification was possible, otherwise return the
1749 simplified form of the call as a tree.
1751 The simplified form may be a constant or other expression which
1752 computes the same value, but in a more efficient manner (including
1753 calls to other builtin functions).
1755 The call may contain arguments which need to be evaluated, but
1756 which are not useful to determine the result of the call. In
1757 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1758 COMPOUND_EXPR will be an argument which must be evaluated.
1759 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1760 COMPOUND_EXPR in the chain will contain the tree for the simplified
1761 form of the builtin function call. */
1763 static bool
1764 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1766 gimple *stmt = gsi_stmt (*gsi);
1767 location_t loc = gimple_location (stmt);
1769 const char *p = c_getstr (src);
1771 /* If the string length is zero, return the dst parameter. */
1772 if (p && *p == '\0')
1774 replace_call_with_value (gsi, dst);
1775 return true;
1778 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1779 return false;
1781 /* See if we can store by pieces into (dst + strlen(dst)). */
1782 tree newdst;
1783 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1784 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1786 if (!strlen_fn || !memcpy_fn)
1787 return false;
1789 /* If the length of the source string isn't computable don't
1790 split strcat into strlen and memcpy. */
1791 tree len = get_maxval_strlen (src, 0);
1792 if (! len)
1793 return false;
1795 /* Create strlen (dst). */
1796 gimple_seq stmts = NULL, stmts2;
1797 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1798 gimple_set_location (repl, loc);
1799 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1800 gimple_call_set_lhs (repl, newdst);
1801 gimple_seq_add_stmt_without_update (&stmts, repl);
1803 /* Create (dst p+ strlen (dst)). */
1804 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1805 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1806 gimple_seq_add_seq_without_update (&stmts, stmts2);
1808 len = fold_convert_loc (loc, size_type_node, len);
1809 len = size_binop_loc (loc, PLUS_EXPR, len,
1810 build_int_cst (size_type_node, 1));
1811 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1812 gimple_seq_add_seq_without_update (&stmts, stmts2);
1814 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1815 gimple_seq_add_stmt_without_update (&stmts, repl);
1816 if (gimple_call_lhs (stmt))
1818 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1819 gimple_seq_add_stmt_without_update (&stmts, repl);
1820 gsi_replace_with_seq_vops (gsi, stmts);
1821 /* gsi now points at the assignment to the lhs, get a
1822 stmt iterator to the memcpy call.
1823 ??? We can't use gsi_for_stmt as that doesn't work when the
1824 CFG isn't built yet. */
1825 gimple_stmt_iterator gsi2 = *gsi;
1826 gsi_prev (&gsi2);
1827 fold_stmt (&gsi2);
1829 else
1831 gsi_replace_with_seq_vops (gsi, stmts);
1832 fold_stmt (gsi);
1834 return true;
1837 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1838 are the arguments to the call. */
1840 static bool
1841 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1843 gimple *stmt = gsi_stmt (*gsi);
1844 tree dest = gimple_call_arg (stmt, 0);
1845 tree src = gimple_call_arg (stmt, 1);
1846 tree size = gimple_call_arg (stmt, 2);
1847 tree fn;
1848 const char *p;
1851 p = c_getstr (src);
1852 /* If the SRC parameter is "", return DEST. */
1853 if (p && *p == '\0')
1855 replace_call_with_value (gsi, dest);
1856 return true;
1859 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1860 return false;
1862 /* If __builtin_strcat_chk is used, assume strcat is available. */
1863 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1864 if (!fn)
1865 return false;
1867 gimple *repl = gimple_build_call (fn, 2, dest, src);
1868 replace_call_with_call_and_fold (gsi, repl);
1869 return true;
1872 /* Simplify a call to the strncat builtin. */
1874 static bool
1875 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1877 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1878 tree dst = gimple_call_arg (stmt, 0);
1879 tree src = gimple_call_arg (stmt, 1);
1880 tree len = gimple_call_arg (stmt, 2);
1882 const char *p = c_getstr (src);
1884 /* If the requested length is zero, or the src parameter string
1885 length is zero, return the dst parameter. */
1886 if (integer_zerop (len) || (p && *p == '\0'))
1888 replace_call_with_value (gsi, dst);
1889 return true;
1892 /* If the requested len is greater than or equal to the string
1893 length, call strcat. */
1894 if (TREE_CODE (len) == INTEGER_CST && p
1895 && compare_tree_int (len, strlen (p)) >= 0)
1897 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1899 /* If the replacement _DECL isn't initialized, don't do the
1900 transformation. */
1901 if (!fn)
1902 return false;
1904 gcall *repl = gimple_build_call (fn, 2, dst, src);
1905 replace_call_with_call_and_fold (gsi, repl);
1906 return true;
1909 return false;
1912 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1913 LEN, and SIZE. */
1915 static bool
1916 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1918 gimple *stmt = gsi_stmt (*gsi);
1919 tree dest = gimple_call_arg (stmt, 0);
1920 tree src = gimple_call_arg (stmt, 1);
1921 tree len = gimple_call_arg (stmt, 2);
1922 tree size = gimple_call_arg (stmt, 3);
1923 tree fn;
1924 const char *p;
1926 p = c_getstr (src);
1927 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1928 if ((p && *p == '\0')
1929 || integer_zerop (len))
1931 replace_call_with_value (gsi, dest);
1932 return true;
1935 if (! tree_fits_uhwi_p (size))
1936 return false;
1938 if (! integer_all_onesp (size))
1940 tree src_len = c_strlen (src, 1);
1941 if (src_len
1942 && tree_fits_uhwi_p (src_len)
1943 && tree_fits_uhwi_p (len)
1944 && ! tree_int_cst_lt (len, src_len))
1946 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1947 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1948 if (!fn)
1949 return false;
1951 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1952 replace_call_with_call_and_fold (gsi, repl);
1953 return true;
1955 return false;
1958 /* If __builtin_strncat_chk is used, assume strncat is available. */
1959 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1960 if (!fn)
1961 return false;
1963 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1964 replace_call_with_call_and_fold (gsi, repl);
1965 return true;
1968 /* Build and append gimple statements to STMTS that would load a first
1969 character of a memory location identified by STR. LOC is location
1970 of the statement. */
1972 static tree
1973 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1975 tree var;
1977 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1978 tree cst_uchar_ptr_node
1979 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1980 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1982 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1983 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1984 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1986 gimple_assign_set_lhs (stmt, var);
1987 gimple_seq_add_stmt_without_update (stmts, stmt);
1989 return var;
1992 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
1993 FCODE is the name of the builtin. */
1995 static bool
1996 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1998 gimple *stmt = gsi_stmt (*gsi);
1999 tree callee = gimple_call_fndecl (stmt);
2000 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2002 tree type = integer_type_node;
2003 tree str1 = gimple_call_arg (stmt, 0);
2004 tree str2 = gimple_call_arg (stmt, 1);
2005 tree lhs = gimple_call_lhs (stmt);
2006 HOST_WIDE_INT length = -1;
2008 /* Handle strncmp and strncasecmp functions. */
2009 if (gimple_call_num_args (stmt) == 3)
2011 tree len = gimple_call_arg (stmt, 2);
2012 if (tree_fits_uhwi_p (len))
2013 length = tree_to_uhwi (len);
2016 /* If the LEN parameter is zero, return zero. */
2017 if (length == 0)
2019 replace_call_with_value (gsi, integer_zero_node);
2020 return true;
2023 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2024 if (operand_equal_p (str1, str2, 0))
2026 replace_call_with_value (gsi, integer_zero_node);
2027 return true;
2030 const char *p1 = c_getstr (str1);
2031 const char *p2 = c_getstr (str2);
2033 /* For known strings, return an immediate value. */
2034 if (p1 && p2)
2036 int r = 0;
2037 bool known_result = false;
2039 switch (fcode)
2041 case BUILT_IN_STRCMP:
2043 r = strcmp (p1, p2);
2044 known_result = true;
2045 break;
2047 case BUILT_IN_STRNCMP:
2049 if (length == -1)
2050 break;
2051 r = strncmp (p1, p2, length);
2052 known_result = true;
2053 break;
2055 /* Only handleable situation is where the string are equal (result 0),
2056 which is already handled by operand_equal_p case. */
2057 case BUILT_IN_STRCASECMP:
2058 break;
2059 case BUILT_IN_STRNCASECMP:
2061 if (length == -1)
2062 break;
2063 r = strncmp (p1, p2, length);
2064 if (r == 0)
2065 known_result = true;
2066 break;;
2068 default:
2069 gcc_unreachable ();
2072 if (known_result)
2074 replace_call_with_value (gsi, build_cmp_result (type, r));
2075 return true;
2079 bool nonzero_length = length >= 1
2080 || fcode == BUILT_IN_STRCMP
2081 || fcode == BUILT_IN_STRCASECMP;
2083 location_t loc = gimple_location (stmt);
2085 /* If the second arg is "", return *(const unsigned char*)arg1. */
2086 if (p2 && *p2 == '\0' && nonzero_length)
2088 gimple_seq stmts = NULL;
2089 tree var = gimple_load_first_char (loc, str1, &stmts);
2090 if (lhs)
2092 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2093 gimple_seq_add_stmt_without_update (&stmts, stmt);
2096 gsi_replace_with_seq_vops (gsi, stmts);
2097 return true;
2100 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2101 if (p1 && *p1 == '\0' && nonzero_length)
2103 gimple_seq stmts = NULL;
2104 tree var = gimple_load_first_char (loc, str2, &stmts);
2106 if (lhs)
2108 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2109 stmt = gimple_build_assign (c, NOP_EXPR, var);
2110 gimple_seq_add_stmt_without_update (&stmts, stmt);
2112 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2113 gimple_seq_add_stmt_without_update (&stmts, stmt);
2116 gsi_replace_with_seq_vops (gsi, stmts);
2117 return true;
2120 /* If len parameter is one, return an expression corresponding to
2121 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2122 if (fcode == BUILT_IN_STRNCMP && length == 1)
2124 gimple_seq stmts = NULL;
2125 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2126 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2128 if (lhs)
2130 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2131 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2132 gimple_seq_add_stmt_without_update (&stmts, convert1);
2134 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2135 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2136 gimple_seq_add_stmt_without_update (&stmts, convert2);
2138 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2139 gimple_seq_add_stmt_without_update (&stmts, stmt);
2142 gsi_replace_with_seq_vops (gsi, stmts);
2143 return true;
2146 return false;
2149 /* Fold a call to the memchr builtin at the statement GSI points to.
     Folds memchr (s, c, n) to a null pointer when N is zero, and to
     a compile-time answer when C and N are constants and S is a known
     string constant.  Returns true if the call was replaced.  */
2151 static bool
2152 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2154 gimple *stmt = gsi_stmt (*gsi);
2155 tree lhs = gimple_call_lhs (stmt);
2156 tree arg1 = gimple_call_arg (stmt, 0);
2157 tree arg2 = gimple_call_arg (stmt, 1);
2158 tree len = gimple_call_arg (stmt, 2);
2160 /* If the LEN parameter is zero, return zero.  */
2161 if (integer_zerop (len))
2163 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2164 return true;
     /* Punt unless the searched-for character and the length are
	compile-time constants representable on the host.  */
2167 char c;
2168 if (TREE_CODE (arg2) != INTEGER_CST
2169 || !tree_fits_uhwi_p (len)
2170 || !target_char_cst_p (arg2, &c))
2171 return false;
2173 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2174 unsigned HOST_WIDE_INT string_length;
     /* P1 is non-null only when ARG1 is a string constant;
	STRING_LENGTH receives the length of that constant.  */
2175 const char *p1 = c_getstr (arg1, &string_length);
2177 if (p1)
2179 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2180 if (r == NULL)
     /* No match.  The result is NULL only if the search range was
	fully covered by the known string contents; otherwise the
	character might occur in the unknown tail, so punt.  */
2182 if (length <= string_length)
2184 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2185 return true;
2188 else
     /* Match at a known offset: replace the call with
	ARG1 + OFFSET, preserving virtual operands.  */
2190 unsigned HOST_WIDE_INT offset = r - p1;
2191 gimple_seq stmts = NULL;
2192 if (lhs != NULL_TREE)
2194 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2195 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2196 arg1, offset_cst);
2197 gimple_seq_add_stmt_without_update (&stmts, stmt);
2199 else
2200 gimple_seq_add_stmt_without_update (&stmts,
2201 gimple_build_nop ());
2203 gsi_replace_with_seq_vops (gsi, stmts);
2204 return true;
2208 return false;
2211 /* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
2212    string and stream arguments to the call.  UNLOCKED is true if this
2213    is actually a call to fputs_unlocked.  The call is deleted for a
2214    zero-length string, turned into fputc for a one-character string,
2215    and into fwrite when the string length is known and we are not
2216    optimizing for size.  Returns true if the call was simplified.  */
2218 static bool
2219 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2220 tree arg0, tree arg1,
2221 bool unlocked)
2223 gimple *stmt = gsi_stmt (*gsi);
2225 /* If we're using an unlocked function, assume the other unlocked
2226 functions exist explicitly. */
2227 tree const fn_fputc = (unlocked
2228 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2229 : builtin_decl_implicit (BUILT_IN_FPUTC))
2230 tree const fn_fwrite = (unlocked
2231 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2232 : builtin_decl_implicit (BUILT_IN_FWRITE));
2234 /* If the return value is used, don't do the transformation. */
2235 if (gimple_call_lhs (stmt))
2236 return false;
2238 /* Get the length of the string passed to fputs. If the length
2239 can't be determined, punt. */
2240 tree len = get_maxval_strlen (arg0, 0);
2241 if (!len
2242 || TREE_CODE (len) != INTEGER_CST)
2243 return false;
2245 switch (compare_tree_int (len, 1))
2247 case -1: /* length is 0, delete the call entirely . */
2248 replace_call_with_value (gsi, integer_zero_node);
2249 return true;
2251 case 0: /* length is 1, call fputc. */
2253 const char *p = c_getstr (arg0);
2254 if (p != NULL)
2256 if (!fn_fputc)
2257 return false;
2259 gimple *repl = gimple_build_call (fn_fputc, 2,
2260 build_int_cst
2261 (integer_type_node, p[0]), arg1);
2262 replace_call_with_call_and_fold (gsi, repl);
2263 return true;
     /* The string contents are not known even though the length is 1;
	fall through and treat it like the general case.  */
2266 /* FALLTHROUGH */
2267 case 1: /* length is greater than 1, call fwrite. */
2269 /* If optimizing for size keep fputs. */
2270 if (optimize_function_for_size_p (cfun))
2271 return false;
2272 /* New argument list transforming fputs(string, stream) to
2273 fwrite(string, 1, len, stream). */
2274 if (!fn_fwrite)
2275 return false;
2277 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2278 size_one_node, len, arg1);
2279 replace_call_with_call_and_fold (gsi, repl);
2280 return true;
2282 default:
2283 gcc_unreachable ();
2285 return false;
2288 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
2289    DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
2290    the BUILT_IN_* code of the builtin.  The object-size check is
2291    dropped (the call becomes the unchecked mem* variant) when SIZE is
2292    known to be at least the length being written.  Returns true if
     the call was simplified.  */
2294 static bool
2295 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2296 tree dest, tree src, tree len, tree size,
2297 enum built_in_function fcode)
2299 gimple *stmt = gsi_stmt (*gsi);
2300 location_t loc = gimple_location (stmt);
2301 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2302 tree fn;
2304 /* If SRC and DEST are the same (and not volatile), return DEST
2305 (resp. DEST+LEN for __mempcpy_chk). */
2306 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2308 if (fcode != BUILT_IN_MEMPCPY_CHK)
2310 replace_call_with_value (gsi, dest);
2311 return true;
2313 else
2315 gimple_seq stmts = NULL;
2316 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2317 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2318 TREE_TYPE (dest), dest, len);
2319 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2320 replace_call_with_value (gsi, temp);
2321 return true;
     /* SIZE must be a compile-time constant to reason about the check.  */
2325 if (! tree_fits_uhwi_p (size))
2326 return false;
     /* MAXLEN is an upper bound on LEN derived from SSA range info;
	-1 (all ones) SIZE means "object size unknown", in which case
	the check is a no-op and can always be dropped.  */
2328 tree maxlen = get_maxval_strlen (len, 2);
2329 if (! integer_all_onesp (size))
2331 if (! tree_fits_uhwi_p (len))
2333 /* If LEN is not constant, try MAXLEN too.
2334 For MAXLEN only allow optimizing into non-_ocs function
2335 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2336 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2338 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2340 /* (void) __mempcpy_chk () can be optimized into
2341 (void) __memcpy_chk (). */
2342 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2343 if (!fn)
2344 return false;
2346 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2347 replace_call_with_call_and_fold (gsi, repl);
2348 return true;
2350 return false;
2353 else
2354 maxlen = len;
2356 if (tree_int_cst_lt (size, maxlen))
2357 return false;
2360 fn = NULL_TREE;
2361 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2362 mem{cpy,pcpy,move,set} is available. */
2363 switch (fcode)
2365 case BUILT_IN_MEMCPY_CHK:
2366 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2367 break;
2368 case BUILT_IN_MEMPCPY_CHK:
2369 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2370 break;
2371 case BUILT_IN_MEMMOVE_CHK:
2372 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2373 break;
2374 case BUILT_IN_MEMSET_CHK:
2375 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2376 break;
2377 default:
2378 break;
2381 if (!fn)
2382 return false;
     /* The check is provably redundant: emit the unchecked variant.  */
2384 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2385 replace_call_with_call_and_fold (gsi, repl);
2386 return true;
2389 /* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
2390    DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
2391    BUILT_IN_* code of the builtin.  Returns true if the call was
2392    simplified into an unchecked st[rp]cpy, a __memcpy_chk, or (for
2393    an ignored __stpcpy_chk result) a __strcpy_chk.  */
2395 static bool
2396 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2397 tree dest,
2398 tree src, tree size,
2399 enum built_in_function fcode)
2401 gimple *stmt = gsi_stmt (*gsi);
2402 location_t loc = gimple_location (stmt);
2403 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2404 tree len, fn;
2406 /* If SRC and DEST are the same (and not volatile), return DEST. */
2407 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2409 replace_call_with_value (gsi, dest);
2410 return true;
     /* SIZE must be a compile-time constant to reason about the check.  */
2413 if (! tree_fits_uhwi_p (size))
2414 return false;
     /* MAXLEN is an upper bound on strlen (SRC); -1 (all ones) SIZE
	means the object size is unknown and the check is a no-op.  */
2416 tree maxlen = get_maxval_strlen (src, 1);
2417 if (! integer_all_onesp (size))
2419 len = c_strlen (src, 1);
2420 if (! len || ! tree_fits_uhwi_p (len))
2422 /* If LEN is not constant, try MAXLEN too.
2423 For MAXLEN only allow optimizing into non-_ocs function
2424 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2425 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2427 if (fcode == BUILT_IN_STPCPY_CHK)
2429 if (! ignore)
2430 return false;
2432 /* If return value of __stpcpy_chk is ignored,
2433 optimize into __strcpy_chk. */
2434 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2435 if (!fn)
2436 return false;
2438 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2439 replace_call_with_call_and_fold (gsi, repl);
2440 return true;
2443 if (! len || TREE_SIDE_EFFECTS (len))
2444 return false;
2446 /* If c_strlen returned something, but not a constant,
2447 transform __strcpy_chk into __memcpy_chk. */
2448 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2449 if (!fn)
2450 return false;
     /* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
2452 gimple_seq stmts = NULL;
2453 len = gimple_convert (&stmts, loc, size_type_node, len);
2454 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2455 build_int_cst (size_type_node, 1));
2456 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2457 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2458 replace_call_with_call_and_fold (gsi, repl);
2459 return true;
2462 else
2463 maxlen = len;
     /* Require strict inequality: SIZE must also cover the NUL.  */
2465 if (! tree_int_cst_lt (maxlen, size))
2466 return false;
2469 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2470 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2471 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2472 if (!fn)
2473 return false;
2475 gimple *repl = gimple_build_call (fn, 2, dest, src);
2476 replace_call_with_call_and_fold (gsi, repl);
2477 return true;
2480 /* Fold a call to the __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC,
2481    LEN, and SIZE are the arguments to the call.  FCODE is the
2482    BUILT_IN_* code of the builtin.  Returns true if the call was
2483    simplified into an unchecked st{r,p}ncpy (or, for an ignored
     __stpncpy_chk result, into __strncpy_chk).  */
2485 static bool
2486 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2487 tree dest, tree src,
2488 tree len, tree size,
2489 enum built_in_function fcode)
2491 gimple *stmt = gsi_stmt (*gsi);
2492 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2493 tree fn;
2495 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2497 /* If return value of __stpncpy_chk is ignored,
2498 optimize into __strncpy_chk. */
2499 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2500 if (fn)
2502 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2503 replace_call_with_call_and_fold (gsi, repl);
2504 return true;
     /* SIZE must be a compile-time constant to reason about the check.  */
2508 if (! tree_fits_uhwi_p (size))
2509 return false;
     /* MAXLEN is an upper bound on LEN; -1 (all ones) SIZE means the
	object size is unknown and the check can always be dropped.  */
2511 tree maxlen = get_maxval_strlen (len, 2);
2512 if (! integer_all_onesp (size))
2514 if (! tree_fits_uhwi_p (len))
2516 /* If LEN is not constant, try MAXLEN too.
2517 For MAXLEN only allow optimizing into non-_ocs function
2518 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2519 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2520 return false;
2522 else
2523 maxlen = len;
2525 if (tree_int_cst_lt (size, maxlen))
2526 return false;
2529 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2530 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2531 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2532 if (!fn)
2533 return false;
2535 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2536 replace_call_with_call_and_fold (gsi, repl);
2537 return true;
2540 /* Fold a call to builtin stpcpy at *GSI with arguments DEST and SRC.
2541    If the result is unused the call becomes strcpy; if strlen (SRC)
2542    is a known constant the call becomes memcpy plus DEST + LEN for
     the result.  Returns true if a simplification was made.  */
2543 static bool
2544 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2546 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2547 location_t loc = gimple_location (stmt);
2548 tree dest = gimple_call_arg (stmt, 0);
2549 tree src = gimple_call_arg (stmt, 1);
2550 tree fn, len, lenp1;
2552 /* If the result is unused, replace stpcpy with strcpy. */
2553 if (gimple_call_lhs (stmt) == NULL_TREE)
2555 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2556 if (!fn)
2557 return false;
2558 gimple_call_set_fndecl (stmt, fn);
2559 fold_stmt (gsi);
2560 return true;
2563 len = c_strlen (src, 1);
2564 if (!len
2565 || TREE_CODE (len) != INTEGER_CST)
2566 return false;
2568 if (optimize_function_for_size_p (cfun)
2569 /* If length is zero it's small enough. */
2570 && !integer_zerop (len))
2571 return false;
2573 /* If the source has a known length replace stpcpy with memcpy. */
2574 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2575 if (!fn)
2576 return false;
     /* Build LEN + 1 so the memcpy also copies the terminating NUL.  */
2578 gimple_seq stmts = NULL;
2579 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2580 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2581 tem, build_int_cst (size_type_node, 1));
2582 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2583 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
     /* Transfer the virtual operands from the stpcpy call to the new
	memcpy call by hand, since we insert rather than replace.  */
2584 gimple_set_vuse (repl, gimple_vuse (stmt));
2585 gimple_set_vdef (repl, gimple_vdef (stmt));
2586 if (gimple_vdef (repl)
2587 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2588 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2589 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2590 /* Replace the result with dest + len. */
2591 stmts = NULL;
2592 tem = gimple_convert (&stmts, loc, sizetype, len);
2593 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2594 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2595 POINTER_PLUS_EXPR, dest, tem);
2596 gsi_replace (gsi, ret, false);
2597 /* Finally fold the memcpy call. */
2598 gimple_stmt_iterator gsi2 = *gsi;
2599 gsi_prev (&gsi2);
2600 fold_stmt (&gsi2);
2601 return true;
2604 /* Fold a call to __{,v}snprintf_chk at *GSI into plain {,v}snprintf
2605    when the object-size check can be proven redundant and the format
2606    string is safe (flag is 0, or contains no % other than "%s").
2607    FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
2608    Returns true if the call was simplified.  */
2610 static bool
2611 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2612 enum built_in_function fcode)
2614 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2615 tree dest, size, len, fn, fmt, flag;
2616 const char *fmt_str;
2618 /* Verify the required arguments in the original call. */
2619 if (gimple_call_num_args (stmt) < 5)
2620 return false;
2622 dest = gimple_call_arg (stmt, 0);
2623 len = gimple_call_arg (stmt, 1);
2624 flag = gimple_call_arg (stmt, 2);
2625 size = gimple_call_arg (stmt, 3);
2626 fmt = gimple_call_arg (stmt, 4);
     /* SIZE must be a compile-time constant to reason about the check.  */
2628 if (! tree_fits_uhwi_p (size))
2629 return false;
     /* -1 (all ones) SIZE means the object size is unknown, in which
	case the check is a no-op and can be dropped unconditionally.  */
2631 if (! integer_all_onesp (size))
2633 tree maxlen = get_maxval_strlen (len, 2);
2634 if (! tree_fits_uhwi_p (len))
2636 /* If LEN is not constant, try MAXLEN too.
2637 For MAXLEN only allow optimizing into non-_ocs function
2638 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2639 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2640 return false;
2642 else
2643 maxlen = len;
2645 if (tree_int_cst_lt (size, maxlen))
2646 return false;
2649 if (!init_target_chars ())
2650 return false;
2652 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2653 or if format doesn't contain % chars or is "%s". */
2654 if (! integer_zerop (flag))
2656 fmt_str = c_getstr (fmt);
2657 if (fmt_str == NULL)
2658 return false;
2659 if (strchr (fmt_str, target_percent) != NULL
2660 && strcmp (fmt_str, target_percent_s))
2661 return false;
2664 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2665 available. */
2666 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2667 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2668 if (!fn)
2669 return false;
2671 /* Replace the called function and the first 5 argument by 3 retaining
2672 trailing varargs. */
2673 gimple_call_set_fndecl (stmt, fn);
2674 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2675 gimple_call_set_arg (stmt, 0, dest);
2676 gimple_call_set_arg (stmt, 1, len);
2677 gimple_call_set_arg (stmt, 2, fmt);
     /* Shift the varargs left by two to drop FLAG and SIZE.  */
2678 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2679 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2680 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2681 fold_stmt (gsi);
2682 return true;
2685 /* Fold a call to __{,v}sprintf_chk at *GSI into plain {,v}sprintf
2686    when the length of the output can be bounded below SIZE and the
2687    format string is safe.  FCODE is either BUILT_IN_SPRINTF_CHK or
2688    BUILT_IN_VSPRINTF_CHK.  Returns true if the call was
     simplified.  */
2690 static bool
2691 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2692 enum built_in_function fcode)
2694 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2695 tree dest, size, len, fn, fmt, flag;
2696 const char *fmt_str;
2697 unsigned nargs = gimple_call_num_args (stmt);
2699 /* Verify the required arguments in the original call. */
2700 if (nargs < 4)
2701 return false;
2702 dest = gimple_call_arg (stmt, 0);
2703 flag = gimple_call_arg (stmt, 1);
2704 size = gimple_call_arg (stmt, 2);
2705 fmt = gimple_call_arg (stmt, 3);
     /* SIZE must be a compile-time constant to reason about the check.  */
2707 if (! tree_fits_uhwi_p (size))
2708 return false;
2710 len = NULL_TREE;
2712 if (!init_target_chars ())
2713 return false;
2715 /* Check whether the format is a literal string constant. */
2716 fmt_str = c_getstr (fmt);
2717 if (fmt_str != NULL)
2719 /* If the format doesn't contain % args or %%, we know the size. */
2720 if (strchr (fmt_str, target_percent) == 0)
2722 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2723 len = build_int_cstu (size_type_node, strlen (fmt_str));
2725 /* If the format is "%s" and first ... argument is a string literal,
2726 we know the size too. */
2727 else if (fcode == BUILT_IN_SPRINTF_CHK
2728 && strcmp (fmt_str, target_percent_s) == 0)
2730 tree arg;
2732 if (nargs == 5)
2734 arg = gimple_call_arg (stmt, 4);
2735 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2737 len = c_strlen (arg, 1);
2738 if (! len || ! tree_fits_uhwi_p (len))
2739 len = NULL_TREE;
     /* Unless the object size is unknown (-1, all ones), require the
	bounded output length to be strictly below SIZE (the NUL needs
	a byte too).  */
2745 if (! integer_all_onesp (size))
2747 if (! len || ! tree_int_cst_lt (len, size))
2748 return false;
2751 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2752 or if format doesn't contain % chars or is "%s". */
2753 if (! integer_zerop (flag))
2755 if (fmt_str == NULL)
2756 return false;
2757 if (strchr (fmt_str, target_percent) != NULL
2758 && strcmp (fmt_str, target_percent_s))
2759 return false;
2762 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2763 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2764 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2765 if (!fn)
2766 return false;
2768 /* Replace the called function and the first 4 argument by 2 retaining
2769 trailing varargs. */
2770 gimple_call_set_fndecl (stmt, fn);
2771 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2772 gimple_call_set_arg (stmt, 0, dest);
2773 gimple_call_set_arg (stmt, 1, fmt);
     /* Shift the varargs left by two to drop FLAG and SIZE.  */
2774 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2775 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2776 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2777 fold_stmt (gsi);
2778 return true;
2781 /* Simplify a call to the sprintf builtin at *GSI.  The call has
2782    arguments DEST and FMT, plus optionally one vararg ORIG; calls
2783    with more than 3 arguments are not handled.  sprintf (dest, fmt)
     with a %-free format becomes strcpy, and sprintf (dest, "%s",
     orig) becomes strcpy (dest, orig) when the return value is unused
     or ORIG's length is known.
2785 Return true if simplification was possible, otherwise false. */
2787 bool
2788 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2790 gimple *stmt = gsi_stmt (*gsi);
2791 tree dest = gimple_call_arg (stmt, 0);
2792 tree fmt = gimple_call_arg (stmt, 1);
2793 tree orig = NULL_TREE;
2794 const char *fmt_str = NULL;
2796 /* Verify the required arguments in the original call. We deal with two
2797 types of sprintf() calls: 'sprintf (str, fmt)' and
2798 'sprintf (dest, "%s", orig)'. */
2799 if (gimple_call_num_args (stmt) > 3)
2800 return false;
2802 if (gimple_call_num_args (stmt) == 3)
2803 orig = gimple_call_arg (stmt, 2);
2805 /* Check whether the format is a literal string constant. */
2806 fmt_str = c_getstr (fmt);
2807 if (fmt_str == NULL)
2808 return false;
2810 if (!init_target_chars ())
2811 return false;
2813 /* If the format doesn't contain % args or %%, use strcpy. */
2814 if (strchr (fmt_str, target_percent) == NULL)
2816 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2818 if (!fn)
2819 return false;
2821 /* Don't optimize sprintf (buf, "abc", ptr++). */
2822 if (orig)
2823 return false;
2825 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2826 'format' is known to contain no % formats. */
2827 gimple_seq stmts = NULL;
2828 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2829 gimple_seq_add_stmt_without_update (&stmts, repl);
2830 if (gimple_call_lhs (stmt))
     /* sprintf returns the number of characters written; for a
	%-free format that is simply strlen (FMT).  */
2832 repl = gimple_build_assign (gimple_call_lhs (stmt),
2833 build_int_cst (integer_type_node,
2834 strlen (fmt_str)));
2835 gimple_seq_add_stmt_without_update (&stmts, repl);
2836 gsi_replace_with_seq_vops (gsi, stmts);
2837 /* gsi now points at the assignment to the lhs, get a
2838 stmt iterator to the memcpy call.
2839 ??? We can't use gsi_for_stmt as that doesn't work when the
2840 CFG isn't built yet. */
2841 gimple_stmt_iterator gsi2 = *gsi;
2842 gsi_prev (&gsi2);
2843 fold_stmt (&gsi2);
2845 else
2847 gsi_replace_with_seq_vops (gsi, stmts);
2848 fold_stmt (gsi);
2850 return true;
2853 /* If the format is "%s", use strcpy if the result isn't used. */
2854 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2856 tree fn;
2857 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2859 if (!fn)
2860 return false;
2862 /* Don't crash on sprintf (str1, "%s"). */
2863 if (!orig)
2864 return false;
     /* A used return value requires knowing strlen (ORIG), since the
	strcpy replacement does not produce it.  */
2866 tree orig_len = NULL_TREE;
2867 if (gimple_call_lhs (stmt))
2869 orig_len = get_maxval_strlen (orig, 0);
2870 if (!orig_len)
2871 return false;
2874 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2875 gimple_seq stmts = NULL;
2876 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2877 gimple_seq_add_stmt_without_update (&stmts, repl);
2878 if (gimple_call_lhs (stmt))
2880 if (!useless_type_conversion_p (integer_type_node,
2881 TREE_TYPE (orig_len)))
2882 orig_len = fold_convert (integer_type_node, orig_len);
2883 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2884 gimple_seq_add_stmt_without_update (&stmts, repl);
2885 gsi_replace_with_seq_vops (gsi, stmts);
2886 /* gsi now points at the assignment to the lhs, get a
2887 stmt iterator to the memcpy call.
2888 ??? We can't use gsi_for_stmt as that doesn't work when the
2889 CFG isn't built yet. */
2890 gimple_stmt_iterator gsi2 = *gsi;
2891 gsi_prev (&gsi2);
2892 fold_stmt (&gsi2);
2894 else
2896 gsi_replace_with_seq_vops (gsi, stmts);
2897 fold_stmt (gsi);
2899 return true;
2901 return false;
2904 /* Simplify a call to the snprintf builtin at *GSI with arguments
2905    DEST, DESTSIZE, FMT, and optionally one vararg ORIG.  Calls with
2906    more than 4 arguments are not handled.  The call is turned into
     strcpy when the output length is provably below DESTSIZE.
2908 Return true if simplification was possible, otherwise false. */
2910 bool
2911 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2913 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2914 tree dest = gimple_call_arg (stmt, 0);
2915 tree destsize = gimple_call_arg (stmt, 1);
2916 tree fmt = gimple_call_arg (stmt, 2);
2917 tree orig = NULL_TREE;
2918 const char *fmt_str = NULL;
2920 if (gimple_call_num_args (stmt) > 4)
2921 return false;
2923 if (gimple_call_num_args (stmt) == 4)
2924 orig = gimple_call_arg (stmt, 3);
     /* The destination size must be a compile-time constant.  */
2926 if (!tree_fits_uhwi_p (destsize))
2927 return false;
2928 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2930 /* Check whether the format is a literal string constant. */
2931 fmt_str = c_getstr (fmt);
2932 if (fmt_str == NULL)
2933 return false;
2935 if (!init_target_chars ())
2936 return false;
2938 /* If the format doesn't contain % args or %%, use strcpy. */
2939 if (strchr (fmt_str, target_percent) == NULL)
2941 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2942 if (!fn)
2943 return false;
2945 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2946 if (orig)
2947 return false;
2949 /* We could expand this as
2950 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2951 or to
2952 memcpy (str, fmt_with_nul_at_cstm1, cst);
2953 but in the former case that might increase code size
2954 and in the latter case grow .rodata section too much.
2955 So punt for now. */
2956 size_t len = strlen (fmt_str);
2957 if (len >= destlen)
2958 return false;
2960 gimple_seq stmts = NULL;
2961 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2962 gimple_seq_add_stmt_without_update (&stmts, repl);
2963 if (gimple_call_lhs (stmt))
     /* snprintf returns the number of characters that would have been
	written; here that is simply strlen (FMT).  */
2965 repl = gimple_build_assign (gimple_call_lhs (stmt),
2966 build_int_cst (integer_type_node, len));
2967 gimple_seq_add_stmt_without_update (&stmts, repl);
2968 gsi_replace_with_seq_vops (gsi, stmts);
2969 /* gsi now points at the assignment to the lhs, get a
2970 stmt iterator to the memcpy call.
2971 ??? We can't use gsi_for_stmt as that doesn't work when the
2972 CFG isn't built yet. */
2973 gimple_stmt_iterator gsi2 = *gsi;
2974 gsi_prev (&gsi2);
2975 fold_stmt (&gsi2);
2977 else
2979 gsi_replace_with_seq_vops (gsi, stmts);
2980 fold_stmt (gsi);
2982 return true;
2985 /* If the format is "%s", use strcpy if the result isn't used. */
2986 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2988 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2989 if (!fn)
2990 return false;
2992 /* Don't crash on snprintf (str1, cst, "%s"). */
2993 if (!orig)
2994 return false;
     /* Require a constant upper bound on strlen (ORIG) so the copy is
	provably in bounds (and a used LHS gets the right value).  */
2996 tree orig_len = get_maxval_strlen (orig, 0);
2997 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2998 return false;
3000 /* We could expand this as
3001 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3002 or to
3003 memcpy (str1, str2_with_nul_at_cstm1, cst);
3004 but in the former case that might increase code size
3005 and in the latter case grow .rodata section too much.
3006 So punt for now. */
3007 if (compare_tree_int (orig_len, destlen) >= 0)
3008 return false;
3010 /* Convert snprintf (str1, cst, "%s", str2) into
3011 strcpy (str1, str2) if strlen (str2) < cst. */
3012 gimple_seq stmts = NULL;
3013 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3014 gimple_seq_add_stmt_without_update (&stmts, repl);
3015 if (gimple_call_lhs (stmt))
3017 if (!useless_type_conversion_p (integer_type_node,
3018 TREE_TYPE (orig_len)))
3019 orig_len = fold_convert (integer_type_node, orig_len);
3020 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3021 gimple_seq_add_stmt_without_update (&stmts, repl);
3022 gsi_replace_with_seq_vops (gsi, stmts);
3023 /* gsi now points at the assignment to the lhs, get a
3024 stmt iterator to the memcpy call.
3025 ??? We can't use gsi_for_stmt as that doesn't work when the
3026 CFG isn't built yet. */
3027 gimple_stmt_iterator gsi2 = *gsi;
3028 gsi_prev (&gsi2);
3029 fold_stmt (&gsi2);
3031 else
3033 gsi_replace_with_seq_vops (gsi, stmts);
3034 fold_stmt (gsi);
3036 return true;
3038 return false;
3041 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
3042    builtins at *GSI.  FP, FMT, and ARG are the arguments to the call.
3043    We don't fold calls with more than 3 arguments, and ARG may be
3044    null in the 2-argument case.  FCODE is the BUILT_IN_* code of the
3045    function being folded.  Calls are folded into fputs/fputc (or
3046    deleted for an empty format).  Returns true if the call was
3047    simplified, false otherwise.  */
3049 static bool
3050 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3051 tree fp, tree fmt, tree arg,
3052 enum built_in_function fcode)
3054 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3055 tree fn_fputc, fn_fputs;
3056 const char *fmt_str = NULL;
3058 /* If the return value is used, don't do the transformation. */
3059 if (gimple_call_lhs (stmt) != NULL_TREE)
3060 return false;
3062 /* Check whether the format is a literal string constant. */
3063 fmt_str = c_getstr (fmt);
3064 if (fmt_str == NULL)
3065 return false;
3067 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3069 /* If we're using an unlocked function, assume the other
3070 unlocked functions exist explicitly. */
3071 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3072 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3074 else
3076 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3077 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3080 if (!init_target_chars ())
3081 return false;
3083 /* If the format contains no % at all, emit it with fputs.  */
3084 if (strchr (fmt_str, target_percent) == NULL)
     /* A va_list variant with no conversions would still consume no
	arguments, but a non-va_list call with a trailing ARG must be
	left alone (the argument may have side effects).  */
3086 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3087 && arg)
3088 return false;
3090 /* If the format specifier was "", fprintf does nothing. */
3091 if (fmt_str[0] == '\0')
3093 replace_call_with_value (gsi, NULL_TREE);
3094 return true;
3097 /* When "string" doesn't contain %, replace all cases of
3098 fprintf (fp, string) with fputs (string, fp). The fputs
3099 builtin will take care of special cases like length == 1. */
3100 if (fn_fputs)
3102 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3103 replace_call_with_call_and_fold (gsi, repl);
3104 return true;
3108 /* The other optimizations can be done only on the non-va_list variants. */
3109 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3110 return false;
3112 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3113 else if (strcmp (fmt_str, target_percent_s) == 0)
3115 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3116 return false;
3117 if (fn_fputs)
3119 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3120 replace_call_with_call_and_fold (gsi, repl);
3121 return true;
3125 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3126 else if (strcmp (fmt_str, target_percent_c) == 0)
3128 if (!arg
3129 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3130 return false;
3131 if (fn_fputc)
3133 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3134 replace_call_with_call_and_fold (gsi, repl);
3135 return true;
3139 return false;
3142 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3143 FMT and ARG are the arguments to the call; we don't fold cases with
3144 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3146 Return NULL_TREE if no simplification was possible, otherwise return the
3147 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3148 code of the function to be simplified. */
3150 static bool
3151 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3152 tree arg, enum built_in_function fcode)
3154 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3155 tree fn_putchar, fn_puts, newarg;
3156 const char *fmt_str = NULL;
3158 /* If the return value is used, don't do the transformation. */
3159 if (gimple_call_lhs (stmt) != NULL_TREE)
3160 return false;
3162 /* Check whether the format is a literal string constant. */
3163 fmt_str = c_getstr (fmt);
3164 if (fmt_str == NULL)
3165 return false;
3167 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3169 /* If we're using an unlocked function, assume the other
3170 unlocked functions exist explicitly. */
3171 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3172 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3174 else
3176 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3177 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3180 if (!init_target_chars ())
3181 return false;
/* Case 1: the whole format is "%s" (with ARG a known string), or the
   format contains no '%' at all.  Either way the emitted text STR is
   known at compile time.  */
3183 if (strcmp (fmt_str, target_percent_s) == 0
3184 || strchr (fmt_str, target_percent) == NULL)
3186 const char *str;
3188 if (strcmp (fmt_str, target_percent_s) == 0)
3190 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3191 return false;
3193 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3194 return false;
/* The "%s" argument must itself be a string constant.  */
3196 str = c_getstr (arg);
3197 if (str == NULL)
3198 return false;
3200 else
3202 /* The format specifier doesn't contain any '%' characters. */
3203 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3204 && arg)
3205 return false;
3206 str = fmt_str;
3209 /* If the string was "", printf does nothing. */
3210 if (str[0] == '\0')
3212 replace_call_with_value (gsi, NULL_TREE);
3213 return true;
3216 /* If the string has length of 1, call putchar. */
3217 if (str[1] == '\0')
3219 /* Given printf("c"), (where c is any one character,)
3220 convert "c"[0] to an int and pass that to the replacement
3221 function. */
3222 newarg = build_int_cst (integer_type_node, str[0]);
3223 if (fn_putchar)
3225 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3226 replace_call_with_call_and_fold (gsi, repl);
3227 return true;
3230 else
3232 /* If the string was "string\n", call puts("string"). */
3233 size_t len = strlen (str);
/* LEN >= 2 here: the empty and single-char cases returned above.
   The (int) casts guard against a length that does not fit in int.  */
3234 if ((unsigned char)str[len - 1] == target_newline
3235 && (size_t) (int) len == len
3236 && (int) len > 0)
3238 char *newstr;
3239 tree offset_node, string_cst;
3241 /* Create a NUL-terminated string that's one char shorter
3242 than the original, stripping off the trailing '\n'. */
3243 newarg = build_string_literal (len, str)
3244 string_cst = string_constant (newarg, &offset_node);
3245 gcc_checking_assert (string_cst
3246 && (TREE_STRING_LENGTH (string_cst)
3247 == (int) len)
3248 && integer_zerop (offset_node)
3249 && (unsigned char)
3250 TREE_STRING_POINTER (string_cst)[len - 1]
3251 == target_newline);
3252 /* build_string_literal creates a new STRING_CST,
3253 modify it in place to avoid double copying. */
3254 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3255 newstr[len - 1] = '\0';
3256 if (fn_puts)
3258 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3259 replace_call_with_call_and_fold (gsi, repl);
3260 return true;
3263 else
3264 /* We'd like to arrange to call fputs(string,stdout) here,
3265 but we need stdout and don't have a way to get it yet. */
3266 return false;
3270 /* The other optimizations can be done only on the non-va_list variants. */
3271 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3272 return false;
3274 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3275 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3277 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3278 return false;
3279 if (fn_puts)
3281 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3282 replace_call_with_call_and_fold (gsi, repl);
3283 return true;
3287 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3288 else if (strcmp (fmt_str, target_percent_c) == 0)
3290 if (!arg || ! useless_type_conversion_p (integer_type_node,
3291 TREE_TYPE (arg)))
3292 return false;
3293 if (fn_putchar)
3295 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3296 replace_call_with_call_and_fold (gsi, repl);
3297 return true;
3301 return false;
3306 /* Fold a call to __builtin_strlen with known length LEN. */
3308 static bool
3309 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3311 gimple *stmt = gsi_stmt (*gsi);
3312 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3313 if (!len)
3314 return false;
3315 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3316 replace_call_with_value (gsi, len);
3317 return true;
3320 /* Fold a call to __builtin_acc_on_device. */
3322 static bool
3323 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3325 /* Defer folding until we know which compiler we're in. */
3326 if (symtab->state != EXPANSION)
3327 return false;
3329 unsigned val_host = GOMP_DEVICE_HOST;
3330 unsigned val_dev = GOMP_DEVICE_NONE;
3332 #ifdef ACCEL_COMPILER
3333 val_host = GOMP_DEVICE_NOT_HOST;
3334 val_dev = ACCEL_COMPILER_acc_device;
3335 #endif
3337 location_t loc = gimple_location (gsi_stmt (*gsi));
3339 tree host_eq = make_ssa_name (boolean_type_node);
3340 gimple *host_ass = gimple_build_assign
3341 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3342 gimple_set_location (host_ass, loc);
3343 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3345 tree dev_eq = make_ssa_name (boolean_type_node);
3346 gimple *dev_ass = gimple_build_assign
3347 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3348 gimple_set_location (dev_ass, loc);
3349 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3351 tree result = make_ssa_name (boolean_type_node);
3352 gimple *result_ass = gimple_build_assign
3353 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3354 gimple_set_location (result_ass, loc);
3355 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3357 replace_call_with_value (gsi, result);
3359 return true;
3362 /* Fold realloc (0, n) -> malloc (n). */
3364 static bool
3365 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3367 gimple *stmt = gsi_stmt (*gsi);
3368 tree arg = gimple_call_arg (stmt, 0);
3369 tree size = gimple_call_arg (stmt, 1);
3371 if (operand_equal_p (arg, null_pointer_node, 0))
3373 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3374 if (fn_malloc)
3376 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3377 replace_call_with_call_and_fold (gsi, repl);
3378 return true;
3381 return false;
3384 /* Fold the non-target builtin at *GSI and return whether any simplification
3385 was made. */
3387 static bool
3388 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3390 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3391 tree callee = gimple_call_fndecl (stmt);
3393 /* Give up for always_inline inline builtins until they are
3394 inlined. */
3395 if (avoid_folding_inline_builtin (callee))
3396 return false;
3398 unsigned n = gimple_call_num_args (stmt);
3399 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3400 switch (fcode)
/* Dispatch to a dedicated folder per builtin; each returns true iff
   it replaced or simplified the call at *GSI.  */
3402 case BUILT_IN_BCMP:
3403 return gimple_fold_builtin_bcmp (gsi);
3404 case BUILT_IN_BCOPY:
3405 return gimple_fold_builtin_bcopy (gsi);
3406 case BUILT_IN_BZERO:
3407 return gimple_fold_builtin_bzero (gsi);
3409 case BUILT_IN_MEMSET:
3410 return gimple_fold_builtin_memset (gsi,
3411 gimple_call_arg (stmt, 1),
3412 gimple_call_arg (stmt, 2));
/* The last argument of gimple_fold_builtin_memory_op distinguishes
   memcpy (0), mempcpy (1) and memmove (3).  */
3413 case BUILT_IN_MEMCPY:
3414 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3415 gimple_call_arg (stmt, 1), 0);
3416 case BUILT_IN_MEMPCPY:
3417 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3418 gimple_call_arg (stmt, 1), 1);
3419 case BUILT_IN_MEMMOVE:
3420 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3421 gimple_call_arg (stmt, 1), 3);
3422 case BUILT_IN_SPRINTF_CHK:
3423 case BUILT_IN_VSPRINTF_CHK:
3424 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3425 case BUILT_IN_STRCAT_CHK:
3426 return gimple_fold_builtin_strcat_chk (gsi);
3427 case BUILT_IN_STRNCAT_CHK:
3428 return gimple_fold_builtin_strncat_chk (gsi);
3429 case BUILT_IN_STRLEN:
3430 return gimple_fold_builtin_strlen (gsi);
3431 case BUILT_IN_STRCPY:
3432 return gimple_fold_builtin_strcpy (gsi,
3433 gimple_call_arg (stmt, 0),
3434 gimple_call_arg (stmt, 1));
3435 case BUILT_IN_STRNCPY:
3436 return gimple_fold_builtin_strncpy (gsi,
3437 gimple_call_arg (stmt, 0),
3438 gimple_call_arg (stmt, 1),
3439 gimple_call_arg (stmt, 2));
3440 case BUILT_IN_STRCAT:
3441 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3442 gimple_call_arg (stmt, 1));
3443 case BUILT_IN_STRNCAT:
3444 return gimple_fold_builtin_strncat (gsi);
3445 case BUILT_IN_INDEX:
3446 case BUILT_IN_STRCHR:
3447 return gimple_fold_builtin_strchr (gsi, false);
3448 case BUILT_IN_RINDEX:
3449 case BUILT_IN_STRRCHR:
3450 return gimple_fold_builtin_strchr (gsi, true);
3451 case BUILT_IN_STRSTR:
3452 return gimple_fold_builtin_strstr (gsi);
3453 case BUILT_IN_STRCMP:
3454 case BUILT_IN_STRCASECMP:
3455 case BUILT_IN_STRNCMP:
3456 case BUILT_IN_STRNCASECMP:
3457 return gimple_fold_builtin_string_compare (gsi);
3458 case BUILT_IN_MEMCHR:
3459 return gimple_fold_builtin_memchr (gsi);
3460 case BUILT_IN_FPUTS:
3461 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3462 gimple_call_arg (stmt, 1), false);
3463 case BUILT_IN_FPUTS_UNLOCKED:
3464 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3465 gimple_call_arg (stmt, 1), true);
3466 case BUILT_IN_MEMCPY_CHK:
3467 case BUILT_IN_MEMPCPY_CHK:
3468 case BUILT_IN_MEMMOVE_CHK:
3469 case BUILT_IN_MEMSET_CHK:
3470 return gimple_fold_builtin_memory_chk (gsi,
3471 gimple_call_arg (stmt, 0),
3472 gimple_call_arg (stmt, 1),
3473 gimple_call_arg (stmt, 2),
3474 gimple_call_arg (stmt, 3),
3475 fcode);
3476 case BUILT_IN_STPCPY:
3477 return gimple_fold_builtin_stpcpy (gsi);
3478 case BUILT_IN_STRCPY_CHK:
3479 case BUILT_IN_STPCPY_CHK:
3480 return gimple_fold_builtin_stxcpy_chk (gsi,
3481 gimple_call_arg (stmt, 0),
3482 gimple_call_arg (stmt, 1),
3483 gimple_call_arg (stmt, 2),
3484 fcode);
3485 case BUILT_IN_STRNCPY_CHK:
3486 case BUILT_IN_STPNCPY_CHK:
3487 return gimple_fold_builtin_stxncpy_chk (gsi,
3488 gimple_call_arg (stmt, 0),
3489 gimple_call_arg (stmt, 1),
3490 gimple_call_arg (stmt, 2),
3491 gimple_call_arg (stmt, 3),
3492 fcode);
3493 case BUILT_IN_SNPRINTF_CHK:
3494 case BUILT_IN_VSNPRINTF_CHK:
3495 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
/* The printf/fprintf folders below only handle the argument counts
   checked here; for other counts fall through to the generic folder.  */
3497 case BUILT_IN_FPRINTF:
3498 case BUILT_IN_FPRINTF_UNLOCKED:
3499 case BUILT_IN_VFPRINTF:
3500 if (n == 2 || n == 3)
3501 return gimple_fold_builtin_fprintf (gsi,
3502 gimple_call_arg (stmt, 0),
3503 gimple_call_arg (stmt, 1),
3504 n == 3
3505 ? gimple_call_arg (stmt, 2)
3506 : NULL_TREE,
3507 fcode);
3508 break;
3509 case BUILT_IN_FPRINTF_CHK:
3510 case BUILT_IN_VFPRINTF_CHK:
3511 if (n == 3 || n == 4)
3512 return gimple_fold_builtin_fprintf (gsi,
3513 gimple_call_arg (stmt, 0),
3514 gimple_call_arg (stmt, 2),
3515 n == 4
3516 ? gimple_call_arg (stmt, 3)
3517 : NULL_TREE,
3518 fcode);
3519 break;
3520 case BUILT_IN_PRINTF:
3521 case BUILT_IN_PRINTF_UNLOCKED:
3522 case BUILT_IN_VPRINTF:
3523 if (n == 1 || n == 2)
3524 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3525 n == 2
3526 ? gimple_call_arg (stmt, 1)
3527 : NULL_TREE, fcode);
3528 break;
3529 case BUILT_IN_PRINTF_CHK:
3530 case BUILT_IN_VPRINTF_CHK:
3531 if (n == 2 || n == 3)
3532 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3533 n == 3
3534 ? gimple_call_arg (stmt, 2)
3535 : NULL_TREE, fcode);
3536 break;
3537 case BUILT_IN_ACC_ON_DEVICE:
3538 return gimple_fold_builtin_acc_on_device (gsi,
3539 gimple_call_arg (stmt, 0));
3540 case BUILT_IN_REALLOC:
3541 return gimple_fold_builtin_realloc (gsi);
3543 default:;
3546 /* Try the generic builtin folder. */
3547 bool ignore = (gimple_call_lhs (stmt) == NULL);
3548 tree result = fold_call_stmt (stmt, ignore);
3549 if (result)
3551 if (ignore)
3552 STRIP_NOPS (result);
3553 else
3554 result = fold_convert (gimple_call_return_type (stmt), result);
3555 if (!update_call_from_tree (gsi, result))
3556 gimplify_and_update_call_from_tree (gsi, result);
3557 return true;
3560 return false;
3563 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3564 function calls to constants, where possible. */
3566 static tree
3567 fold_internal_goacc_dim (const gimple *call)
3569 int axis = oacc_get_ifn_dim_arg (call);
3570 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3571 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3572 tree result = NULL_TREE;
3574 /* If the size is 1, or we only want the size and it is not dynamic,
3575 we know the answer. */
3576 if (size == 1 || (!is_pos && size))
3578 tree type = TREE_TYPE (gimple_call_lhs (call));
3579 result = build_int_cst (type, size - is_pos);
3582 return result;
3585 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3586 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3587 &var where var is only addressable because of such calls. */
3589 bool
3590 optimize_atomic_compare_exchange_p (gimple *stmt)
3592 if (gimple_call_num_args (stmt) != 6
3593 || !flag_inline_atomics
3594 || !optimize
3595 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3596 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3597 || !gimple_vdef (stmt)
3598 || !gimple_vuse (stmt))
3599 return false;
3601 tree fndecl = gimple_call_fndecl (stmt);
3602 switch (DECL_FUNCTION_CODE (fndecl))
3604 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3605 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3606 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3607 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3608 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3609 break;
3610 default:
3611 return false;
3614 tree expected = gimple_call_arg (stmt, 1);
3615 if (TREE_CODE (expected) != ADDR_EXPR
3616 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3617 return false;
3619 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3620 if (!is_gimple_reg_type (etype)
3621 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3622 || TREE_THIS_VOLATILE (etype)
3623 || VECTOR_TYPE_P (etype)
3624 || TREE_CODE (etype) == COMPLEX_TYPE
3625 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3626 might not preserve all the bits. See PR71716. */
3627 || SCALAR_FLOAT_TYPE_P (etype)
3628 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3629 return false;
3631 tree weak = gimple_call_arg (stmt, 3);
3632 if (!integer_zerop (weak) && !integer_onep (weak))
3633 return false;
3635 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3636 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3637 machine_mode mode = TYPE_MODE (itype);
3639 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3640 == CODE_FOR_nothing
3641 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3642 return false;
3644 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3645 return false;
3647 return true;
3650 /* Fold
3651 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3652 into
3653 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3654 i = IMAGPART_EXPR <t>;
3655 r = (_Bool) i;
3656 e = REALPART_EXPR <t>; */
3658 void
3659 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3661 gimple *stmt = gsi_stmt (*gsi);
3662 tree fndecl = gimple_call_fndecl (stmt);
3663 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3664 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3665 tree ctype = build_complex_type (itype);
/* EXPECTED is the variable whose address is the second argument;
   optimize_atomic_compare_exchange_p has verified it is an ADDR_EXPR
   of a local.  */
3666 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3667 bool throws = false;
3668 edge e = NULL;
/* Load the current value of EXPECTED before the call.  */
3669 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3670 expected);
3671 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3672 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3673 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3675 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3676 build1 (VIEW_CONVERT_EXPR, itype,
3677 gimple_assign_lhs (g)));
3678 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode weak-ness in bit 8 of the flag and the byte size in the low
   bits, i.e. the "w * 256 + N" of the comment above.  */
3680 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3681 + int_size_in_bytes (itype);
3682 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3683 gimple_call_arg (stmt, 0),
3684 gimple_assign_lhs (g),
3685 gimple_call_arg (stmt, 2),
3686 build_int_cst (integer_type_node, flag),
3687 gimple_call_arg (stmt, 4),
3688 gimple_call_arg (stmt, 5));
3689 tree lhs = make_ssa_name (ctype);
3690 gimple_call_set_lhs (g, lhs);
/* Transfer the virtual operands of the old call to the new one.  */
3691 gimple_set_vdef (g, gimple_vdef (stmt));
3692 gimple_set_vuse (g, gimple_vuse (stmt));
3693 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3694 tree oldlhs = gimple_call_lhs (stmt);
/* If the original call could throw, statements extracting the result
   must be placed on the fall-through edge instead of after the call.  */
3695 if (stmt_can_throw_internal (stmt))
3697 throws = true;
3698 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3700 gimple_call_set_nothrow (as_a <gcall *> (g),
3701 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3702 gimple_call_set_lhs (stmt, NULL_TREE);
3703 gsi_replace (gsi, g, true);
/* r = (_Bool) IMAGPART_EXPR <t>, only if the old result was used.  */
3704 if (oldlhs)
3706 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3707 build1 (IMAGPART_EXPR, itype, lhs));
3708 if (throws)
3710 gsi_insert_on_edge_immediate (e, g);
3711 *gsi = gsi_for_stmt (g);
3713 else
3714 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3715 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3716 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>, converted back to the type of EXPECTED if
   needed.  */
3718 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3719 build1 (REALPART_EXPR, itype, lhs));
3720 if (throws && oldlhs == NULL_TREE)
3722 gsi_insert_on_edge_immediate (e, g);
3723 *gsi = gsi_for_stmt (g);
3725 else
3726 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3727 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3729 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3730 VIEW_CONVERT_EXPR,
3731 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3732 gimple_assign_lhs (g)));
3733 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3735 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3736 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave *GSI at the statement that loaded EXPECTED.  */
3737 *gsi = gsiret;
3740 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3741 doesn't fit into TYPE. The test for overflow should be regardless of
3742 -fwrapv, and even for unsigned types. */
3744 bool
3745 arith_overflowed_p (enum tree_code code, const_tree type,
3746 const_tree arg0, const_tree arg1)
3748 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3749 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3750 widest2_int_cst;
3751 widest2_int warg0 = widest2_int_cst (arg0);
3752 widest2_int warg1 = widest2_int_cst (arg1);
3753 widest2_int wres;
3754 switch (code)
3756 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3757 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3758 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3759 default: gcc_unreachable ();
3761 signop sign = TYPE_SIGN (type);
3762 if (sign == UNSIGNED && wi::neg_p (wres))
3763 return true;
3764 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3767 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3768 The statement may be replaced by another statement, e.g., if the call
3769 simplifies to a constant value. Return true if any changes were made.
3770 It is assumed that the operands have been previously folded. */
3772 static bool
3773 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3775 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3776 tree callee;
3777 bool changed = false;
3778 unsigned i;
3780 /* Fold *& in call arguments. */
3781 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3782 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3784 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3785 if (tmp)
3787 gimple_call_set_arg (stmt, i, tmp);
3788 changed = true;
3792 /* Check for virtual calls that became direct calls. */
3793 callee = gimple_call_fn (stmt);
3794 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3796 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* Dump a note if the devirtualized target contradicts the type
   inheritance information.  */
3798 if (dump_file && virtual_method_call_p (callee)
3799 && !possible_polymorphic_call_target_p
3800 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3801 (OBJ_TYPE_REF_EXPR (callee)))))
3803 fprintf (dump_file,
3804 "Type inheritance inconsistent devirtualization of ");
3805 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3806 fprintf (dump_file, " to ");
3807 print_generic_expr (dump_file, callee, TDF_SLIM);
3808 fprintf (dump_file, "\n");
3811 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3812 changed = true;
3814 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3816 bool final;
3817 vec <cgraph_node *>targets
3818 = possible_polymorphic_call_targets (callee, stmt, &final);
/* With a complete target list of at most one entry, devirtualize
   to that target, or to __builtin_unreachable if there is none.  */
3819 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3821 tree lhs = gimple_call_lhs (stmt);
3822 if (dump_enabled_p ())
3824 location_t loc = gimple_location_safe (stmt);
3825 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3826 "folding virtual function call to %s\n",
3827 targets.length () == 1
3828 ? targets[0]->name ()
3829 : "__builtin_unreachable");
3831 if (targets.length () == 1)
3833 tree fndecl = targets[0]->decl;
3834 gimple_call_set_fndecl (stmt, fndecl);
3835 changed = true;
3836 /* If changing the call to __cxa_pure_virtual
3837 or similar noreturn function, adjust gimple_call_fntype
3838 too. */
3839 if (gimple_call_noreturn_p (stmt)
3840 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3841 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3842 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3843 == void_type_node))
3844 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3845 /* If the call becomes noreturn, remove the lhs. */
3846 if (lhs
3847 && gimple_call_noreturn_p (stmt)
3848 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3849 || should_remove_lhs_p (lhs)))
3851 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep the SSA name defined by assigning it a default def.  */
3853 tree var = create_tmp_var (TREE_TYPE (lhs));
3854 tree def = get_or_create_ssa_default_def (cfun, var);
3855 gimple *new_stmt = gimple_build_assign (lhs, def);
3856 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3858 gimple_call_set_lhs (stmt, NULL_TREE);
3860 maybe_remove_unused_call_args (cfun, stmt);
3862 else
3864 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3865 gimple *new_stmt = gimple_build_call (fndecl, 0);
3866 gimple_set_location (new_stmt, gimple_location (stmt));
3867 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3869 tree var = create_tmp_var (TREE_TYPE (lhs));
3870 tree def = get_or_create_ssa_default_def (cfun, var);
3872 /* To satisfy condition for
3873 cgraph_update_edges_for_call_stmt_node,
3874 we need to preserve GIMPLE_CALL statement
3875 at position of GSI iterator. */
3876 update_call_from_tree (gsi, def);
3877 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3879 else
3881 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3882 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3883 gsi_replace (gsi, new_stmt, false);
3885 return true;
3891 /* Check for indirect calls that became direct calls, and then
3892 no longer require a static chain. */
3893 if (gimple_call_chain (stmt))
3895 tree fn = gimple_call_fndecl (stmt);
3896 if (fn && !DECL_STATIC_CHAIN (fn))
3898 gimple_call_set_chain (stmt, NULL);
3899 changed = true;
3901 else
3903 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3904 if (tmp)
3906 gimple_call_set_chain (stmt, tmp);
3907 changed = true;
3912 if (inplace)
3913 return changed;
3915 /* Check for builtins that CCP can handle using information not
3916 available in the generic fold routines. */
3917 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3919 if (gimple_fold_builtin (gsi))
3920 changed = true;
3922 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3924 changed |= targetm.gimple_fold_builtin (gsi);
3926 else if (gimple_call_internal_p (stmt))
3928 enum tree_code subcode = ERROR_MARK;
3929 tree result = NULL_TREE;
3930 bool cplx_result = false;
3931 tree overflow = NULL_TREE;
3932 switch (gimple_call_internal_fn (stmt))
3934 case IFN_BUILTIN_EXPECT:
3935 result = fold_builtin_expect (gimple_location (stmt),
3936 gimple_call_arg (stmt, 0),
3937 gimple_call_arg (stmt, 1),
3938 gimple_call_arg (stmt, 2));
3939 break;
3940 case IFN_UBSAN_OBJECT_SIZE:
3941 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3942 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3943 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3944 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3945 gimple_call_arg (stmt, 2))))
3947 gsi_replace (gsi, gimple_build_nop (), false);
3948 unlink_stmt_vdef (stmt);
3949 release_defs (stmt);
3950 return true;
3952 break;
3953 case IFN_GOACC_DIM_SIZE:
3954 case IFN_GOACC_DIM_POS:
3955 result = fold_internal_goacc_dim (stmt);
3956 break;
3957 case IFN_UBSAN_CHECK_ADD:
3958 subcode = PLUS_EXPR;
3959 break;
3960 case IFN_UBSAN_CHECK_SUB:
3961 subcode = MINUS_EXPR;
3962 break;
3963 case IFN_UBSAN_CHECK_MUL:
3964 subcode = MULT_EXPR;
3965 break;
3966 case IFN_ADD_OVERFLOW:
3967 subcode = PLUS_EXPR;
3968 cplx_result = true;
3969 break;
3970 case IFN_SUB_OVERFLOW:
3971 subcode = MINUS_EXPR;
3972 cplx_result = true;
3973 break;
3974 case IFN_MUL_OVERFLOW:
3975 subcode = MULT_EXPR;
3976 cplx_result = true;
3977 break;
3978 default:
3979 break;
/* Common arithmetic folding for the UBSAN_CHECK_* and *_OVERFLOW
   internal functions selected above.  For the *_OVERFLOW variants
   (CPLX_RESULT) the lhs is a complex value of <result, overflow>.  */
3981 if (subcode != ERROR_MARK)
3983 tree arg0 = gimple_call_arg (stmt, 0);
3984 tree arg1 = gimple_call_arg (stmt, 1);
3985 tree type = TREE_TYPE (arg0);
3986 if (cplx_result)
3988 tree lhs = gimple_call_lhs (stmt);
3989 if (lhs == NULL_TREE)
3990 type = NULL_TREE;
3991 else
3992 type = TREE_TYPE (TREE_TYPE (lhs));
3994 if (type == NULL_TREE)
3996 /* x = y + 0; x = y - 0; x = y * 0; */
3997 else if (integer_zerop (arg1))
3998 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3999 /* x = 0 + y; x = 0 * y; */
4000 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4001 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4002 /* x = y - y; */
4003 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4004 result = integer_zero_node;
4005 /* x = y * 1; x = 1 * y; */
4006 else if (subcode == MULT_EXPR && integer_onep (arg1))
4007 result = arg0;
4008 else if (subcode == MULT_EXPR && integer_onep (arg0))
4009 result = arg1;
4010 else if (TREE_CODE (arg0) == INTEGER_CST
4011 && TREE_CODE (arg1) == INTEGER_CST)
4013 if (cplx_result)
4014 result = int_const_binop (subcode, fold_convert (type, arg0),
4015 fold_convert (type, arg1));
4016 else
4017 result = int_const_binop (subcode, arg0, arg1);
4018 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4020 if (cplx_result)
4021 overflow = build_one_cst (type);
4022 else
4023 result = NULL_TREE;
/* Normalize RESULT to TYPE, re-checking for overflow when the
   picked operand has a different type than the lhs.  */
4026 if (result)
4028 if (result == integer_zero_node)
4029 result = build_zero_cst (type);
4030 else if (cplx_result && TREE_TYPE (result) != type)
4032 if (TREE_CODE (result) == INTEGER_CST)
4034 if (arith_overflowed_p (PLUS_EXPR, type, result,
4035 integer_zero_node))
4036 overflow = build_one_cst (type);
4038 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4039 && TYPE_UNSIGNED (type))
4040 || (TYPE_PRECISION (type)
4041 < (TYPE_PRECISION (TREE_TYPE (result))
4042 + (TYPE_UNSIGNED (TREE_TYPE (result))
4043 && !TYPE_UNSIGNED (type)))))
4044 result = NULL_TREE;
4045 if (result)
4046 result = fold_convert (type, result);
4051 if (result)
4053 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4054 result = drop_tree_overflow (result);
4055 if (cplx_result)
4057 if (overflow == NULL_TREE)
4058 overflow = build_zero_cst (TREE_TYPE (result));
4059 tree ctype = build_complex_type (TREE_TYPE (result));
4060 if (TREE_CODE (result) == INTEGER_CST
4061 && TREE_CODE (overflow) == INTEGER_CST)
4062 result = build_complex (ctype, result, overflow);
4063 else
4064 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4065 ctype, result, overflow);
4067 if (!update_call_from_tree (gsi, result))
4068 gimplify_and_update_call_from_tree (gsi, result);
4069 changed = true;
4073 return changed;
4077 /* Return true whether NAME has a use on STMT. */
4079 static bool
4080 has_use_on_stmt (tree name, gimple *stmt)
4082 imm_use_iterator iter;
4083 use_operand_p use_p;
4084 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4085 if (USE_STMT (use_p) == stmt)
4086 return true;
4087 return false;
4090 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4091 gimple_simplify.
4093 Replaces *GSI with the simplification result in RCODE and OPS
4094 and the associated statements in *SEQ. Does the replacement
4095 according to INPLACE and returns true if the operation succeeded. */
4097 static bool
4098 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4099 code_helper rcode, tree *ops,
4100 gimple_seq *seq, bool inplace)
4102 gimple *stmt = gsi_stmt (*gsi);
4104 /* Play safe and do not allow abnormals to be mentioned in
4105 newly created statements. See also maybe_push_res_to_seq.
4106 As an exception allow such uses if there was a use of the
4107 same SSA name on the old stmt. */
4108 if ((TREE_CODE (ops[0]) == SSA_NAME
4109 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4110 && !has_use_on_stmt (ops[0], stmt))
4111 || (ops[1]
4112 && TREE_CODE (ops[1]) == SSA_NAME
4113 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4114 && !has_use_on_stmt (ops[1], stmt))
4115 || (ops[2]
4116 && TREE_CODE (ops[2]) == SSA_NAME
4117 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4118 && !has_use_on_stmt (ops[2], stmt))
4119 || (COMPARISON_CLASS_P (ops[0])
4120 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4121 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4122 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4123 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4124 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4125 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4126 return false;
4128 /* Don't insert new statements when INPLACE is true, even if we could
4129 reuse STMT for the final statement. */
4130 if (inplace && !gimple_seq_empty_p (*seq))
4131 return false;
/* Case 1: STMT is a GIMPLE_COND; the simplification must become a
   valid condition (a comparison, an SSA name tested != 0, or a
   constant).  */
4133 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4135 gcc_assert (rcode.is_tree_code ());
4136 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4137 /* GIMPLE_CONDs condition may not throw. */
4138 && (!flag_exceptions
4139 || !cfun->can_throw_non_call_exceptions
4140 || !operation_could_trap_p (rcode,
4141 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4142 false, NULL_TREE)))
4143 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4144 else if (rcode == SSA_NAME)
4145 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4146 build_zero_cst (TREE_TYPE (ops[0])));
4147 else if (rcode == INTEGER_CST)
4149 if (integer_zerop (ops[0]))
4150 gimple_cond_make_false (cond_stmt);
4151 else
4152 gimple_cond_make_true (cond_stmt);
4154 else if (!inplace)
4156 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4157 ops, seq);
4158 if (!res)
4159 return false;
4160 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4161 build_zero_cst (TREE_TYPE (res)));
4163 else
4164 return false;
4165 if (dump_file && (dump_flags & TDF_DETAILS))
4167 fprintf (dump_file, "gimple_simplified to ");
4168 if (!gimple_seq_empty_p (*seq))
4169 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4170 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4171 0, TDF_SLIM);
4173 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4174 return true;
/* Case 2: an assignment whose rhs is rewritten with the simplified
   tree-code operation.  In-place updates may not grow the rhs.  */
4176 else if (is_gimple_assign (stmt)
4177 && rcode.is_tree_code ())
4179 if (!inplace
4180 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4182 maybe_build_generic_op (rcode,
4183 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4184 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4185 if (dump_file && (dump_flags & TDF_DETAILS))
4187 fprintf (dump_file, "gimple_simplified to ");
4188 if (!gimple_seq_empty_p (*seq))
4189 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4190 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4191 0, TDF_SLIM);
4193 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4194 return true;
/* Case 3: the simplification kept the same (combined) function; only
   the call's arguments are updated in place.  */
4197 else if (rcode.is_fn_code ()
4198 && gimple_call_combined_fn (stmt) == rcode)
4200 unsigned i;
4201 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4203 gcc_assert (ops[i] != NULL_TREE);
4204 gimple_call_set_arg (stmt, i, ops[i]);
4206 if (i < 3)
4207 gcc_assert (ops[i] == NULL_TREE);
4208 if (dump_file && (dump_flags & TDF_DETAILS))
4210 fprintf (dump_file, "gimple_simplified to ");
4211 if (!gimple_seq_empty_p (*seq))
4212 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4213 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4215 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4216 return true;
/* Case 4: materialize the whole simplification as a new sequence
   replacing STMT; only allowed when not folding in place.  */
4218 else if (!inplace)
4220 if (gimple_has_lhs (stmt))
4222 tree lhs = gimple_get_lhs (stmt);
4223 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4224 ops, seq, lhs))
4225 return false;
4226 if (dump_file && (dump_flags & TDF_DETAILS))
4228 fprintf (dump_file, "gimple_simplified to ");
4229 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4231 gsi_replace_with_seq_vops (gsi, *seq);
4232 return true;
4234 else
4235 gcc_unreachable ();
4238 return false;
4241 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4243 static bool
4244 maybe_canonicalize_mem_ref_addr (tree *t)
4246 bool res = false;
4248 if (TREE_CODE (*t) == ADDR_EXPR)
4249 t = &TREE_OPERAND (*t, 0);
4251 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4252 generic vector extension. The actual vector referenced is
4253 view-converted to an array type for this purpose. If the index
4254 is constant the canonical representation in the middle-end is a
4255 BIT_FIELD_REF so re-write the former to the latter here. */
4256 if (TREE_CODE (*t) == ARRAY_REF
4257 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4258 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4259 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4261 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4262 if (VECTOR_TYPE_P (vtype))
4264 tree low = array_ref_low_bound (*t);
4265 if (TREE_CODE (low) == INTEGER_CST)
4267 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4269 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4270 wi::to_widest (low));
4271 idx = wi::mul (idx, wi::to_widest
4272 (TYPE_SIZE (TREE_TYPE (*t))));
4273 widest_int ext
4274 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4275 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4277 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4278 TREE_TYPE (*t),
4279 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4280 TYPE_SIZE (TREE_TYPE (*t)),
4281 wide_int_to_tree (bitsizetype, idx));
4282 res = true;
4289 while (handled_component_p (*t))
4290 t = &TREE_OPERAND (*t, 0);
4292 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4293 of invariant addresses into a SSA name MEM_REF address. */
4294 if (TREE_CODE (*t) == MEM_REF
4295 || TREE_CODE (*t) == TARGET_MEM_REF)
4297 tree addr = TREE_OPERAND (*t, 0);
4298 if (TREE_CODE (addr) == ADDR_EXPR
4299 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4300 || handled_component_p (TREE_OPERAND (addr, 0))))
4302 tree base;
4303 HOST_WIDE_INT coffset;
4304 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4305 &coffset);
4306 if (!base)
4307 gcc_unreachable ();
4309 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4310 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4311 TREE_OPERAND (*t, 1),
4312 size_int (coffset));
4313 res = true;
4315 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4316 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4319 /* Canonicalize back MEM_REFs to plain reference trees if the object
4320 accessed is a decl that has the same access semantics as the MEM_REF. */
4321 if (TREE_CODE (*t) == MEM_REF
4322 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4323 && integer_zerop (TREE_OPERAND (*t, 1))
4324 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4326 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4327 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4328 if (/* Same volatile qualification. */
4329 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4330 /* Same TBAA behavior with -fstrict-aliasing. */
4331 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4332 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4333 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4334 /* Same alignment. */
4335 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4336 /* We have to look out here to not drop a required conversion
4337 from the rhs to the lhs if *t appears on the lhs or vice-versa
4338 if it appears on the rhs. Thus require strict type
4339 compatibility. */
4340 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4342 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4343 res = true;
4347 /* Canonicalize TARGET_MEM_REF in particular with respect to
4348 the indexes becoming constant. */
4349 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4351 tree tem = maybe_fold_tmr (*t);
4352 if (tem)
4354 *t = tem;
4355 res = true;
4359 return res;
4362 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4363 distinguishes both cases. */
4365 static bool
4366 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4368 bool changed = false;
4369 gimple *stmt = gsi_stmt (*gsi);
4370 bool nowarning = gimple_no_warning_p (stmt);
4371 unsigned i;
4372 fold_defer_overflow_warnings ();
4374 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4375 after propagation.
4376 ??? This shouldn't be done in generic folding but in the
4377 propagation helpers which also know whether an address was
4378 propagated.
4379 Also canonicalize operand order. */
4380 switch (gimple_code (stmt))
4382 case GIMPLE_ASSIGN:
4383 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4385 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4386 if ((REFERENCE_CLASS_P (*rhs)
4387 || TREE_CODE (*rhs) == ADDR_EXPR)
4388 && maybe_canonicalize_mem_ref_addr (rhs))
4389 changed = true;
4390 tree *lhs = gimple_assign_lhs_ptr (stmt);
4391 if (REFERENCE_CLASS_P (*lhs)
4392 && maybe_canonicalize_mem_ref_addr (lhs))
4393 changed = true;
4395 else
4397 /* Canonicalize operand order. */
4398 enum tree_code code = gimple_assign_rhs_code (stmt);
4399 if (TREE_CODE_CLASS (code) == tcc_comparison
4400 || commutative_tree_code (code)
4401 || commutative_ternary_tree_code (code))
4403 tree rhs1 = gimple_assign_rhs1 (stmt);
4404 tree rhs2 = gimple_assign_rhs2 (stmt);
4405 if (tree_swap_operands_p (rhs1, rhs2))
4407 gimple_assign_set_rhs1 (stmt, rhs2);
4408 gimple_assign_set_rhs2 (stmt, rhs1);
4409 if (TREE_CODE_CLASS (code) == tcc_comparison)
4410 gimple_assign_set_rhs_code (stmt,
4411 swap_tree_comparison (code));
4412 changed = true;
4416 break;
4417 case GIMPLE_CALL:
4419 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4421 tree *arg = gimple_call_arg_ptr (stmt, i);
4422 if (REFERENCE_CLASS_P (*arg)
4423 && maybe_canonicalize_mem_ref_addr (arg))
4424 changed = true;
4426 tree *lhs = gimple_call_lhs_ptr (stmt);
4427 if (*lhs
4428 && REFERENCE_CLASS_P (*lhs)
4429 && maybe_canonicalize_mem_ref_addr (lhs))
4430 changed = true;
4431 break;
4433 case GIMPLE_ASM:
4435 gasm *asm_stmt = as_a <gasm *> (stmt);
4436 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4438 tree link = gimple_asm_output_op (asm_stmt, i);
4439 tree op = TREE_VALUE (link);
4440 if (REFERENCE_CLASS_P (op)
4441 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4442 changed = true;
4444 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4446 tree link = gimple_asm_input_op (asm_stmt, i);
4447 tree op = TREE_VALUE (link);
4448 if ((REFERENCE_CLASS_P (op)
4449 || TREE_CODE (op) == ADDR_EXPR)
4450 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4451 changed = true;
4454 break;
4455 case GIMPLE_DEBUG:
4456 if (gimple_debug_bind_p (stmt))
4458 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4459 if (*val
4460 && (REFERENCE_CLASS_P (*val)
4461 || TREE_CODE (*val) == ADDR_EXPR)
4462 && maybe_canonicalize_mem_ref_addr (val))
4463 changed = true;
4465 break;
4466 case GIMPLE_COND:
4468 /* Canonicalize operand order. */
4469 tree lhs = gimple_cond_lhs (stmt);
4470 tree rhs = gimple_cond_rhs (stmt);
4471 if (tree_swap_operands_p (lhs, rhs))
4473 gcond *gc = as_a <gcond *> (stmt);
4474 gimple_cond_set_lhs (gc, rhs);
4475 gimple_cond_set_rhs (gc, lhs);
4476 gimple_cond_set_code (gc,
4477 swap_tree_comparison (gimple_cond_code (gc)));
4478 changed = true;
4481 default:;
4484 /* Dispatch to pattern-based folding. */
4485 if (!inplace
4486 || is_gimple_assign (stmt)
4487 || gimple_code (stmt) == GIMPLE_COND)
4489 gimple_seq seq = NULL;
4490 code_helper rcode;
4491 tree ops[3] = {};
4492 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4493 valueize, valueize))
4495 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4496 changed = true;
4497 else
4498 gimple_seq_discard (seq);
4502 stmt = gsi_stmt (*gsi);
4504 /* Fold the main computation performed by the statement. */
4505 switch (gimple_code (stmt))
4507 case GIMPLE_ASSIGN:
4509 /* Try to canonicalize for boolean-typed X the comparisons
4510 X == 0, X == 1, X != 0, and X != 1. */
4511 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4512 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4514 tree lhs = gimple_assign_lhs (stmt);
4515 tree op1 = gimple_assign_rhs1 (stmt);
4516 tree op2 = gimple_assign_rhs2 (stmt);
4517 tree type = TREE_TYPE (op1);
4519 /* Check whether the comparison operands are of the same boolean
4520 type as the result type is.
4521 Check that second operand is an integer-constant with value
4522 one or zero. */
4523 if (TREE_CODE (op2) == INTEGER_CST
4524 && (integer_zerop (op2) || integer_onep (op2))
4525 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4527 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4528 bool is_logical_not = false;
4530 /* X == 0 and X != 1 is a logical-not.of X
4531 X == 1 and X != 0 is X */
4532 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4533 || (cmp_code == NE_EXPR && integer_onep (op2)))
4534 is_logical_not = true;
4536 if (is_logical_not == false)
4537 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4538 /* Only for one-bit precision typed X the transformation
4539 !X -> ~X is valied. */
4540 else if (TYPE_PRECISION (type) == 1)
4541 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4542 /* Otherwise we use !X -> X ^ 1. */
4543 else
4544 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4545 build_int_cst (type, 1));
4546 changed = true;
4547 break;
4551 unsigned old_num_ops = gimple_num_ops (stmt);
4552 tree lhs = gimple_assign_lhs (stmt);
4553 tree new_rhs = fold_gimple_assign (gsi);
4554 if (new_rhs
4555 && !useless_type_conversion_p (TREE_TYPE (lhs),
4556 TREE_TYPE (new_rhs)))
4557 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4558 if (new_rhs
4559 && (!inplace
4560 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4562 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4563 changed = true;
4565 break;
4568 case GIMPLE_CALL:
4569 changed |= gimple_fold_call (gsi, inplace);
4570 break;
4572 case GIMPLE_ASM:
4573 /* Fold *& in asm operands. */
4575 gasm *asm_stmt = as_a <gasm *> (stmt);
4576 size_t noutputs;
4577 const char **oconstraints;
4578 const char *constraint;
4579 bool allows_mem, allows_reg;
4581 noutputs = gimple_asm_noutputs (asm_stmt);
4582 oconstraints = XALLOCAVEC (const char *, noutputs);
4584 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4586 tree link = gimple_asm_output_op (asm_stmt, i);
4587 tree op = TREE_VALUE (link);
4588 oconstraints[i]
4589 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4590 if (REFERENCE_CLASS_P (op)
4591 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4593 TREE_VALUE (link) = op;
4594 changed = true;
4597 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4599 tree link = gimple_asm_input_op (asm_stmt, i);
4600 tree op = TREE_VALUE (link);
4601 constraint
4602 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4603 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4604 oconstraints, &allows_mem, &allows_reg);
4605 if (REFERENCE_CLASS_P (op)
4606 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4607 != NULL_TREE)
4609 TREE_VALUE (link) = op;
4610 changed = true;
4614 break;
4616 case GIMPLE_DEBUG:
4617 if (gimple_debug_bind_p (stmt))
4619 tree val = gimple_debug_bind_get_value (stmt);
4620 if (val
4621 && REFERENCE_CLASS_P (val))
4623 tree tem = maybe_fold_reference (val, false);
4624 if (tem)
4626 gimple_debug_bind_set_value (stmt, tem);
4627 changed = true;
4630 else if (val
4631 && TREE_CODE (val) == ADDR_EXPR)
4633 tree ref = TREE_OPERAND (val, 0);
4634 tree tem = maybe_fold_reference (ref, false);
4635 if (tem)
4637 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4638 gimple_debug_bind_set_value (stmt, tem);
4639 changed = true;
4643 break;
4645 case GIMPLE_RETURN:
4647 greturn *ret_stmt = as_a<greturn *> (stmt);
4648 tree ret = gimple_return_retval(ret_stmt);
4650 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4652 tree val = valueize (ret);
4653 if (val && val != ret
4654 && may_propagate_copy (ret, val))
4656 gimple_return_set_retval (ret_stmt, val);
4657 changed = true;
4661 break;
4663 default:;
4666 stmt = gsi_stmt (*gsi);
4668 /* Fold *& on the lhs. */
4669 if (gimple_has_lhs (stmt))
4671 tree lhs = gimple_get_lhs (stmt);
4672 if (lhs && REFERENCE_CLASS_P (lhs))
4674 tree new_lhs = maybe_fold_reference (lhs, true);
4675 if (new_lhs)
4677 gimple_set_lhs (stmt, new_lhs);
4678 changed = true;
4683 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4684 return changed;
4687 /* Valueziation callback that ends up not following SSA edges. */
4689 tree
4690 no_follow_ssa_edges (tree)
4692 return NULL_TREE;
4695 /* Valueization callback that ends up following single-use SSA edges only. */
4697 tree
4698 follow_single_use_edges (tree val)
4700 if (TREE_CODE (val) == SSA_NAME
4701 && !has_single_use (val))
4702 return NULL_TREE;
4703 return val;
4706 /* Fold the statement pointed to by GSI. In some cases, this function may
4707 replace the whole statement with a new one. Returns true iff folding
4708 makes any changes.
4709 The statement pointed to by GSI should be in valid gimple form but may
4710 be in unfolded state as resulting from for example constant propagation
4711 which can produce *&x = 0. */
4713 bool
4714 fold_stmt (gimple_stmt_iterator *gsi)
4716 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4719 bool
4720 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4722 return fold_stmt_1 (gsi, false, valueize);
4725 /* Perform the minimal folding on statement *GSI. Only operations like
4726 *&x created by constant propagation are handled. The statement cannot
4727 be replaced with a new one. Return true if the statement was
4728 changed, false otherwise.
4729 The statement *GSI should be in valid gimple form but may
4730 be in unfolded state as resulting from for example constant propagation
4731 which can produce *&x = 0. */
4733 bool
4734 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4736 gimple *stmt = gsi_stmt (*gsi);
4737 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4738 gcc_assert (gsi_stmt (*gsi) == stmt);
4739 return changed;
4742 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4743 if EXPR is null or we don't know how.
4744 If non-null, the result always has boolean type. */
4746 static tree
4747 canonicalize_bool (tree expr, bool invert)
4749 if (!expr)
4750 return NULL_TREE;
4751 else if (invert)
4753 if (integer_nonzerop (expr))
4754 return boolean_false_node;
4755 else if (integer_zerop (expr))
4756 return boolean_true_node;
4757 else if (TREE_CODE (expr) == SSA_NAME)
4758 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4759 build_int_cst (TREE_TYPE (expr), 0));
4760 else if (COMPARISON_CLASS_P (expr))
4761 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4762 boolean_type_node,
4763 TREE_OPERAND (expr, 0),
4764 TREE_OPERAND (expr, 1));
4765 else
4766 return NULL_TREE;
4768 else
4770 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4771 return expr;
4772 if (integer_nonzerop (expr))
4773 return boolean_true_node;
4774 else if (integer_zerop (expr))
4775 return boolean_false_node;
4776 else if (TREE_CODE (expr) == SSA_NAME)
4777 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4778 build_int_cst (TREE_TYPE (expr), 0));
4779 else if (COMPARISON_CLASS_P (expr))
4780 return fold_build2 (TREE_CODE (expr),
4781 boolean_type_node,
4782 TREE_OPERAND (expr, 0),
4783 TREE_OPERAND (expr, 1));
4784 else
4785 return NULL_TREE;
4789 /* Check to see if a boolean expression EXPR is logically equivalent to the
4790 comparison (OP1 CODE OP2). Check for various identities involving
4791 SSA_NAMEs. */
4793 static bool
4794 same_bool_comparison_p (const_tree expr, enum tree_code code,
4795 const_tree op1, const_tree op2)
4797 gimple *s;
4799 /* The obvious case. */
4800 if (TREE_CODE (expr) == code
4801 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4802 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4803 return true;
4805 /* Check for comparing (name, name != 0) and the case where expr
4806 is an SSA_NAME with a definition matching the comparison. */
4807 if (TREE_CODE (expr) == SSA_NAME
4808 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4810 if (operand_equal_p (expr, op1, 0))
4811 return ((code == NE_EXPR && integer_zerop (op2))
4812 || (code == EQ_EXPR && integer_nonzerop (op2)));
4813 s = SSA_NAME_DEF_STMT (expr);
4814 if (is_gimple_assign (s)
4815 && gimple_assign_rhs_code (s) == code
4816 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4817 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4818 return true;
4821 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4822 of name is a comparison, recurse. */
4823 if (TREE_CODE (op1) == SSA_NAME
4824 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4826 s = SSA_NAME_DEF_STMT (op1);
4827 if (is_gimple_assign (s)
4828 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4830 enum tree_code c = gimple_assign_rhs_code (s);
4831 if ((c == NE_EXPR && integer_zerop (op2))
4832 || (c == EQ_EXPR && integer_nonzerop (op2)))
4833 return same_bool_comparison_p (expr, c,
4834 gimple_assign_rhs1 (s),
4835 gimple_assign_rhs2 (s));
4836 if ((c == EQ_EXPR && integer_zerop (op2))
4837 || (c == NE_EXPR && integer_nonzerop (op2)))
4838 return same_bool_comparison_p (expr,
4839 invert_tree_comparison (c, false),
4840 gimple_assign_rhs1 (s),
4841 gimple_assign_rhs2 (s));
4844 return false;
4847 /* Check to see if two boolean expressions OP1 and OP2 are logically
4848 equivalent. */
4850 static bool
4851 same_bool_result_p (const_tree op1, const_tree op2)
4853 /* Simple cases first. */
4854 if (operand_equal_p (op1, op2, 0))
4855 return true;
4857 /* Check the cases where at least one of the operands is a comparison.
4858 These are a bit smarter than operand_equal_p in that they apply some
4859 identifies on SSA_NAMEs. */
4860 if (COMPARISON_CLASS_P (op2)
4861 && same_bool_comparison_p (op1, TREE_CODE (op2),
4862 TREE_OPERAND (op2, 0),
4863 TREE_OPERAND (op2, 1)))
4864 return true;
4865 if (COMPARISON_CLASS_P (op1)
4866 && same_bool_comparison_p (op2, TREE_CODE (op1),
4867 TREE_OPERAND (op1, 0),
4868 TREE_OPERAND (op1, 1)))
4869 return true;
4871 /* Default case. */
4872 return false;
4875 /* Forward declarations for some mutually recursive functions. */
4877 static tree
4878 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4879 enum tree_code code2, tree op2a, tree op2b);
4880 static tree
4881 and_var_with_comparison (tree var, bool invert,
4882 enum tree_code code2, tree op2a, tree op2b);
4883 static tree
4884 and_var_with_comparison_1 (gimple *stmt,
4885 enum tree_code code2, tree op2a, tree op2b);
4886 static tree
4887 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4888 enum tree_code code2, tree op2a, tree op2b);
4889 static tree
4890 or_var_with_comparison (tree var, bool invert,
4891 enum tree_code code2, tree op2a, tree op2b);
4892 static tree
4893 or_var_with_comparison_1 (gimple *stmt,
4894 enum tree_code code2, tree op2a, tree op2b);
4896 /* Helper function for and_comparisons_1: try to simplify the AND of the
4897 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4898 If INVERT is true, invert the value of the VAR before doing the AND.
4899 Return NULL_EXPR if we can't simplify this to a single expression. */
4901 static tree
4902 and_var_with_comparison (tree var, bool invert,
4903 enum tree_code code2, tree op2a, tree op2b)
4905 tree t;
4906 gimple *stmt = SSA_NAME_DEF_STMT (var);
4908 /* We can only deal with variables whose definitions are assignments. */
4909 if (!is_gimple_assign (stmt))
4910 return NULL_TREE;
4912 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4913 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4914 Then we only have to consider the simpler non-inverted cases. */
4915 if (invert)
4916 t = or_var_with_comparison_1 (stmt,
4917 invert_tree_comparison (code2, false),
4918 op2a, op2b);
4919 else
4920 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4921 return canonicalize_bool (t, invert);
4924 /* Try to simplify the AND of the ssa variable defined by the assignment
4925 STMT with the comparison specified by (OP2A CODE2 OP2B).
4926 Return NULL_EXPR if we can't simplify this to a single expression. */
4928 static tree
4929 and_var_with_comparison_1 (gimple *stmt,
4930 enum tree_code code2, tree op2a, tree op2b)
4932 tree var = gimple_assign_lhs (stmt);
4933 tree true_test_var = NULL_TREE;
4934 tree false_test_var = NULL_TREE;
4935 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4937 /* Check for identities like (var AND (var == 0)) => false. */
4938 if (TREE_CODE (op2a) == SSA_NAME
4939 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4941 if ((code2 == NE_EXPR && integer_zerop (op2b))
4942 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4944 true_test_var = op2a;
4945 if (var == true_test_var)
4946 return var;
4948 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4949 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4951 false_test_var = op2a;
4952 if (var == false_test_var)
4953 return boolean_false_node;
4957 /* If the definition is a comparison, recurse on it. */
4958 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4960 tree t = and_comparisons_1 (innercode,
4961 gimple_assign_rhs1 (stmt),
4962 gimple_assign_rhs2 (stmt),
4963 code2,
4964 op2a,
4965 op2b);
4966 if (t)
4967 return t;
4970 /* If the definition is an AND or OR expression, we may be able to
4971 simplify by reassociating. */
4972 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4973 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4975 tree inner1 = gimple_assign_rhs1 (stmt);
4976 tree inner2 = gimple_assign_rhs2 (stmt);
4977 gimple *s;
4978 tree t;
4979 tree partial = NULL_TREE;
4980 bool is_and = (innercode == BIT_AND_EXPR);
4982 /* Check for boolean identities that don't require recursive examination
4983 of inner1/inner2:
4984 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4985 inner1 AND (inner1 OR inner2) => inner1
4986 !inner1 AND (inner1 AND inner2) => false
4987 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4988 Likewise for similar cases involving inner2. */
4989 if (inner1 == true_test_var)
4990 return (is_and ? var : inner1);
4991 else if (inner2 == true_test_var)
4992 return (is_and ? var : inner2);
4993 else if (inner1 == false_test_var)
4994 return (is_and
4995 ? boolean_false_node
4996 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4997 else if (inner2 == false_test_var)
4998 return (is_and
4999 ? boolean_false_node
5000 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5002 /* Next, redistribute/reassociate the AND across the inner tests.
5003 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5004 if (TREE_CODE (inner1) == SSA_NAME
5005 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5006 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5007 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5008 gimple_assign_rhs1 (s),
5009 gimple_assign_rhs2 (s),
5010 code2, op2a, op2b)))
5012 /* Handle the AND case, where we are reassociating:
5013 (inner1 AND inner2) AND (op2a code2 op2b)
5014 => (t AND inner2)
5015 If the partial result t is a constant, we win. Otherwise
5016 continue on to try reassociating with the other inner test. */
5017 if (is_and)
5019 if (integer_onep (t))
5020 return inner2;
5021 else if (integer_zerop (t))
5022 return boolean_false_node;
5025 /* Handle the OR case, where we are redistributing:
5026 (inner1 OR inner2) AND (op2a code2 op2b)
5027 => (t OR (inner2 AND (op2a code2 op2b))) */
5028 else if (integer_onep (t))
5029 return boolean_true_node;
5031 /* Save partial result for later. */
5032 partial = t;
5035 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5036 if (TREE_CODE (inner2) == SSA_NAME
5037 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5038 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5039 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5040 gimple_assign_rhs1 (s),
5041 gimple_assign_rhs2 (s),
5042 code2, op2a, op2b)))
5044 /* Handle the AND case, where we are reassociating:
5045 (inner1 AND inner2) AND (op2a code2 op2b)
5046 => (inner1 AND t) */
5047 if (is_and)
5049 if (integer_onep (t))
5050 return inner1;
5051 else if (integer_zerop (t))
5052 return boolean_false_node;
5053 /* If both are the same, we can apply the identity
5054 (x AND x) == x. */
5055 else if (partial && same_bool_result_p (t, partial))
5056 return t;
5059 /* Handle the OR case. where we are redistributing:
5060 (inner1 OR inner2) AND (op2a code2 op2b)
5061 => (t OR (inner1 AND (op2a code2 op2b)))
5062 => (t OR partial) */
5063 else
5065 if (integer_onep (t))
5066 return boolean_true_node;
5067 else if (partial)
5069 /* We already got a simplification for the other
5070 operand to the redistributed OR expression. The
5071 interesting case is when at least one is false.
5072 Or, if both are the same, we can apply the identity
5073 (x OR x) == x. */
5074 if (integer_zerop (partial))
5075 return t;
5076 else if (integer_zerop (t))
5077 return partial;
5078 else if (same_bool_result_p (t, partial))
5079 return t;
5084 return NULL_TREE;
5087 /* Try to simplify the AND of two comparisons defined by
5088 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5089 If this can be done without constructing an intermediate value,
5090 return the resulting tree; otherwise NULL_TREE is returned.
5091 This function is deliberately asymmetric as it recurses on SSA_DEFs
5092 in the first comparison but not the second. */
5094 static tree
5095 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5096 enum tree_code code2, tree op2a, tree op2b)
5098 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5100 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5101 if (operand_equal_p (op1a, op2a, 0)
5102 && operand_equal_p (op1b, op2b, 0))
5104 /* Result will be either NULL_TREE, or a combined comparison. */
5105 tree t = combine_comparisons (UNKNOWN_LOCATION,
5106 TRUTH_ANDIF_EXPR, code1, code2,
5107 truth_type, op1a, op1b);
5108 if (t)
5109 return t;
5112 /* Likewise the swapped case of the above. */
5113 if (operand_equal_p (op1a, op2b, 0)
5114 && operand_equal_p (op1b, op2a, 0))
5116 /* Result will be either NULL_TREE, or a combined comparison. */
5117 tree t = combine_comparisons (UNKNOWN_LOCATION,
5118 TRUTH_ANDIF_EXPR, code1,
5119 swap_tree_comparison (code2),
5120 truth_type, op1a, op1b);
5121 if (t)
5122 return t;
5125 /* If both comparisons are of the same value against constants, we might
5126 be able to merge them. */
5127 if (operand_equal_p (op1a, op2a, 0)
5128 && TREE_CODE (op1b) == INTEGER_CST
5129 && TREE_CODE (op2b) == INTEGER_CST)
5131 int cmp = tree_int_cst_compare (op1b, op2b);
5133 /* If we have (op1a == op1b), we should either be able to
5134 return that or FALSE, depending on whether the constant op1b
5135 also satisfies the other comparison against op2b. */
5136 if (code1 == EQ_EXPR)
5138 bool done = true;
5139 bool val;
5140 switch (code2)
5142 case EQ_EXPR: val = (cmp == 0); break;
5143 case NE_EXPR: val = (cmp != 0); break;
5144 case LT_EXPR: val = (cmp < 0); break;
5145 case GT_EXPR: val = (cmp > 0); break;
5146 case LE_EXPR: val = (cmp <= 0); break;
5147 case GE_EXPR: val = (cmp >= 0); break;
5148 default: done = false;
5150 if (done)
5152 if (val)
5153 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5154 else
5155 return boolean_false_node;
5158 /* Likewise if the second comparison is an == comparison. */
5159 else if (code2 == EQ_EXPR)
5161 bool done = true;
5162 bool val;
5163 switch (code1)
5165 case EQ_EXPR: val = (cmp == 0); break;
5166 case NE_EXPR: val = (cmp != 0); break;
5167 case LT_EXPR: val = (cmp > 0); break;
5168 case GT_EXPR: val = (cmp < 0); break;
5169 case LE_EXPR: val = (cmp >= 0); break;
5170 case GE_EXPR: val = (cmp <= 0); break;
5171 default: done = false;
5173 if (done)
5175 if (val)
5176 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5177 else
5178 return boolean_false_node;
5182 /* Same business with inequality tests. */
5183 else if (code1 == NE_EXPR)
5185 bool val;
5186 switch (code2)
5188 case EQ_EXPR: val = (cmp != 0); break;
5189 case NE_EXPR: val = (cmp == 0); break;
5190 case LT_EXPR: val = (cmp >= 0); break;
5191 case GT_EXPR: val = (cmp <= 0); break;
5192 case LE_EXPR: val = (cmp > 0); break;
5193 case GE_EXPR: val = (cmp < 0); break;
5194 default:
5195 val = false;
5197 if (val)
5198 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5200 else if (code2 == NE_EXPR)
5202 bool val;
5203 switch (code1)
5205 case EQ_EXPR: val = (cmp == 0); break;
5206 case NE_EXPR: val = (cmp != 0); break;
5207 case LT_EXPR: val = (cmp <= 0); break;
5208 case GT_EXPR: val = (cmp >= 0); break;
5209 case LE_EXPR: val = (cmp < 0); break;
5210 case GE_EXPR: val = (cmp > 0); break;
5211 default:
5212 val = false;
5214 if (val)
5215 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5218 /* Chose the more restrictive of two < or <= comparisons. */
5219 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5220 && (code2 == LT_EXPR || code2 == LE_EXPR))
5222 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5223 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5224 else
5225 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5228 /* Likewise chose the more restrictive of two > or >= comparisons. */
5229 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5230 && (code2 == GT_EXPR || code2 == GE_EXPR))
5232 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5233 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5234 else
5235 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5238 /* Check for singleton ranges. */
5239 else if (cmp == 0
5240 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5241 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5242 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5244 /* Check for disjoint ranges. */
5245 else if (cmp <= 0
5246 && (code1 == LT_EXPR || code1 == LE_EXPR)
5247 && (code2 == GT_EXPR || code2 == GE_EXPR))
5248 return boolean_false_node;
5249 else if (cmp >= 0
5250 && (code1 == GT_EXPR || code1 == GE_EXPR)
5251 && (code2 == LT_EXPR || code2 == LE_EXPR))
5252 return boolean_false_node;
5255 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5256 NAME's definition is a truth value. See if there are any simplifications
5257 that can be done against the NAME's definition. */
5258 if (TREE_CODE (op1a) == SSA_NAME
5259 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5260 && (integer_zerop (op1b) || integer_onep (op1b)))
5262 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5263 || (code1 == NE_EXPR && integer_onep (op1b)));
5264 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5265 switch (gimple_code (stmt))
5267 case GIMPLE_ASSIGN:
5268 /* Try to simplify by copy-propagating the definition. */
5269 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5271 case GIMPLE_PHI:
5272 /* If every argument to the PHI produces the same result when
5273 ANDed with the second comparison, we win.
5274 Do not do this unless the type is bool since we need a bool
5275 result here anyway. */
5276 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5278 tree result = NULL_TREE;
5279 unsigned i;
5280 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5282 tree arg = gimple_phi_arg_def (stmt, i);
5284 /* If this PHI has itself as an argument, ignore it.
5285 If all the other args produce the same result,
5286 we're still OK. */
5287 if (arg == gimple_phi_result (stmt))
5288 continue;
5289 else if (TREE_CODE (arg) == INTEGER_CST)
5291 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5293 if (!result)
5294 result = boolean_false_node;
5295 else if (!integer_zerop (result))
5296 return NULL_TREE;
5298 else if (!result)
5299 result = fold_build2 (code2, boolean_type_node,
5300 op2a, op2b);
5301 else if (!same_bool_comparison_p (result,
5302 code2, op2a, op2b))
5303 return NULL_TREE;
5305 else if (TREE_CODE (arg) == SSA_NAME
5306 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5308 tree temp;
5309 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5310 /* In simple cases we can look through PHI nodes,
5311 but we have to be careful with loops.
5312 See PR49073. */
5313 if (! dom_info_available_p (CDI_DOMINATORS)
5314 || gimple_bb (def_stmt) == gimple_bb (stmt)
5315 || dominated_by_p (CDI_DOMINATORS,
5316 gimple_bb (def_stmt),
5317 gimple_bb (stmt)))
5318 return NULL_TREE;
5319 temp = and_var_with_comparison (arg, invert, code2,
5320 op2a, op2b);
5321 if (!temp)
5322 return NULL_TREE;
5323 else if (!result)
5324 result = temp;
5325 else if (!same_bool_result_p (result, temp))
5326 return NULL_TREE;
5328 else
5329 return NULL_TREE;
5331 return result;
5334 default:
5335 break;
5338 return NULL_TREE;
5341 /* Try to simplify the AND of two comparisons, specified by
5342 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5343 If this can be simplified to a single expression (without requiring
5344 introducing more SSA variables to hold intermediate values),
5345 return the resulting tree. Otherwise return NULL_TREE.
5346 If the result expression is non-null, it has boolean type. */
5348 tree
5349 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5350 enum tree_code code2, tree op2a, tree op2b)
5352 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5353 if (t)
5354 return t;
5355 else
5356 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5359 /* Helper function for or_comparisons_1: try to simplify the OR of the
5360 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5361 If INVERT is true, invert the value of VAR before doing the OR.
5362 Return NULL_EXPR if we can't simplify this to a single expression. */
5364 static tree
5365 or_var_with_comparison (tree var, bool invert,
5366 enum tree_code code2, tree op2a, tree op2b)
5368 tree t;
5369 gimple *stmt = SSA_NAME_DEF_STMT (var);
5371 /* We can only deal with variables whose definitions are assignments. */
5372 if (!is_gimple_assign (stmt))
5373 return NULL_TREE;
5375 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5376 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5377 Then we only have to consider the simpler non-inverted cases. */
5378 if (invert)
5379 t = and_var_with_comparison_1 (stmt,
5380 invert_tree_comparison (code2, false),
5381 op2a, op2b);
5382 else
5383 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5384 return canonicalize_bool (t, invert);
5387 /* Try to simplify the OR of the ssa variable defined by the assignment
5388 STMT with the comparison specified by (OP2A CODE2 OP2B).
5389 Return NULL_EXPR if we can't simplify this to a single expression. */
5391 static tree
5392 or_var_with_comparison_1 (gimple *stmt,
5393 enum tree_code code2, tree op2a, tree op2b)
5395 tree var = gimple_assign_lhs (stmt);
/* TRUE_TEST_VAR (resp. FALSE_TEST_VAR) is set to OP2A when the second
   comparison is equivalent to testing that OP2A is true (resp. false).  */
5396 tree true_test_var = NULL_TREE;
5397 tree false_test_var = NULL_TREE;
5398 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5400 /* Check for identities like (var OR (var != 0)) => true . */
5401 if (TREE_CODE (op2a) == SSA_NAME
5402 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5404 if ((code2 == NE_EXPR && integer_zerop (op2b))
5405 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5407 true_test_var = op2a;
5408 if (var == true_test_var)
5409 return var;
5411 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5412 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5414 false_test_var = op2a;
5415 if (var == false_test_var)
5416 return boolean_true_node;
5420 /* If the definition is a comparison, recurse on it. */
5421 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5423 tree t = or_comparisons_1 (innercode,
5424 gimple_assign_rhs1 (stmt),
5425 gimple_assign_rhs2 (stmt),
5426 code2,
5427 op2a,
5428 op2b);
5429 if (t)
5430 return t;
5433 /* If the definition is an AND or OR expression, we may be able to
5434 simplify by reassociating. */
5435 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5436 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5438 tree inner1 = gimple_assign_rhs1 (stmt);
5439 tree inner2 = gimple_assign_rhs2 (stmt);
5440 gimple *s;
5441 tree t;
/* PARTIAL caches the first successful partial simplification so it can
   later be combined with the result for the other inner operand.  */
5442 tree partial = NULL_TREE;
5443 bool is_or = (innercode == BIT_IOR_EXPR);
5445 /* Check for boolean identities that don't require recursive examination
5446 of inner1/inner2:
5447 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5448 inner1 OR (inner1 AND inner2) => inner1
5449 !inner1 OR (inner1 OR inner2) => true
5450 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5452 if (inner1 == true_test_var)
5453 return (is_or ? var : inner1);
5454 else if (inner2 == true_test_var)
5455 return (is_or ? var : inner2);
5456 else if (inner1 == false_test_var)
5457 return (is_or
5458 ? boolean_true_node
5459 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5460 else if (inner2 == false_test_var)
5461 return (is_or
5462 ? boolean_true_node
5463 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5465 /* Next, redistribute/reassociate the OR across the inner tests.
5466 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5467 if (TREE_CODE (inner1) == SSA_NAME
5468 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5469 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5470 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5471 gimple_assign_rhs1 (s),
5472 gimple_assign_rhs2 (s),
5473 code2, op2a, op2b)))
5475 /* Handle the OR case, where we are reassociating:
5476 (inner1 OR inner2) OR (op2a code2 op2b)
5477 => (t OR inner2)
5478 If the partial result t is a constant, we win. Otherwise
5479 continue on to try reassociating with the other inner test. */
5480 if (is_or)
5482 if (integer_onep (t))
5483 return boolean_true_node;
5484 else if (integer_zerop (t))
5485 return inner2;
5488 /* Handle the AND case, where we are redistributing:
5489 (inner1 AND inner2) OR (op2a code2 op2b)
5490 => (t AND (inner2 OR (op2a code op2b))) */
5491 else if (integer_zerop (t))
5492 return boolean_false_node;
5494 /* Save partial result for later. */
5495 partial = t;
5498 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5499 if (TREE_CODE (inner2) == SSA_NAME
5500 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5501 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5502 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5503 gimple_assign_rhs1 (s),
5504 gimple_assign_rhs2 (s),
5505 code2, op2a, op2b)))
5507 /* Handle the OR case, where we are reassociating:
5508 (inner1 OR inner2) OR (op2a code2 op2b)
5509 => (inner1 OR t)
5510 => (t OR partial) */
5511 if (is_or)
5513 if (integer_zerop (t))
5514 return inner1;
5515 else if (integer_onep (t))
5516 return boolean_true_node;
5517 /* If both are the same, we can apply the identity
5518 (x OR x) == x. */
5519 else if (partial && same_bool_result_p (t, partial))
5520 return t;
5523 /* Handle the AND case, where we are redistributing:
5524 (inner1 AND inner2) OR (op2a code2 op2b)
5525 => (t AND (inner1 OR (op2a code2 op2b)))
5526 => (t AND partial) */
5527 else
5529 if (integer_zerop (t))
5530 return boolean_false_node;
5531 else if (partial)
5533 /* We already got a simplification for the other
5534 operand to the redistributed AND expression. The
5535 interesting case is when at least one is true.
5536 Or, if both are the same, we can apply the identity
5537 (x AND x) == x. */
5538 if (integer_onep (partial))
5539 return t;
5540 else if (integer_onep (t))
5541 return partial;
5542 else if (same_bool_result_p (t, partial))
5543 return t;
5548 return NULL_TREE;
5551 /* Try to simplify the OR of two comparisons defined by
5552 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5553 If this can be done without constructing an intermediate value,
5554 return the resulting tree; otherwise NULL_TREE is returned.
5555 This function is deliberately asymmetric as it recurses on SSA_DEFs
5556 in the first comparison but not the second. */
5558 static tree
5559 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5560 enum tree_code code2, tree op2a, tree op2b)
5562 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5564 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5565 if (operand_equal_p (op1a, op2a, 0)
5566 && operand_equal_p (op1b, op2b, 0))
5568 /* Result will be either NULL_TREE, or a combined comparison. */
5569 tree t = combine_comparisons (UNKNOWN_LOCATION,
5570 TRUTH_ORIF_EXPR, code1, code2,
5571 truth_type, op1a, op1b);
5572 if (t)
5573 return t;
5576 /* Likewise the swapped case of the above. */
5577 if (operand_equal_p (op1a, op2b, 0)
5578 && operand_equal_p (op1b, op2a, 0))
5580 /* Result will be either NULL_TREE, or a combined comparison. */
5581 tree t = combine_comparisons (UNKNOWN_LOCATION,
5582 TRUTH_ORIF_EXPR, code1,
5583 swap_tree_comparison (code2),
5584 truth_type, op1a, op1b);
5585 if (t)
5586 return t;
5589 /* If both comparisons are of the same value against constants, we might
5590 be able to merge them. */
5591 if (operand_equal_p (op1a, op2a, 0)
5592 && TREE_CODE (op1b) == INTEGER_CST
5593 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is negative, zero, or positive as OP1B is less than, equal to,
   or greater than OP2B.  */
5595 int cmp = tree_int_cst_compare (op1b, op2b);
5597 /* If we have (op1a != op1b), we should either be able to
5598 return that or TRUE, depending on whether the constant op1b
5599 also satisfies the other comparison against op2b. */
5600 if (code1 == NE_EXPR)
5602 bool done = true;
5603 bool val;
5604 switch (code2)
5606 case EQ_EXPR: val = (cmp == 0); break;
5607 case NE_EXPR: val = (cmp != 0); break;
5608 case LT_EXPR: val = (cmp < 0); break;
5609 case GT_EXPR: val = (cmp > 0); break;
5610 case LE_EXPR: val = (cmp <= 0); break;
5611 case GE_EXPR: val = (cmp >= 0); break;
5612 default: done = false;
5614 if (done)
5616 if (val)
5617 return boolean_true_node;
5618 else
5619 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5622 /* Likewise if the second comparison is a != comparison. */
5623 else if (code2 == NE_EXPR)
5625 bool done = true;
5626 bool val;
5627 switch (code1)
5629 case EQ_EXPR: val = (cmp == 0); break;
5630 case NE_EXPR: val = (cmp != 0); break;
5631 case LT_EXPR: val = (cmp > 0); break;
5632 case GT_EXPR: val = (cmp < 0); break;
5633 case LE_EXPR: val = (cmp >= 0); break;
5634 case GE_EXPR: val = (cmp <= 0); break;
5635 default: done = false;
5637 if (done)
5639 if (val)
5640 return boolean_true_node;
5641 else
5642 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5646 /* See if an equality test is redundant with the other comparison. */
5647 else if (code1 == EQ_EXPR)
5649 bool val;
5650 switch (code2)
5652 case EQ_EXPR: val = (cmp == 0); break;
5653 case NE_EXPR: val = (cmp != 0); break;
5654 case LT_EXPR: val = (cmp < 0); break;
5655 case GT_EXPR: val = (cmp > 0); break;
5656 case LE_EXPR: val = (cmp <= 0); break;
5657 case GE_EXPR: val = (cmp >= 0); break;
5658 default:
5659 val = false;
5661 if (val)
5662 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5664 else if (code2 == EQ_EXPR)
5666 bool val;
5667 switch (code1)
5669 case EQ_EXPR: val = (cmp == 0); break;
5670 case NE_EXPR: val = (cmp != 0); break;
5671 case LT_EXPR: val = (cmp > 0); break;
5672 case GT_EXPR: val = (cmp < 0); break;
5673 case LE_EXPR: val = (cmp >= 0); break;
5674 case GE_EXPR: val = (cmp <= 0); break;
5675 default:
5676 val = false;
5678 if (val)
5679 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5682 /* Choose the less restrictive of two < or <= comparisons. */
5683 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5684 && (code2 == LT_EXPR || code2 == LE_EXPR))
5686 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5687 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5688 else
5689 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5692 /* Likewise choose the less restrictive of two > or >= comparisons. */
5693 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5694 && (code2 == GT_EXPR || code2 == GE_EXPR))
5696 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5697 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5698 else
5699 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5702 /* Check for singleton ranges. */
5703 else if (cmp == 0
5704 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5705 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5706 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5708 /* Check for less/greater pairs that don't restrict the range at all. */
5709 else if (cmp >= 0
5710 && (code1 == LT_EXPR || code1 == LE_EXPR)
5711 && (code2 == GT_EXPR || code2 == GE_EXPR))
5712 return boolean_true_node;
5713 else if (cmp <= 0
5714 && (code1 == GT_EXPR || code1 == GE_EXPR)
5715 && (code2 == LT_EXPR || code2 == LE_EXPR))
5716 return boolean_true_node;
5719 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5720 NAME's definition is a truth value. See if there are any simplifications
5721 that can be done against the NAME's definition. */
5722 if (TREE_CODE (op1a) == SSA_NAME
5723 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5724 && (integer_zerop (op1b) || integer_onep (op1b)))
5726 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5727 || (code1 == NE_EXPR && integer_onep (op1b)));
5728 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5729 switch (gimple_code (stmt))
5731 case GIMPLE_ASSIGN:
5732 /* Try to simplify by copy-propagating the definition. */
5733 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5735 case GIMPLE_PHI:
5736 /* If every argument to the PHI produces the same result when
5737 ORed with the second comparison, we win.
5738 Do not do this unless the type is bool since we need a bool
5739 result here anyway. */
5740 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5742 tree result = NULL_TREE;
5743 unsigned i;
5744 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5746 tree arg = gimple_phi_arg_def (stmt, i);
5748 /* If this PHI has itself as an argument, ignore it.
5749 If all the other args produce the same result,
5750 we're still OK. */
5751 if (arg == gimple_phi_result (stmt))
5752 continue;
5753 else if (TREE_CODE (arg) == INTEGER_CST)
5755 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5757 if (!result)
5758 result = boolean_true_node;
5759 else if (!integer_onep (result))
5760 return NULL_TREE;
5762 else if (!result)
5763 result = fold_build2 (code2, boolean_type_node,
5764 op2a, op2b);
5765 else if (!same_bool_comparison_p (result,
5766 code2, op2a, op2b))
5767 return NULL_TREE;
5769 else if (TREE_CODE (arg) == SSA_NAME
5770 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5772 tree temp;
5773 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5774 /* In simple cases we can look through PHI nodes,
5775 but we have to be careful with loops.
5776 See PR49073. */
5777 if (! dom_info_available_p (CDI_DOMINATORS)
5778 || gimple_bb (def_stmt) == gimple_bb (stmt)
5779 || dominated_by_p (CDI_DOMINATORS,
5780 gimple_bb (def_stmt),
5781 gimple_bb (stmt)))
5782 return NULL_TREE;
5783 temp = or_var_with_comparison (arg, invert, code2,
5784 op2a, op2b);
5785 if (!temp)
5786 return NULL_TREE;
5787 else if (!result)
5788 result = temp;
5789 else if (!same_bool_result_p (result, temp))
5790 return NULL_TREE;
5792 else
5793 return NULL_TREE;
5795 return result;
5798 default:
5799 break;
5802 return NULL_TREE;
5805 /* Try to simplify the OR of two comparisons, specified by
5806 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5807 If this can be simplified to a single expression (without requiring
5808 introducing more SSA variables to hold intermediate values),
5809 return the resulting tree. Otherwise return NULL_TREE.
5810 If the result expression is non-null, it has boolean type. */
5812 tree
5813 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5814 enum tree_code code2, tree op2a, tree op2b)
5816 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5817 if (t)
5818 return t;
5819 else
5820 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5824 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5826 Either NULL_TREE, a simplified but non-constant or a constant
5827 is returned.
5829 ??? This should go into a gimple-fold-inline.h file to be eventually
5830 privatized with the single valueize function used in the various TUs
5831 to avoid the indirect function call overhead. */
5833 tree
5834 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5835 tree (*gvalueize) (tree))
5837 code_helper rcode;
5838 tree ops[3] = {};
5839 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5840 edges if there are intermediate VARYING defs. For this reason
5841 do not follow SSA edges here even though SCCVN can technically
5842 just deal fine with that. */
5843 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5845 tree res = NULL_TREE;
5846 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5847 res = ops[0];
5848 else if (mprts_hook)
5849 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5850 if (res)
5852 if (dump_file && dump_flags & TDF_DETAILS)
5854 fprintf (dump_file, "Match-and-simplified ");
5855 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5856 fprintf (dump_file, " to ");
5857 print_generic_expr (dump_file, res);
5858 fprintf (dump_file, "\n");
5860 return res;
/* gimple_simplify did not help; fall back to code dispatched on the
   statement kind.  */
5864 location_t loc = gimple_location (stmt);
5865 switch (gimple_code (stmt))
5867 case GIMPLE_ASSIGN:
5869 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5871 switch (get_gimple_rhs_class (subcode))
5873 case GIMPLE_SINGLE_RHS:
5875 tree rhs = gimple_assign_rhs1 (stmt);
5876 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5878 if (TREE_CODE (rhs) == SSA_NAME)
5880 /* If the RHS is an SSA_NAME, return its known constant value,
5881 if any. */
5882 return (*valueize) (rhs);
5884 /* Handle propagating invariant addresses into address
5885 operations. */
5886 else if (TREE_CODE (rhs) == ADDR_EXPR
5887 && !is_gimple_min_invariant (rhs))
5889 HOST_WIDE_INT offset = 0;
5890 tree base;
5891 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5892 &offset,
5893 valueize);
5894 if (base
5895 && (CONSTANT_CLASS_P (base)
5896 || decl_address_invariant_p (base)))
5897 return build_invariant_address (TREE_TYPE (rhs),
5898 base, offset);
5900 else if (TREE_CODE (rhs) == CONSTRUCTOR
5901 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5902 && (CONSTRUCTOR_NELTS (rhs)
5903 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5905 unsigned i;
5906 tree val, *vec;
5908 vec = XALLOCAVEC (tree,
5909 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5910 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5912 val = (*valueize) (val);
5913 if (TREE_CODE (val) == INTEGER_CST
5914 || TREE_CODE (val) == REAL_CST
5915 || TREE_CODE (val) == FIXED_CST)
5916 vec[i] = val;
5917 else
5918 return NULL_TREE;
5921 return build_vector (TREE_TYPE (rhs), vec);
5923 if (subcode == OBJ_TYPE_REF)
5925 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5926 /* If callee is constant, we can fold away the wrapper. */
5927 if (is_gimple_min_invariant (val))
5928 return val;
5931 if (kind == tcc_reference)
5933 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5934 || TREE_CODE (rhs) == REALPART_EXPR
5935 || TREE_CODE (rhs) == IMAGPART_EXPR)
5936 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5938 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5939 return fold_unary_loc (EXPR_LOCATION (rhs),
5940 TREE_CODE (rhs),
5941 TREE_TYPE (rhs), val);
5943 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5944 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5946 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5947 return fold_ternary_loc (EXPR_LOCATION (rhs),
5948 TREE_CODE (rhs),
5949 TREE_TYPE (rhs), val,
5950 TREE_OPERAND (rhs, 1),
5951 TREE_OPERAND (rhs, 2));
5953 else if (TREE_CODE (rhs) == MEM_REF
5954 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5956 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5957 if (TREE_CODE (val) == ADDR_EXPR
5958 && is_gimple_min_invariant (val))
5960 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5961 unshare_expr (val),
5962 TREE_OPERAND (rhs, 1));
5963 if (tem)
5964 rhs = tem;
5967 return fold_const_aggregate_ref_1 (rhs, valueize);
5969 else if (kind == tcc_declaration)
5970 return get_symbol_constant_value (rhs);
5971 return rhs;
5974 case GIMPLE_UNARY_RHS:
5975 return NULL_TREE;
5977 case GIMPLE_BINARY_RHS:
5978 /* Translate &x + CST into an invariant form suitable for
5979 further propagation. */
5980 if (subcode == POINTER_PLUS_EXPR)
5982 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5983 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5984 if (TREE_CODE (op0) == ADDR_EXPR
5985 && TREE_CODE (op1) == INTEGER_CST)
5987 tree off = fold_convert (ptr_type_node, op1);
5988 return build_fold_addr_expr_loc
5989 (loc,
5990 fold_build2 (MEM_REF,
5991 TREE_TYPE (TREE_TYPE (op0)),
5992 unshare_expr (op0), off));
5995 /* Canonicalize bool != 0 and bool == 0 appearing after
5996 valueization. While gimple_simplify handles this
5997 it can get confused by the ~X == 1 -> X == 0 transform
5998 which we can't reduce to a SSA name or a constant
5999 (and we have no way to tell gimple_simplify to not
6000 consider those transforms in the first place). */
6001 else if (subcode == EQ_EXPR
6002 || subcode == NE_EXPR)
6004 tree lhs = gimple_assign_lhs (stmt);
6005 tree op0 = gimple_assign_rhs1 (stmt);
6006 if (useless_type_conversion_p (TREE_TYPE (lhs),
6007 TREE_TYPE (op0)))
6009 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6010 op0 = (*valueize) (op0);
6011 if (TREE_CODE (op0) == INTEGER_CST)
6012 std::swap (op0, op1);
6013 if (TREE_CODE (op1) == INTEGER_CST
6014 && ((subcode == NE_EXPR && integer_zerop (op1))
6015 || (subcode == EQ_EXPR && integer_onep (op1))))
6016 return op0;
6019 return NULL_TREE;
6021 case GIMPLE_TERNARY_RHS:
6023 /* Handle ternary operators that can appear in GIMPLE form. */
6024 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6025 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6026 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6027 return fold_ternary_loc (loc, subcode,
6028 gimple_expr_type (stmt), op0, op1, op2);
6031 default:
6032 gcc_unreachable ();
/* Calls: fold UBSAN internal checks and __builtin_expect with constant
   arguments, then try constant-folding regular builtin calls.  */
6036 case GIMPLE_CALL:
6038 tree fn;
6039 gcall *call_stmt = as_a <gcall *> (stmt);
6041 if (gimple_call_internal_p (stmt))
6043 enum tree_code subcode = ERROR_MARK;
6044 switch (gimple_call_internal_fn (stmt))
6046 case IFN_UBSAN_CHECK_ADD:
6047 subcode = PLUS_EXPR;
6048 break;
6049 case IFN_UBSAN_CHECK_SUB:
6050 subcode = MINUS_EXPR;
6051 break;
6052 case IFN_UBSAN_CHECK_MUL:
6053 subcode = MULT_EXPR;
6054 break;
6055 case IFN_BUILTIN_EXPECT:
6057 tree arg0 = gimple_call_arg (stmt, 0);
6058 tree op0 = (*valueize) (arg0);
6059 if (TREE_CODE (op0) == INTEGER_CST)
6060 return op0;
6061 return NULL_TREE;
6063 default:
6064 return NULL_TREE;
6066 tree arg0 = gimple_call_arg (stmt, 0);
6067 tree arg1 = gimple_call_arg (stmt, 1);
6068 tree op0 = (*valueize) (arg0);
6069 tree op1 = (*valueize) (arg1);
6071 if (TREE_CODE (op0) != INTEGER_CST
6072 || TREE_CODE (op1) != INTEGER_CST)
6074 switch (subcode)
6076 case MULT_EXPR:
6077 /* x * 0 = 0 * x = 0 without overflow. */
6078 if (integer_zerop (op0) || integer_zerop (op1))
6079 return build_zero_cst (TREE_TYPE (arg0));
6080 break;
6081 case MINUS_EXPR:
6082 /* y - y = 0 without overflow. */
6083 if (operand_equal_p (op0, op1, 0))
6084 return build_zero_cst (TREE_TYPE (arg0));
6085 break;
6086 default:
6087 break;
6090 tree res
6091 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6092 if (res
6093 && TREE_CODE (res) == INTEGER_CST
6094 && !TREE_OVERFLOW (res))
6095 return res;
6096 return NULL_TREE;
/* Not an internal call: fold a builtin whose address and all arguments
   valueize to constants.  */
6099 fn = (*valueize) (gimple_call_fn (stmt));
6100 if (TREE_CODE (fn) == ADDR_EXPR
6101 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6102 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6103 && gimple_builtin_call_types_compatible_p (stmt,
6104 TREE_OPERAND (fn, 0)))
6106 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6107 tree retval;
6108 unsigned i;
6109 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6110 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6111 retval = fold_builtin_call_array (loc,
6112 gimple_call_return_type (call_stmt),
6113 fn, gimple_call_num_args (stmt), args);
6114 if (retval)
6116 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6117 STRIP_NOPS (retval);
6118 retval = fold_convert (gimple_call_return_type (call_stmt),
6119 retval);
6121 return retval;
6123 return NULL_TREE;
6126 default:
6127 return NULL_TREE;
6131 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6132 Returns NULL_TREE if folding to a constant is not possible, otherwise
6133 returns a constant according to is_gimple_min_invariant. */
6135 tree
6136 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6138 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6139 if (res && is_gimple_min_invariant (res))
6140 return res;
6141 return NULL_TREE;
6145 /* The following set of functions are supposed to fold references using
6146 their constant initializers. */
6148 /* See if we can find constructor defining value of BASE.
6149 When we know the constructor with constant offset (such as
6150 base is array[40] and we do know constructor of array), then
6151 BIT_OFFSET is adjusted accordingly.
6153 As a special case, return error_mark_node when constructor
6154 is not explicitly available, but it is known to be zero
6155 such as 'static const int a;'. */
6156 static tree
6157 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6158 tree (*valueize)(tree))
6160 HOST_WIDE_INT bit_offset2, size, max_size;
6161 bool reverse;
6163 if (TREE_CODE (base) == MEM_REF)
/* Fold the MEM_REF's constant offset into *BIT_OFFSET and strip the
   address operand so we can recurse on the underlying decl.  */
6165 if (!integer_zerop (TREE_OPERAND (base, 1)))
6167 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6168 return NULL_TREE;
6169 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6170 * BITS_PER_UNIT)
6173 if (valueize
6174 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6175 base = valueize (TREE_OPERAND (base, 0));
6176 if (!base || TREE_CODE (base) != ADDR_EXPR)
6177 return NULL_TREE;
6178 base = TREE_OPERAND (base, 0);
6180 else if (valueize
6181 && TREE_CODE (base) == SSA_NAME)
6182 base = valueize (base);
6184 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6185 DECL_INITIAL. If BASE is a nested reference into another
6186 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6187 the inner reference. */
6188 switch (TREE_CODE (base))
6190 case VAR_DECL:
6191 case CONST_DECL:
6193 tree init = ctor_for_folding (base);
6195 /* Our semantic is exact opposite of ctor_for_folding;
6196 NULL means unknown, while error_mark_node is 0. */
6197 if (init == error_mark_node)
6198 return NULL_TREE;
6199 if (!init)
6200 return error_mark_node;
6201 return init;
6204 case VIEW_CONVERT_EXPR:
6205 return get_base_constructor (TREE_OPERAND (base, 0),
6206 bit_offset, valueize);
6208 case ARRAY_REF:
6209 case COMPONENT_REF:
6210 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6211 &reverse);
6212 if (max_size == -1 || size != max_size)
6213 return NULL_TREE;
6214 *bit_offset += bit_offset2;
6215 return get_base_constructor (base, bit_offset, valueize);
6217 case CONSTRUCTOR:
6218 return base;
6220 default:
6221 if (CONSTANT_CLASS_P (base))
6222 return base;
6224 return NULL_TREE;
6228 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6229 SIZE to the memory at bit OFFSET. */
6231 static tree
6232 fold_array_ctor_reference (tree type, tree ctor,
6233 unsigned HOST_WIDE_INT offset,
6234 unsigned HOST_WIDE_INT size,
6235 tree from_decl)
6237 offset_int low_bound;
6238 offset_int elt_size;
6239 offset_int access_index;
6240 tree domain_type = NULL_TREE;
6241 HOST_WIDE_INT inner_offset;
6243 /* Compute low bound and elt size. */
6244 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6245 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6246 if (domain_type && TYPE_MIN_VALUE (domain_type))
6248 /* Static constructors for variably sized objects make no sense. */
6249 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6250 return NULL_TREE;
6251 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6253 else
6254 low_bound = 0;
6255 /* Static constructors for variably sized objects make no sense. */
6256 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6257 return NULL_TREE;
6258 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6260 /* We can handle only constantly sized accesses that are known to not
6261 be larger than size of array element. */
6262 if (!TYPE_SIZE_UNIT (type)
6263 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6264 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6265 || elt_size == 0)
6266 return NULL_TREE;
6268 /* Compute the array index we look for. */
6269 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6270 elt_size);
6271 access_index += low_bound;
6273 /* And offset within the access. */
6274 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6276 /* See if the array field is large enough to span whole access. We do not
6277 care to fold accesses spanning multiple array indexes. */
6278 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6279 return NULL_TREE;
6280 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6281 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6283 /* When memory is not explicitly mentioned in constructor,
6284 it is 0 (or out of range). */
6285 return build_zero_cst (type);
6288 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6289 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6291 static tree
6292 fold_nonarray_ctor_reference (tree type, tree ctor,
6293 unsigned HOST_WIDE_INT offset,
6294 unsigned HOST_WIDE_INT size,
6295 tree from_decl)
6297 unsigned HOST_WIDE_INT cnt;
6298 tree cfield, cval;
6300 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6301 cval)
6303 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6304 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6305 tree field_size = DECL_SIZE (cfield);
6306 offset_int bitoffset;
6307 offset_int bitoffset_end, access_end;
6309 /* Variable sized objects in static constructors make no sense,
6310 but field_size can be NULL for flexible array members. */
6311 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6312 && TREE_CODE (byte_offset) == INTEGER_CST
6313 && (field_size != NULL_TREE
6314 ? TREE_CODE (field_size) == INTEGER_CST
6315 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6317 /* Compute bit offset of the field. */
6318 bitoffset = (wi::to_offset (field_offset)
6319 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6320 /* Compute bit offset where the field ends. */
6321 if (field_size != NULL_TREE)
6322 bitoffset_end = bitoffset + wi::to_offset (field_size);
6323 else
6324 bitoffset_end = 0;
6326 access_end = offset_int (offset) + size;
6328 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6329 [BITOFFSET, BITOFFSET_END)? */
6330 if (wi::cmps (access_end, bitoffset) > 0
6331 && (field_size == NULL_TREE
6332 || wi::lts_p (offset, bitoffset_end)))
6334 offset_int inner_offset = offset_int (offset) - bitoffset;
6335 /* We do have overlap. Now see if field is large enough to
6336 cover the access. Give up for accesses spanning multiple
6337 fields. */
6338 if (wi::cmps (access_end, bitoffset_end) > 0)
6339 return NULL_TREE;
6340 if (offset < bitoffset)
6341 return NULL_TREE;
6342 return fold_ctor_reference (type, cval,
6343 inner_offset.to_uhwi (), size,
6344 from_decl);
6347 /* When memory is not explicitly mentioned in constructor, it is 0. */
6348 return build_zero_cst (type);
6351 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6352 to the memory at bit OFFSET. */
6354 tree
6355 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6356 unsigned HOST_WIDE_INT size, tree from_decl)
6358 tree ret;
6360 /* We found the field with exact match. */
6361 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6362 && !offset)
6363 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6365 /* We are at the end of walk, see if we can view convert the
6366 result. */
6367 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6368 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6369 && !compare_tree_int (TYPE_SIZE (type), size)
6370 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6372 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6373 if (ret)
6375 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6376 if (ret)
6377 STRIP_USELESS_TYPE_CONVERSION (ret);
6379 return ret;
6381 /* For constants and byte-aligned/sized reads try to go through
6382 native_encode/interpret. */
6383 if (CONSTANT_CLASS_P (ctor)
6384 && BITS_PER_UNIT == 8
6385 && offset % BITS_PER_UNIT == 0
6386 && size % BITS_PER_UNIT == 0
6387 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6389 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6390 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6391 offset / BITS_PER_UNIT);
6392 if (len > 0)
6393 return native_interpret_expr (type, buf, len);
6395 if (TREE_CODE (ctor) == CONSTRUCTOR)
6398 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6399 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6400 return fold_array_ctor_reference (type, ctor, offset, size,
6401 from_decl);
6402 else
6403 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6404 from_decl);
6407 return NULL_TREE;
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
6414 tree
6415 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6417 tree ctor, idx, base;
6418 HOST_WIDE_INT offset, size, max_size;
6419 tree tem;
6420 bool reverse;
6422 if (TREE_THIS_VOLATILE (t))
6423 return NULL_TREE;
6425 if (DECL_P (t))
6426 return get_symbol_constant_value (t);
6428 tem = fold_read_from_constant_string (t);
6429 if (tem)
6430 return tem;
6432 switch (TREE_CODE (t))
6434 case ARRAY_REF:
6435 case ARRAY_RANGE_REF:
6436 /* Constant indexes are handled well by get_base_constructor.
6437 Only special case variable offsets.
6438 FIXME: This code can't handle nested references with variable indexes
6439 (they will be handled only by iteration of ccp). Perhaps we can bring
6440 get_ref_base_and_extent here and make it use a valueize callback. */
6441 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6442 && valueize
6443 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6444 && TREE_CODE (idx) == INTEGER_CST)
6446 tree low_bound, unit_size;
6448 /* If the resulting bit-offset is constant, track it. */
6449 if ((low_bound = array_ref_low_bound (t),
6450 TREE_CODE (low_bound) == INTEGER_CST)
6451 && (unit_size = array_ref_element_size (t),
6452 tree_fits_uhwi_p (unit_size)))
6454 offset_int woffset
6455 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6456 TYPE_PRECISION (TREE_TYPE (idx)));
6458 if (wi::fits_shwi_p (woffset))
6460 offset = woffset.to_shwi ();
6461 /* TODO: This code seems wrong, multiply then check
6462 to see if it fits. */
6463 offset *= tree_to_uhwi (unit_size);
6464 offset *= BITS_PER_UNIT;
6466 base = TREE_OPERAND (t, 0);
6467 ctor = get_base_constructor (base, &offset, valueize);
6468 /* Empty constructor. Always fold to 0. */
6469 if (ctor == error_mark_node)
6470 return build_zero_cst (TREE_TYPE (t));
6471 /* Out of bound array access. Value is undefined,
6472 but don't fold. */
6473 if (offset < 0)
6474 return NULL_TREE;
6475 /* We can not determine ctor. */
6476 if (!ctor)
6477 return NULL_TREE;
6478 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6479 tree_to_uhwi (unit_size)
6480 * BITS_PER_UNIT,
6481 base);
6485 /* Fallthru. */
6487 case COMPONENT_REF:
6488 case BIT_FIELD_REF:
6489 case TARGET_MEM_REF:
6490 case MEM_REF:
6491 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6492 ctor = get_base_constructor (base, &offset, valueize);
6494 /* Empty constructor. Always fold to 0. */
6495 if (ctor == error_mark_node)
6496 return build_zero_cst (TREE_TYPE (t));
6497 /* We do not know precise address. */
6498 if (max_size == -1 || max_size != size)
6499 return NULL_TREE;
6500 /* We can not determine ctor. */
6501 if (!ctor)
6502 return NULL_TREE;
6504 /* Out of bound array access. Value is undefined, but don't fold. */
6505 if (offset < 0)
6506 return NULL_TREE;
6508 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6509 base);
6511 case REALPART_EXPR:
6512 case IMAGPART_EXPR:
6514 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6515 if (c && TREE_CODE (c) == COMPLEX_CST)
6516 return fold_build1_loc (EXPR_LOCATION (t),
6517 TREE_CODE (t), TREE_TYPE (t), c);
6518 break;
6521 default:
6522 break;
6525 return NULL_TREE;
6528 tree
6529 fold_const_aggregate_ref (tree t)
6531 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
6540 tree
6541 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6542 tree v,
6543 unsigned HOST_WIDE_INT offset,
6544 bool *can_refer)
6546 tree vtable = v, init, fn;
6547 unsigned HOST_WIDE_INT size;
6548 unsigned HOST_WIDE_INT elt_size, access_index;
6549 tree domain_type;
6551 if (can_refer)
6552 *can_refer = true;
6554 /* First of all double check we have virtual table. */
6555 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6557 /* Pass down that we lost track of the target. */
6558 if (can_refer)
6559 *can_refer = false;
6560 return NULL_TREE;
6563 init = ctor_for_folding (v);
6565 /* The virtual tables should always be born with constructors
6566 and we always should assume that they are avaialble for
6567 folding. At the moment we do not stream them in all cases,
6568 but it should never happen that ctor seem unreachable. */
6569 gcc_assert (init);
6570 if (init == error_mark_node)
6572 gcc_assert (in_lto_p);
6573 /* Pass down that we lost track of the target. */
6574 if (can_refer)
6575 *can_refer = false;
6576 return NULL_TREE;
6578 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6579 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6580 offset *= BITS_PER_UNIT;
6581 offset += token * size;
6583 /* Lookup the value in the constructor that is assumed to be array.
6584 This is equivalent to
6585 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6586 offset, size, NULL);
6587 but in a constant time. We expect that frontend produced a simple
6588 array without indexed initializers. */
6590 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6591 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6592 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6593 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6595 access_index = offset / BITS_PER_UNIT / elt_size;
6596 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6598 /* This code makes an assumption that there are no
6599 indexed fileds produced by C++ FE, so we can directly index the array. */
6600 if (access_index < CONSTRUCTOR_NELTS (init))
6602 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6603 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6604 STRIP_NOPS (fn);
6606 else
6607 fn = NULL;
6609 /* For type inconsistent program we may end up looking up virtual method
6610 in virtual table that does not contain TOKEN entries. We may overrun
6611 the virtual table and pick up a constant or RTTI info pointer.
6612 In any case the call is undefined. */
6613 if (!fn
6614 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6615 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6616 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6617 else
6619 fn = TREE_OPERAND (fn, 0);
6621 /* When cgraph node is missing and function is not public, we cannot
6622 devirtualize. This can happen in WHOPR when the actual method
6623 ends up in other partition, because we found devirtualization
6624 possibility too late. */
6625 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6627 if (can_refer)
6629 *can_refer = false;
6630 return fn;
6632 return NULL_TREE;
6636 /* Make sure we create a cgraph node for functions we'll reference.
6637 They can be non-existent if the reference comes from an entry
6638 of an external vtable for example. */
6639 cgraph_node::get_create (fn);
6641 return fn;
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
6652 tree
6653 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6654 bool *can_refer)
6656 unsigned HOST_WIDE_INT offset;
6657 tree v;
6659 v = BINFO_VTABLE (known_binfo);
6660 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6661 if (!v)
6662 return NULL_TREE;
6664 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6666 if (can_refer)
6667 *can_refer = false;
6668 return NULL_TREE;
6670 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6673 /* Given a pointer value T, return a simplified version of an
6674 indirection through T, or NULL_TREE if no simplification is
6675 possible. Note that the resulting type may be different from
6676 the type pointed to in the sense that it is still compatible
6677 from the langhooks point of view. */
6679 tree
6680 gimple_fold_indirect_ref (tree t)
6682 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6683 tree sub = t;
6684 tree subtype;
6686 STRIP_NOPS (sub);
6687 subtype = TREE_TYPE (sub);
6688 if (!POINTER_TYPE_P (subtype)
6689 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6690 return NULL_TREE;
6692 if (TREE_CODE (sub) == ADDR_EXPR)
6694 tree op = TREE_OPERAND (sub, 0);
6695 tree optype = TREE_TYPE (op);
6696 /* *&p => p */
6697 if (useless_type_conversion_p (type, optype))
6698 return op;
6700 /* *(foo *)&fooarray => fooarray[0] */
6701 if (TREE_CODE (optype) == ARRAY_TYPE
6702 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6703 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6705 tree type_domain = TYPE_DOMAIN (optype);
6706 tree min_val = size_zero_node;
6707 if (type_domain && TYPE_MIN_VALUE (type_domain))
6708 min_val = TYPE_MIN_VALUE (type_domain);
6709 if (TREE_CODE (min_val) == INTEGER_CST)
6710 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6712 /* *(foo *)&complexfoo => __real__ complexfoo */
6713 else if (TREE_CODE (optype) == COMPLEX_TYPE
6714 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6715 return fold_build1 (REALPART_EXPR, type, op);
6716 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6717 else if (TREE_CODE (optype) == VECTOR_TYPE
6718 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6720 tree part_width = TYPE_SIZE (type);
6721 tree index = bitsize_int (0);
6722 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6726 /* *(p + CST) -> ... */
6727 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6728 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6730 tree addr = TREE_OPERAND (sub, 0);
6731 tree off = TREE_OPERAND (sub, 1);
6732 tree addrtype;
6734 STRIP_NOPS (addr);
6735 addrtype = TREE_TYPE (addr);
6737 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6738 if (TREE_CODE (addr) == ADDR_EXPR
6739 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6740 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6741 && tree_fits_uhwi_p (off))
6743 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6744 tree part_width = TYPE_SIZE (type);
6745 unsigned HOST_WIDE_INT part_widthi
6746 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6747 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6748 tree index = bitsize_int (indexi);
6749 if (offset / part_widthi
6750 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6751 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6752 part_width, index);
6755 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6756 if (TREE_CODE (addr) == ADDR_EXPR
6757 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6758 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6760 tree size = TYPE_SIZE_UNIT (type);
6761 if (tree_int_cst_equal (size, off))
6762 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6765 /* *(p + CST) -> MEM_REF <p, CST>. */
6766 if (TREE_CODE (addr) != ADDR_EXPR
6767 || DECL_P (TREE_OPERAND (addr, 0)))
6768 return fold_build2 (MEM_REF, type,
6769 addr,
6770 wide_int_to_tree (ptype, off));
6773 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6774 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6775 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6776 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6778 tree type_domain;
6779 tree min_val = size_zero_node;
6780 tree osub = sub;
6781 sub = gimple_fold_indirect_ref (sub);
6782 if (! sub)
6783 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6784 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6785 if (type_domain && TYPE_MIN_VALUE (type_domain))
6786 min_val = TYPE_MIN_VALUE (type_domain);
6787 if (TREE_CODE (min_val) == INTEGER_CST)
6788 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6791 return NULL_TREE;
6794 /* Return true if CODE is an operation that when operating on signed
6795 integer types involves undefined behavior on overflow and the
6796 operation can be expressed with unsigned arithmetic. */
6798 bool
6799 arith_code_with_undefined_signed_overflow (tree_code code)
6801 switch (code)
6803 case PLUS_EXPR:
6804 case MINUS_EXPR:
6805 case MULT_EXPR:
6806 case NEGATE_EXPR:
6807 case POINTER_PLUS_EXPR:
6808 return true;
6809 default:
6810 return false;
6814 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6815 operation that can be transformed to unsigned arithmetic by converting
6816 its operand, carrying out the operation in the corresponding unsigned
6817 type and converting the result back to the original type.
6819 Returns a sequence of statements that replace STMT and also contain
6820 a modified form of STMT itself. */
6822 gimple_seq
6823 rewrite_to_defined_overflow (gimple *stmt)
6825 if (dump_file && (dump_flags & TDF_DETAILS))
6827 fprintf (dump_file, "rewriting stmt with undefined signed "
6828 "overflow ");
6829 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6832 tree lhs = gimple_assign_lhs (stmt);
6833 tree type = unsigned_type_for (TREE_TYPE (lhs));
6834 gimple_seq stmts = NULL;
6835 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6837 tree op = gimple_op (stmt, i);
6838 op = gimple_convert (&stmts, type, op);
6839 gimple_set_op (stmt, i, op);
6841 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6842 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6843 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6844 gimple_seq_add_stmt (&stmts, stmt);
6845 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6846 gimple_seq_add_stmt (&stmts, cvt);
6848 return stmts;
6852 /* The valueization hook we use for the gimple_build API simplification.
6853 This makes us match fold_buildN behavior by only combining with
6854 statements in the sequence(s) we are currently building. */
6856 static tree
6857 gimple_build_valueize (tree op)
6859 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6860 return op;
6861 return NULL_TREE;
6864 /* Build the expression CODE OP0 of type TYPE with location LOC,
6865 simplifying it first if possible. Returns the built
6866 expression value and appends statements possibly defining it
6867 to SEQ. */
6869 tree
6870 gimple_build (gimple_seq *seq, location_t loc,
6871 enum tree_code code, tree type, tree op0)
6873 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6874 if (!res)
6876 res = create_tmp_reg_or_ssa_name (type);
6877 gimple *stmt;
6878 if (code == REALPART_EXPR
6879 || code == IMAGPART_EXPR
6880 || code == VIEW_CONVERT_EXPR)
6881 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6882 else
6883 stmt = gimple_build_assign (res, code, op0);
6884 gimple_set_location (stmt, loc);
6885 gimple_seq_add_stmt_without_update (seq, stmt);
6887 return res;
6890 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6891 simplifying it first if possible. Returns the built
6892 expression value and appends statements possibly defining it
6893 to SEQ. */
6895 tree
6896 gimple_build (gimple_seq *seq, location_t loc,
6897 enum tree_code code, tree type, tree op0, tree op1)
6899 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6900 if (!res)
6902 res = create_tmp_reg_or_ssa_name (type);
6903 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6904 gimple_set_location (stmt, loc);
6905 gimple_seq_add_stmt_without_update (seq, stmt);
6907 return res;
6910 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6911 simplifying it first if possible. Returns the built
6912 expression value and appends statements possibly defining it
6913 to SEQ. */
6915 tree
6916 gimple_build (gimple_seq *seq, location_t loc,
6917 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6919 tree res = gimple_simplify (code, type, op0, op1, op2,
6920 seq, gimple_build_valueize);
6921 if (!res)
6923 res = create_tmp_reg_or_ssa_name (type);
6924 gimple *stmt;
6925 if (code == BIT_FIELD_REF)
6926 stmt = gimple_build_assign (res, code,
6927 build3 (code, type, op0, op1, op2));
6928 else
6929 stmt = gimple_build_assign (res, code, op0, op1, op2);
6930 gimple_set_location (stmt, loc);
6931 gimple_seq_add_stmt_without_update (seq, stmt);
6933 return res;
6936 /* Build the call FN (ARG0) with a result of type TYPE
6937 (or no result if TYPE is void) with location LOC,
6938 simplifying it first if possible. Returns the built
6939 expression value (or NULL_TREE if TYPE is void) and appends
6940 statements possibly defining it to SEQ. */
6942 tree
6943 gimple_build (gimple_seq *seq, location_t loc,
6944 enum built_in_function fn, tree type, tree arg0)
6946 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6947 if (!res)
6949 tree decl = builtin_decl_implicit (fn);
6950 gimple *stmt = gimple_build_call (decl, 1, arg0);
6951 if (!VOID_TYPE_P (type))
6953 res = create_tmp_reg_or_ssa_name (type);
6954 gimple_call_set_lhs (stmt, res);
6956 gimple_set_location (stmt, loc);
6957 gimple_seq_add_stmt_without_update (seq, stmt);
6959 return res;
6962 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6963 (or no result if TYPE is void) with location LOC,
6964 simplifying it first if possible. Returns the built
6965 expression value (or NULL_TREE if TYPE is void) and appends
6966 statements possibly defining it to SEQ. */
6968 tree
6969 gimple_build (gimple_seq *seq, location_t loc,
6970 enum built_in_function fn, tree type, tree arg0, tree arg1)
6972 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6973 if (!res)
6975 tree decl = builtin_decl_implicit (fn);
6976 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6977 if (!VOID_TYPE_P (type))
6979 res = create_tmp_reg_or_ssa_name (type);
6980 gimple_call_set_lhs (stmt, res);
6982 gimple_set_location (stmt, loc);
6983 gimple_seq_add_stmt_without_update (seq, stmt);
6985 return res;
6988 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6989 (or no result if TYPE is void) with location LOC,
6990 simplifying it first if possible. Returns the built
6991 expression value (or NULL_TREE if TYPE is void) and appends
6992 statements possibly defining it to SEQ. */
6994 tree
6995 gimple_build (gimple_seq *seq, location_t loc,
6996 enum built_in_function fn, tree type,
6997 tree arg0, tree arg1, tree arg2)
6999 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7000 seq, gimple_build_valueize);
7001 if (!res)
7003 tree decl = builtin_decl_implicit (fn);
7004 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7005 if (!VOID_TYPE_P (type))
7007 res = create_tmp_reg_or_ssa_name (type);
7008 gimple_call_set_lhs (stmt, res);
7010 gimple_set_location (stmt, loc);
7011 gimple_seq_add_stmt_without_update (seq, stmt);
7013 return res;
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */
7022 tree
7023 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7025 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7026 return op;
7027 return gimple_build (seq, loc, NOP_EXPR, type, op);
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */
7036 tree
7037 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7039 if (ptrofftype_p (TREE_TYPE (op)))
7040 return op;
7041 return gimple_convert (seq, loc, sizetype, op);
7044 /* Return true if the result of assignment STMT is known to be non-negative.
7045 If the return value is based on the assumption that signed overflow is
7046 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7047 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7049 static bool
7050 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7051 int depth)
7053 enum tree_code code = gimple_assign_rhs_code (stmt);
7054 switch (get_gimple_rhs_class (code))
7056 case GIMPLE_UNARY_RHS:
7057 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7058 gimple_expr_type (stmt),
7059 gimple_assign_rhs1 (stmt),
7060 strict_overflow_p, depth);
7061 case GIMPLE_BINARY_RHS:
7062 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7063 gimple_expr_type (stmt),
7064 gimple_assign_rhs1 (stmt),
7065 gimple_assign_rhs2 (stmt),
7066 strict_overflow_p, depth);
7067 case GIMPLE_TERNARY_RHS:
7068 return false;
7069 case GIMPLE_SINGLE_RHS:
7070 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7071 strict_overflow_p, depth);
7072 case GIMPLE_INVALID_RHS:
7073 break;
7075 gcc_unreachable ();
7078 /* Return true if return value of call STMT is known to be non-negative.
7079 If the return value is based on the assumption that signed overflow is
7080 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7081 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7083 static bool
7084 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7085 int depth)
7087 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7088 gimple_call_arg (stmt, 0) : NULL_TREE;
7089 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7090 gimple_call_arg (stmt, 1) : NULL_TREE;
7092 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7093 gimple_call_combined_fn (stmt),
7094 arg0,
7095 arg1,
7096 strict_overflow_p, depth);
7099 /* Return true if return value of call STMT is known to be non-negative.
7100 If the return value is based on the assumption that signed overflow is
7101 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7102 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7104 static bool
7105 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7106 int depth)
7108 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7110 tree arg = gimple_phi_arg_def (stmt, i);
7111 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7112 return false;
7114 return true;
7117 /* Return true if STMT is known to compute a non-negative value.
7118 If the return value is based on the assumption that signed overflow is
7119 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7120 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7122 bool
7123 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7124 int depth)
7126 switch (gimple_code (stmt))
7128 case GIMPLE_ASSIGN:
7129 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7130 depth);
7131 case GIMPLE_CALL:
7132 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7133 depth);
7134 case GIMPLE_PHI:
7135 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7136 depth);
7137 default:
7138 return false;
7142 /* Return true if the floating-point value computed by assignment STMT
7143 is known to have an integer value. We also allow +Inf, -Inf and NaN
7144 to be considered integer values. Return false for signaling NaN.
7146 DEPTH is the current nesting depth of the query. */
7148 static bool
7149 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7151 enum tree_code code = gimple_assign_rhs_code (stmt);
7152 switch (get_gimple_rhs_class (code))
7154 case GIMPLE_UNARY_RHS:
7155 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7156 gimple_assign_rhs1 (stmt), depth);
7157 case GIMPLE_BINARY_RHS:
7158 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7159 gimple_assign_rhs1 (stmt),
7160 gimple_assign_rhs2 (stmt), depth);
7161 case GIMPLE_TERNARY_RHS:
7162 return false;
7163 case GIMPLE_SINGLE_RHS:
7164 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7165 case GIMPLE_INVALID_RHS:
7166 break;
7168 gcc_unreachable ();
7171 /* Return true if the floating-point value computed by call STMT is known
7172 to have an integer value. We also allow +Inf, -Inf and NaN to be
7173 considered integer values. Return false for signaling NaN.
7175 DEPTH is the current nesting depth of the query. */
7177 static bool
7178 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7180 tree arg0 = (gimple_call_num_args (stmt) > 0
7181 ? gimple_call_arg (stmt, 0)
7182 : NULL_TREE);
7183 tree arg1 = (gimple_call_num_args (stmt) > 1
7184 ? gimple_call_arg (stmt, 1)
7185 : NULL_TREE);
7186 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7187 arg0, arg1, depth);
7190 /* Return true if the floating-point result of phi STMT is known to have
7191 an integer value. We also allow +Inf, -Inf and NaN to be considered
7192 integer values. Return false for signaling NaN.
7194 DEPTH is the current nesting depth of the query. */
7196 static bool
7197 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7199 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7201 tree arg = gimple_phi_arg_def (stmt, i);
7202 if (!integer_valued_real_single_p (arg, depth + 1))
7203 return false;
7205 return true;
7208 /* Return true if the floating-point value computed by STMT is known
7209 to have an integer value. We also allow +Inf, -Inf and NaN to be
7210 considered integer values. Return false for signaling NaN.
7212 DEPTH is the current nesting depth of the query. */
7214 bool
7215 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7217 switch (gimple_code (stmt))
7219 case GIMPLE_ASSIGN:
7220 return gimple_assign_integer_valued_real_p (stmt, depth);
7221 case GIMPLE_CALL:
7222 return gimple_call_integer_valued_real_p (stmt, depth);
7223 case GIMPLE_PHI:
7224 return gimple_phi_integer_valued_real_p (stmt, depth);
7225 default:
7226 return false;