* g++.dg/debug/pr71432.C: Fail on AIX.
[official-gcc.git] / gcc / gimple-fold.c
blob36c105fc141cb8d48ba7d5679057331214ede91e
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h"
60 /* Return true when DECL can be referenced from current unit.
61 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
62 We can get declarations that are not possible to reference for various
63 reasons:
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
67 when they are keyed to other compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
70 set.
71 2) In WHOPR mode devirtualization might lead to reference
72 to method that was partitioned elsehwere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
75 declaring the body.
76 3) COMDAT functions referred by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
80 directly. */
82 static bool
83 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
85 varpool_node *vnode;
86 struct cgraph_node *node;
87 symtab_node *snode;
89 if (DECL_ABSTRACT_P (decl))
90 return false;
92 /* We are concerned only about static/external vars and functions. */
93 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
94 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
95 return true;
97 /* Static objects can be referred only if they was not optimized out yet. */
98 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
100 /* Before we start optimizing unreachable code we can be sure all
101 static objects are defined. */
102 if (symtab->function_flags_ready)
103 return true;
104 snode = symtab_node::get (decl);
105 if (!snode || !snode->definition)
106 return false;
107 node = dyn_cast <cgraph_node *> (snode);
108 return !node || !node->global.inlined_to;
111 /* We will later output the initializer, so we can refer to it.
112 So we are concerned only when DECL comes from initializer of
113 external var or var that has been optimized out. */
114 if (!from_decl
115 || TREE_CODE (from_decl) != VAR_DECL
116 || (!DECL_EXTERNAL (from_decl)
117 && (vnode = varpool_node::get (from_decl)) != NULL
118 && vnode->definition)
119 || (flag_ltrans
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->in_other_partition))
122 return true;
123 /* We are folding reference from external vtable. The vtable may reffer
124 to a symbol keyed to other compilation unit. The other compilation
125 unit may be in separate DSO and the symbol may be hidden. */
126 if (DECL_VISIBILITY_SPECIFIED (decl)
127 && DECL_EXTERNAL (decl)
128 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
129 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
130 return false;
131 /* When function is public, we always can introduce new reference.
132 Exception are the COMDAT functions where introducing a direct
133 reference imply need to include function body in the curren tunit. */
134 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
135 return true;
136 /* We have COMDAT. We are going to check if we still have definition
137 or if the definition is going to be output in other partition.
138 Bypass this when gimplifying; all needed functions will be produced.
140 As observed in PR20991 for already optimized out comdat virtual functions
141 it may be tempting to not necessarily give up because the copy will be
142 output elsewhere when corresponding vtable is output.
143 This is however not possible - ABI specify that COMDATs are output in
144 units where they are used and when the other unit was compiled with LTO
145 it is possible that vtable was kept public while the function itself
146 was privatized. */
147 if (!symtab->function_flags_ready)
148 return true;
150 snode = symtab_node::get (decl);
151 if (!snode
152 || ((!snode->definition || DECL_EXTERNAL (decl))
153 && (!snode->in_other_partition
154 || (!snode->forced_by_abi && !snode->force_output))))
155 return false;
156 node = dyn_cast <cgraph_node *> (snode);
157 return !node || !node->global.inlined_to;
160 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
161 acceptable form for is_gimple_min_invariant.
162 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
164 tree
165 canonicalize_constructor_val (tree cval, tree from_decl)
167 tree orig_cval = cval;
168 STRIP_NOPS (cval);
169 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
170 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
172 tree ptr = TREE_OPERAND (cval, 0);
173 if (is_gimple_min_invariant (ptr))
174 cval = build1_loc (EXPR_LOCATION (cval),
175 ADDR_EXPR, TREE_TYPE (ptr),
176 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
177 ptr,
178 fold_convert (ptr_type_node,
179 TREE_OPERAND (cval, 1))));
181 if (TREE_CODE (cval) == ADDR_EXPR)
183 tree base = NULL_TREE;
184 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
186 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
187 if (base)
188 TREE_OPERAND (cval, 0) = base;
190 else
191 base = get_base_address (TREE_OPERAND (cval, 0));
192 if (!base)
193 return NULL_TREE;
195 if ((TREE_CODE (base) == VAR_DECL
196 || TREE_CODE (base) == FUNCTION_DECL)
197 && !can_refer_decl_in_current_unit_p (base, from_decl))
198 return NULL_TREE;
199 if (TREE_TYPE (base) == error_mark_node)
200 return NULL_TREE;
201 if (TREE_CODE (base) == VAR_DECL)
202 TREE_ADDRESSABLE (base) = 1;
203 else if (TREE_CODE (base) == FUNCTION_DECL)
205 /* Make sure we create a cgraph node for functions we'll reference.
206 They can be non-existent if the reference comes from an entry
207 of an external vtable for example. */
208 cgraph_node::get_create (base);
210 /* Fixup types in global initializers. */
211 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
212 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
214 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
215 cval = fold_convert (TREE_TYPE (orig_cval), cval);
216 return cval;
218 if (TREE_OVERFLOW_P (cval))
219 return drop_tree_overflow (cval);
220 return orig_cval;
223 /* If SYM is a constant variable with known value, return the value.
224 NULL_TREE is returned otherwise. */
226 tree
227 get_symbol_constant_value (tree sym)
229 tree val = ctor_for_folding (sym);
230 if (val != error_mark_node)
232 if (val)
234 val = canonicalize_constructor_val (unshare_expr (val), sym);
235 if (val && is_gimple_min_invariant (val))
236 return val;
237 else
238 return NULL_TREE;
240 /* Variables declared 'const' without an initializer
241 have zero as the initializer if they may not be
242 overridden at link or run time. */
243 if (!val
244 && is_gimple_reg_type (TREE_TYPE (sym)))
245 return build_zero_cst (TREE_TYPE (sym));
248 return NULL_TREE;
253 /* Subroutine of fold_stmt. We perform several simplifications of the
254 memory reference tree EXPR and make sure to re-gimplify them properly
255 after propagation of constant addresses. IS_LHS is true if the
256 reference is supposed to be an lvalue. */
258 static tree
259 maybe_fold_reference (tree expr, bool is_lhs)
261 tree result;
263 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
264 || TREE_CODE (expr) == REALPART_EXPR
265 || TREE_CODE (expr) == IMAGPART_EXPR)
266 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
267 return fold_unary_loc (EXPR_LOCATION (expr),
268 TREE_CODE (expr),
269 TREE_TYPE (expr),
270 TREE_OPERAND (expr, 0));
271 else if (TREE_CODE (expr) == BIT_FIELD_REF
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
273 return fold_ternary_loc (EXPR_LOCATION (expr),
274 TREE_CODE (expr),
275 TREE_TYPE (expr),
276 TREE_OPERAND (expr, 0),
277 TREE_OPERAND (expr, 1),
278 TREE_OPERAND (expr, 2));
280 if (!is_lhs
281 && (result = fold_const_aggregate_ref (expr))
282 && is_gimple_min_invariant (result))
283 return result;
285 return NULL_TREE;
289 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
290 replacement rhs for the statement or NULL_TREE if no simplification
291 could be made. It is assumed that the operands have been previously
292 folded. */
294 static tree
295 fold_gimple_assign (gimple_stmt_iterator *si)
297 gimple *stmt = gsi_stmt (*si);
298 enum tree_code subcode = gimple_assign_rhs_code (stmt);
299 location_t loc = gimple_location (stmt);
301 tree result = NULL_TREE;
303 switch (get_gimple_rhs_class (subcode))
305 case GIMPLE_SINGLE_RHS:
307 tree rhs = gimple_assign_rhs1 (stmt);
309 if (TREE_CLOBBER_P (rhs))
310 return NULL_TREE;
312 if (REFERENCE_CLASS_P (rhs))
313 return maybe_fold_reference (rhs, false);
315 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
317 tree val = OBJ_TYPE_REF_EXPR (rhs);
318 if (is_gimple_min_invariant (val))
319 return val;
320 else if (flag_devirtualize && virtual_method_call_p (rhs))
322 bool final;
323 vec <cgraph_node *>targets
324 = possible_polymorphic_call_targets (rhs, stmt, &final);
325 if (final && targets.length () <= 1 && dbg_cnt (devirt))
327 if (dump_enabled_p ())
329 location_t loc = gimple_location_safe (stmt);
330 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
331 "resolving virtual function address "
332 "reference to function %s\n",
333 targets.length () == 1
334 ? targets[0]->name ()
335 : "NULL");
337 if (targets.length () == 1)
339 val = fold_convert (TREE_TYPE (val),
340 build_fold_addr_expr_loc
341 (loc, targets[0]->decl));
342 STRIP_USELESS_TYPE_CONVERSION (val);
344 else
345 /* We can not use __builtin_unreachable here because it
346 can not have address taken. */
347 val = build_int_cst (TREE_TYPE (val), 0);
348 return val;
353 else if (TREE_CODE (rhs) == ADDR_EXPR)
355 tree ref = TREE_OPERAND (rhs, 0);
356 tree tem = maybe_fold_reference (ref, true);
357 if (tem
358 && TREE_CODE (tem) == MEM_REF
359 && integer_zerop (TREE_OPERAND (tem, 1)))
360 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
361 else if (tem)
362 result = fold_convert (TREE_TYPE (rhs),
363 build_fold_addr_expr_loc (loc, tem));
364 else if (TREE_CODE (ref) == MEM_REF
365 && integer_zerop (TREE_OPERAND (ref, 1)))
366 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
368 if (result)
370 /* Strip away useless type conversions. Both the
371 NON_LVALUE_EXPR that may have been added by fold, and
372 "useless" type conversions that might now be apparent
373 due to propagation. */
374 STRIP_USELESS_TYPE_CONVERSION (result);
376 if (result != rhs && valid_gimple_rhs_p (result))
377 return result;
381 else if (TREE_CODE (rhs) == CONSTRUCTOR
382 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
384 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
385 unsigned i;
386 tree val;
388 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
389 if (! CONSTANT_CLASS_P (val))
390 return NULL_TREE;
392 return build_vector_from_ctor (TREE_TYPE (rhs),
393 CONSTRUCTOR_ELTS (rhs));
396 else if (DECL_P (rhs))
397 return get_symbol_constant_value (rhs);
399 break;
401 case GIMPLE_UNARY_RHS:
402 break;
404 case GIMPLE_BINARY_RHS:
405 break;
407 case GIMPLE_TERNARY_RHS:
408 result = fold_ternary_loc (loc, subcode,
409 TREE_TYPE (gimple_assign_lhs (stmt)),
410 gimple_assign_rhs1 (stmt),
411 gimple_assign_rhs2 (stmt),
412 gimple_assign_rhs3 (stmt));
414 if (result)
416 STRIP_USELESS_TYPE_CONVERSION (result);
417 if (valid_gimple_rhs_p (result))
418 return result;
420 break;
422 case GIMPLE_INVALID_RHS:
423 gcc_unreachable ();
426 return NULL_TREE;
430 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
431 adjusting the replacement stmts location and virtual operands.
432 If the statement has a lhs the last stmt in the sequence is expected
433 to assign to that lhs. */
435 static void
436 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
438 gimple *stmt = gsi_stmt (*si_p);
440 if (gimple_has_location (stmt))
441 annotate_all_with_location (stmts, gimple_location (stmt));
443 /* First iterate over the replacement statements backward, assigning
444 virtual operands to their defining statements. */
445 gimple *laststore = NULL;
446 for (gimple_stmt_iterator i = gsi_last (stmts);
447 !gsi_end_p (i); gsi_prev (&i))
449 gimple *new_stmt = gsi_stmt (i);
450 if ((gimple_assign_single_p (new_stmt)
451 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
452 || (is_gimple_call (new_stmt)
453 && (gimple_call_flags (new_stmt)
454 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
456 tree vdef;
457 if (!laststore)
458 vdef = gimple_vdef (stmt);
459 else
460 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
461 gimple_set_vdef (new_stmt, vdef);
462 if (vdef && TREE_CODE (vdef) == SSA_NAME)
463 SSA_NAME_DEF_STMT (vdef) = new_stmt;
464 laststore = new_stmt;
468 /* Second iterate over the statements forward, assigning virtual
469 operands to their uses. */
470 tree reaching_vuse = gimple_vuse (stmt);
471 for (gimple_stmt_iterator i = gsi_start (stmts);
472 !gsi_end_p (i); gsi_next (&i))
474 gimple *new_stmt = gsi_stmt (i);
475 /* If the new statement possibly has a VUSE, update it with exact SSA
476 name we know will reach this one. */
477 if (gimple_has_mem_ops (new_stmt))
478 gimple_set_vuse (new_stmt, reaching_vuse);
479 gimple_set_modified (new_stmt, true);
480 if (gimple_vdef (new_stmt))
481 reaching_vuse = gimple_vdef (new_stmt);
484 /* If the new sequence does not do a store release the virtual
485 definition of the original statement. */
486 if (reaching_vuse
487 && reaching_vuse == gimple_vuse (stmt))
489 tree vdef = gimple_vdef (stmt);
490 if (vdef
491 && TREE_CODE (vdef) == SSA_NAME)
493 unlink_stmt_vdef (stmt);
494 release_ssa_name (vdef);
498 /* Finally replace the original statement with the sequence. */
499 gsi_replace_with_seq (si_p, stmts, false);
502 /* Convert EXPR into a GIMPLE value suitable for substitution on the
503 RHS of an assignment. Insert the necessary statements before
504 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
505 is replaced. If the call is expected to produces a result, then it
506 is replaced by an assignment of the new RHS to the result variable.
507 If the result is to be ignored, then the call is replaced by a
508 GIMPLE_NOP. A proper VDEF chain is retained by making the first
509 VUSE and the last VDEF of the whole sequence be the same as the replaced
510 statement and using new SSA names for stores in between. */
512 void
513 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
515 tree lhs;
516 gimple *stmt, *new_stmt;
517 gimple_stmt_iterator i;
518 gimple_seq stmts = NULL;
520 stmt = gsi_stmt (*si_p);
522 gcc_assert (is_gimple_call (stmt));
524 push_gimplify_context (gimple_in_ssa_p (cfun));
526 lhs = gimple_call_lhs (stmt);
527 if (lhs == NULL_TREE)
529 gimplify_and_add (expr, &stmts);
530 /* We can end up with folding a memcpy of an empty class assignment
531 which gets optimized away by C++ gimplification. */
532 if (gimple_seq_empty_p (stmts))
534 pop_gimplify_context (NULL);
535 if (gimple_in_ssa_p (cfun))
537 unlink_stmt_vdef (stmt);
538 release_defs (stmt);
540 gsi_replace (si_p, gimple_build_nop (), false);
541 return;
544 else
546 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
547 new_stmt = gimple_build_assign (lhs, tmp);
548 i = gsi_last (stmts);
549 gsi_insert_after_without_update (&i, new_stmt,
550 GSI_CONTINUE_LINKING);
553 pop_gimplify_context (NULL);
555 gsi_replace_with_seq_vops (si_p, stmts);
559 /* Replace the call at *GSI with the gimple value VAL. */
561 static void
562 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
564 gimple *stmt = gsi_stmt (*gsi);
565 tree lhs = gimple_call_lhs (stmt);
566 gimple *repl;
567 if (lhs)
569 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
570 val = fold_convert (TREE_TYPE (lhs), val);
571 repl = gimple_build_assign (lhs, val);
573 else
574 repl = gimple_build_nop ();
575 tree vdef = gimple_vdef (stmt);
576 if (vdef && TREE_CODE (vdef) == SSA_NAME)
578 unlink_stmt_vdef (stmt);
579 release_ssa_name (vdef);
581 gsi_replace (gsi, repl, false);
584 /* Replace the call at *GSI with the new call REPL and fold that
585 again. */
587 static void
588 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
590 gimple *stmt = gsi_stmt (*gsi);
591 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
592 gimple_set_location (repl, gimple_location (stmt));
593 if (gimple_vdef (stmt)
594 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
596 gimple_set_vdef (repl, gimple_vdef (stmt));
597 gimple_set_vuse (repl, gimple_vuse (stmt));
598 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
600 gsi_replace (gsi, repl, false);
601 fold_stmt (gsi);
604 /* Return true if VAR is a VAR_DECL or a component thereof. */
606 static bool
607 var_decl_component_p (tree var)
609 tree inner = var;
610 while (handled_component_p (inner))
611 inner = TREE_OPERAND (inner, 0);
612 return SSA_VAR_P (inner);
615 /* Fold function call to builtin mem{{,p}cpy,move}. Return
616 false if no simplification can be made.
617 If ENDP is 0, return DEST (like memcpy).
618 If ENDP is 1, return DEST+LEN (like mempcpy).
619 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
620 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
621 (memmove). */
623 static bool
624 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
625 tree dest, tree src, int endp)
627 gimple *stmt = gsi_stmt (*gsi);
628 tree lhs = gimple_call_lhs (stmt);
629 tree len = gimple_call_arg (stmt, 2);
630 tree destvar, srcvar;
631 location_t loc = gimple_location (stmt);
633 /* If the LEN parameter is zero, return DEST. */
634 if (integer_zerop (len))
636 gimple *repl;
637 if (gimple_call_lhs (stmt))
638 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
639 else
640 repl = gimple_build_nop ();
641 tree vdef = gimple_vdef (stmt);
642 if (vdef && TREE_CODE (vdef) == SSA_NAME)
644 unlink_stmt_vdef (stmt);
645 release_ssa_name (vdef);
647 gsi_replace (gsi, repl, false);
648 return true;
651 /* If SRC and DEST are the same (and not volatile), return
652 DEST{,+LEN,+LEN-1}. */
653 if (operand_equal_p (src, dest, 0))
655 unlink_stmt_vdef (stmt);
656 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
657 release_ssa_name (gimple_vdef (stmt));
658 if (!lhs)
660 gsi_replace (gsi, gimple_build_nop (), false);
661 return true;
663 goto done;
665 else
667 tree srctype, desttype;
668 unsigned int src_align, dest_align;
669 tree off0;
671 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
672 pointers as wide integer) and also may result in huge function
673 size because of inlined bounds copy. Thus don't inline for
674 functions we want to instrument. */
675 if (flag_check_pointer_bounds
676 && chkp_instrumentable_p (cfun->decl)
677 /* Even if data may contain pointers we can inline if copy
678 less than a pointer size. */
679 && (!tree_fits_uhwi_p (len)
680 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
681 return false;
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
685 ptr_mode, true), 0);
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align = get_pointer_alignment (src);
692 dest_align = get_pointer_alignment (dest);
693 if (tree_fits_uhwi_p (len)
694 && compare_tree_int (len, MOVE_MAX) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
698 reason. */
699 && !c_strlen (src, 2))
701 unsigned ilen = tree_to_uhwi (len);
702 if (exact_log2 (ilen) != -1)
704 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
705 if (type
706 && TYPE_MODE (type) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
708 == ilen * 8)
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
713 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
714 != CODE_FOR_nothing)))
716 tree srctype = type;
717 tree desttype = type;
718 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
719 srctype = build_aligned_type (type, src_align);
720 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
721 tree tem = fold_const_aggregate_ref (srcmem);
722 if (tem)
723 srcmem = tem;
724 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
725 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
726 src_align)
727 && (optab_handler (movmisalign_optab,
728 TYPE_MODE (type))
729 == CODE_FOR_nothing))
730 srcmem = NULL_TREE;
731 if (srcmem)
733 gimple *new_stmt;
734 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
736 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
737 if (gimple_in_ssa_p (cfun))
738 srcmem = make_ssa_name (TREE_TYPE (srcmem),
739 new_stmt);
740 else
741 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
742 gimple_assign_set_lhs (new_stmt, srcmem);
743 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
744 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
746 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
747 desttype = build_aligned_type (type, dest_align);
748 new_stmt
749 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
750 dest, off0),
751 srcmem);
752 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
753 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
754 if (gimple_vdef (new_stmt)
755 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
756 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
757 if (!lhs)
759 gsi_replace (gsi, new_stmt, false);
760 return true;
762 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
763 goto done;
769 if (endp == 3)
771 /* Both DEST and SRC must be pointer types.
772 ??? This is what old code did. Is the testing for pointer types
773 really mandatory?
775 If either SRC is readonly or length is 1, we can use memcpy. */
776 if (!dest_align || !src_align)
777 return false;
778 if (readonly_data_expr (src)
779 || (tree_fits_uhwi_p (len)
780 && (MIN (src_align, dest_align) / BITS_PER_UNIT
781 >= tree_to_uhwi (len))))
783 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
784 if (!fn)
785 return false;
786 gimple_call_set_fndecl (stmt, fn);
787 gimple_call_set_arg (stmt, 0, dest);
788 gimple_call_set_arg (stmt, 1, src);
789 fold_stmt (gsi);
790 return true;
793 /* If *src and *dest can't overlap, optimize into memcpy as well. */
794 if (TREE_CODE (src) == ADDR_EXPR
795 && TREE_CODE (dest) == ADDR_EXPR)
797 tree src_base, dest_base, fn;
798 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
799 HOST_WIDE_INT size = -1;
800 HOST_WIDE_INT maxsize = -1;
801 bool reverse;
803 srcvar = TREE_OPERAND (src, 0);
804 src_base = get_ref_base_and_extent (srcvar, &src_offset,
805 &size, &maxsize, &reverse);
806 destvar = TREE_OPERAND (dest, 0);
807 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
808 &size, &maxsize, &reverse);
809 if (tree_fits_uhwi_p (len))
810 maxsize = tree_to_uhwi (len);
811 else
812 maxsize = -1;
813 src_offset /= BITS_PER_UNIT;
814 dest_offset /= BITS_PER_UNIT;
815 if (SSA_VAR_P (src_base)
816 && SSA_VAR_P (dest_base))
818 if (operand_equal_p (src_base, dest_base, 0)
819 && ranges_overlap_p (src_offset, maxsize,
820 dest_offset, maxsize))
821 return false;
823 else if (TREE_CODE (src_base) == MEM_REF
824 && TREE_CODE (dest_base) == MEM_REF)
826 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
827 TREE_OPERAND (dest_base, 0), 0))
828 return false;
829 offset_int off = mem_ref_offset (src_base) + src_offset;
830 if (!wi::fits_shwi_p (off))
831 return false;
832 src_offset = off.to_shwi ();
834 off = mem_ref_offset (dest_base) + dest_offset;
835 if (!wi::fits_shwi_p (off))
836 return false;
837 dest_offset = off.to_shwi ();
838 if (ranges_overlap_p (src_offset, maxsize,
839 dest_offset, maxsize))
840 return false;
842 else
843 return false;
845 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
846 if (!fn)
847 return false;
848 gimple_call_set_fndecl (stmt, fn);
849 gimple_call_set_arg (stmt, 0, dest);
850 gimple_call_set_arg (stmt, 1, src);
851 fold_stmt (gsi);
852 return true;
855 /* If the destination and source do not alias optimize into
856 memcpy as well. */
857 if ((is_gimple_min_invariant (dest)
858 || TREE_CODE (dest) == SSA_NAME)
859 && (is_gimple_min_invariant (src)
860 || TREE_CODE (src) == SSA_NAME))
862 ao_ref destr, srcr;
863 ao_ref_init_from_ptr_and_size (&destr, dest, len);
864 ao_ref_init_from_ptr_and_size (&srcr, src, len);
865 if (!refs_may_alias_p_1 (&destr, &srcr, false))
867 tree fn;
868 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
869 if (!fn)
870 return false;
871 gimple_call_set_fndecl (stmt, fn);
872 gimple_call_set_arg (stmt, 0, dest);
873 gimple_call_set_arg (stmt, 1, src);
874 fold_stmt (gsi);
875 return true;
879 return false;
882 if (!tree_fits_shwi_p (len))
883 return false;
884 /* FIXME:
885 This logic lose for arguments like (type *)malloc (sizeof (type)),
886 since we strip the casts of up to VOID return value from malloc.
887 Perhaps we ought to inherit type from non-VOID argument here? */
888 STRIP_NOPS (src);
889 STRIP_NOPS (dest);
890 if (!POINTER_TYPE_P (TREE_TYPE (src))
891 || !POINTER_TYPE_P (TREE_TYPE (dest)))
892 return false;
893 /* In the following try to find a type that is most natural to be
894 used for the memcpy source and destination and that allows
895 the most optimization when memcpy is turned into a plain assignment
896 using that type. In theory we could always use a char[len] type
897 but that only gains us that the destination and source possibly
898 no longer will have their address taken. */
899 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
900 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
902 tree tem = TREE_OPERAND (src, 0);
903 STRIP_NOPS (tem);
904 if (tem != TREE_OPERAND (src, 0))
905 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
907 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
909 tree tem = TREE_OPERAND (dest, 0);
910 STRIP_NOPS (tem);
911 if (tem != TREE_OPERAND (dest, 0))
912 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
914 srctype = TREE_TYPE (TREE_TYPE (src));
915 if (TREE_CODE (srctype) == ARRAY_TYPE
916 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
918 srctype = TREE_TYPE (srctype);
919 STRIP_NOPS (src);
920 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
922 desttype = TREE_TYPE (TREE_TYPE (dest));
923 if (TREE_CODE (desttype) == ARRAY_TYPE
924 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
926 desttype = TREE_TYPE (desttype);
927 STRIP_NOPS (dest);
928 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
930 if (TREE_ADDRESSABLE (srctype)
931 || TREE_ADDRESSABLE (desttype))
932 return false;
934 /* Make sure we are not copying using a floating-point mode or
935 a type whose size possibly does not match its precision. */
936 if (FLOAT_MODE_P (TYPE_MODE (desttype))
937 || TREE_CODE (desttype) == BOOLEAN_TYPE
938 || TREE_CODE (desttype) == ENUMERAL_TYPE)
939 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
940 if (FLOAT_MODE_P (TYPE_MODE (srctype))
941 || TREE_CODE (srctype) == BOOLEAN_TYPE
942 || TREE_CODE (srctype) == ENUMERAL_TYPE)
943 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
944 if (!srctype)
945 srctype = desttype;
946 if (!desttype)
947 desttype = srctype;
948 if (!srctype)
949 return false;
951 src_align = get_pointer_alignment (src);
952 dest_align = get_pointer_alignment (dest);
953 if (dest_align < TYPE_ALIGN (desttype)
954 || src_align < TYPE_ALIGN (srctype))
955 return false;
957 destvar = dest;
958 STRIP_NOPS (destvar);
959 if (TREE_CODE (destvar) == ADDR_EXPR
960 && var_decl_component_p (TREE_OPERAND (destvar, 0))
961 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
963 else
964 destvar = NULL_TREE;
966 srcvar = src;
967 STRIP_NOPS (srcvar);
968 if (TREE_CODE (srcvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
972 if (!destvar
973 || src_align >= TYPE_ALIGN (desttype))
974 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
975 srcvar, off0);
976 else if (!STRICT_ALIGNMENT)
978 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
979 src_align);
980 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
982 else
983 srcvar = NULL_TREE;
985 else
986 srcvar = NULL_TREE;
988 if (srcvar == NULL_TREE && destvar == NULL_TREE)
989 return false;
991 if (srcvar == NULL_TREE)
993 STRIP_NOPS (src);
994 if (src_align >= TYPE_ALIGN (desttype))
995 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
996 else
998 if (STRICT_ALIGNMENT)
999 return false;
1000 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1001 src_align);
1002 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1005 else if (destvar == NULL_TREE)
1007 STRIP_NOPS (dest);
1008 if (dest_align >= TYPE_ALIGN (srctype))
1009 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1010 else
1012 if (STRICT_ALIGNMENT)
1013 return false;
1014 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1015 dest_align);
1016 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1020 gimple *new_stmt;
1021 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1023 tree tem = fold_const_aggregate_ref (srcvar);
1024 if (tem)
1025 srcvar = tem;
1026 if (! is_gimple_min_invariant (srcvar))
1028 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1029 if (gimple_in_ssa_p (cfun))
1030 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1031 else
1032 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1033 gimple_assign_set_lhs (new_stmt, srcvar);
1034 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1035 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1038 new_stmt = gimple_build_assign (destvar, srcvar);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1041 if (gimple_vdef (new_stmt)
1042 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1043 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1044 if (!lhs)
1046 gsi_replace (gsi, new_stmt, false);
1047 return true;
1049 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1052 done:
1053 gimple_seq stmts = NULL;
1054 if (endp == 0 || endp == 3)
1055 len = NULL_TREE;
1056 else if (endp == 2)
1057 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1058 ssize_int (1));
1059 if (endp == 2 || endp == 1)
1061 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1062 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1063 TREE_TYPE (dest), dest, len);
1066 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1067 gimple *repl = gimple_build_assign (lhs, dest);
1068 gsi_replace (gsi, repl, false);
1069 return true;
1072 /* Fold function call to builtin memset or bzero at *GSI setting the
1073 memory of size LEN to VAL. Return whether a simplification was made. */
1075 static bool
1076 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1078 gimple *stmt = gsi_stmt (*gsi);
1079 tree etype;
1080 unsigned HOST_WIDE_INT length, cval;
1082 /* If the LEN parameter is zero, return DEST. */
1083 if (integer_zerop (len))
1085 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1086 return true;
1089 if (! tree_fits_uhwi_p (len))
1090 return false;
1092 if (TREE_CODE (c) != INTEGER_CST)
1093 return false;
1095 tree dest = gimple_call_arg (stmt, 0);
1096 tree var = dest;
1097 if (TREE_CODE (var) != ADDR_EXPR)
1098 return false;
1100 var = TREE_OPERAND (var, 0);
1101 if (TREE_THIS_VOLATILE (var))
1102 return false;
1104 etype = TREE_TYPE (var);
1105 if (TREE_CODE (etype) == ARRAY_TYPE)
1106 etype = TREE_TYPE (etype);
1108 if (!INTEGRAL_TYPE_P (etype)
1109 && !POINTER_TYPE_P (etype))
1110 return NULL_TREE;
1112 if (! var_decl_component_p (var))
1113 return NULL_TREE;
1115 length = tree_to_uhwi (len);
1116 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1117 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1118 return NULL_TREE;
1120 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1121 return NULL_TREE;
1123 if (integer_zerop (c))
1124 cval = 0;
1125 else
1127 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1128 return NULL_TREE;
1130 cval = TREE_INT_CST_LOW (c);
1131 cval &= 0xff;
1132 cval |= cval << 8;
1133 cval |= cval << 16;
1134 cval |= (cval << 31) << 1;
1137 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1138 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1139 gimple_set_vuse (store, gimple_vuse (stmt));
1140 tree vdef = gimple_vdef (stmt);
1141 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1143 gimple_set_vdef (store, gimple_vdef (stmt));
1144 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1146 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1147 if (gimple_call_lhs (stmt))
1149 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1150 gsi_replace (gsi, asgn, false);
1152 else
1154 gimple_stmt_iterator gsi2 = *gsi;
1155 gsi_prev (gsi);
1156 gsi_remove (&gsi2, true);
1159 return true;
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; only non-negative
	     integer constants qualify.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries, merge by keeping the larger of
		 the previously recorded value and VAL.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For an exact length query (TYPE == 0) all sources must
	     agree on the same constant.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR value may be either arm; both must yield a
	     usable length/value.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1289 tree
1290 get_maxval_strlen (tree arg, int type)
1292 bitmap visited = NULL;
1293 tree len = NULL_TREE;
1294 if (!get_maxval_strlen (arg, &len, &visited, type))
1295 len = NULL_TREE;
1296 if (visited)
1297 BITMAP_FREE (visited);
1299 return len;
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If the length of SRC is a known constant, the call is turned into a
   memcpy of length + 1 bytes.  Return true if a simplification was
   made, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* strcpy -> memcpy is a size-neutral-at-best trade; skip it when
     optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy length + 1 bytes to include the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN.  When LEN is a constant at least as large as strlen (SRC) + 1,
   the call is turned into a memcpy.  Return true if a simplification
   was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* SLEN + 1 accounts for the terminating NUL.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   When the length of SRC is a known constant and we are optimizing for
   speed, strcat (dst, src) is expanded into

     tmp = strlen (dst);
     memcpy (dst + tmp, src, strlen (src) + 1);

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating NUL moves too.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1485 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1486 are the arguments to the call. */
1488 static bool
1489 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1491 gimple *stmt = gsi_stmt (*gsi);
1492 tree dest = gimple_call_arg (stmt, 0);
1493 tree src = gimple_call_arg (stmt, 1);
1494 tree size = gimple_call_arg (stmt, 2);
1495 tree fn;
1496 const char *p;
1499 p = c_getstr (src);
1500 /* If the SRC parameter is "", return DEST. */
1501 if (p && *p == '\0')
1503 replace_call_with_value (gsi, dest);
1504 return true;
1507 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1508 return false;
1510 /* If __builtin_strcat_chk is used, assume strcat is available. */
1511 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1512 if (!fn)
1513 return false;
1515 gimple *repl = gimple_build_call (fn, 2, dest, src);
1516 replace_call_with_call_and_fold (gsi, repl);
1517 return true;
/* Simplify a call to the strncat builtin.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Return true if a simplification was made, false
   otherwise.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      /* SIZE is a real object-size bound; we may only weaken the
	 check to __strcat_chk when LEN provably covers all of SRC.  */
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Depending on the (known constant) length of ARG0
   the call is deleted, turned into fputc, or turned into fwrite.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin being folded.  The object-size
   check is dropped (converting to the plain builtin) only when SIZE
   is unknown (all ones) or provably >= LEN.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin (BUILT_IN_STRCPY_CHK or
   BUILT_IN_STPCPY_CHK).  Return true if a simplification was made,
   false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy len + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The copy writes MAXLEN + 1 bytes; SIZE must exceed MAXLEN.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code of
   the builtin (BUILT_IN_STRNCPY_CHK or BUILT_IN_STPNCPY_CHK).  Return
   true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused, the call becomes a strcpy; otherwise, when
   the source length is a known constant, it becomes a memcpy followed
   by computing DEST + LEN as the result.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy len + 1 bytes to include the terminating NUL.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the call to the memcpy so the
     memory state chaining stays intact.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk at *GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  The call is
   converted in place to the non-checking {,v}snprintf when the
   object-size check can be proven redundant.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  The call is
   converted in place to the non-checking {,v}sprintf when the
   object-size check can be proven redundant.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      /* SIZE is a real bound: only fold when the output length is
	 known and strictly smaller than SIZE.  */
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2186 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2187 ORIG may be null if this is a 2-argument call. We don't attempt to
2188 simplify calls with more than 3 arguments.
2190 Return true if the call was simplified in place into a strcpy (plus,
2191 when the call's lhs is used, an assignment of the known result
2192 length); return false if no simplification was possible. */
2194 static bool
2195 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2197 gimple *stmt = gsi_stmt (*gsi);
2198 tree dest = gimple_call_arg (stmt, 0);
2199 tree fmt = gimple_call_arg (stmt, 1);
2200 tree orig = NULL_TREE;
2201 const char *fmt_str = NULL;
2203 /* Verify the required arguments in the original call. We deal with two
2204 types of sprintf() calls: 'sprintf (str, fmt)' and
2205 'sprintf (dest, "%s", orig)'. */
2206 if (gimple_call_num_args (stmt) > 3)
2207 return false;
2209 if (gimple_call_num_args (stmt) == 3)
2210 orig = gimple_call_arg (stmt, 2);
2212 /* Check whether the format is a literal string constant. */
2213 fmt_str = c_getstr (fmt);
2214 if (fmt_str == NULL)
2215 return false;
2217 if (!init_target_chars ())
2218 return false;
2220 /* If the format doesn't contain % args or %%, use strcpy. */
2221 if (strchr (fmt_str, target_percent) == NULL)
2223 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2225 if (!fn)
2226 return false;
2228 /* Don't optimize sprintf (buf, "abc", ptr++). */
2229 if (orig)
2230 return false;
2232 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2233 'format' is known to contain no % formats. */
2234 gimple_seq stmts = NULL;
2235 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2236 gimple_seq_add_stmt_without_update (&stmts, repl);
2237 if (gimple_call_lhs (stmt))
     /* sprintf returns the number of characters written, which for a
        %-free format is just its length.  */
2239 repl = gimple_build_assign (gimple_call_lhs (stmt),
2240 build_int_cst (integer_type_node,
2241 strlen (fmt_str)));
2242 gimple_seq_add_stmt_without_update (&stmts, repl);
2243 gsi_replace_with_seq_vops (gsi, stmts);
2244 /* gsi now points at the assignment to the lhs, get a
2245 stmt iterator to the strcpy call.
2246 ??? We can't use gsi_for_stmt as that doesn't work when the
2247 CFG isn't built yet. */
2248 gimple_stmt_iterator gsi2 = *gsi;
2249 gsi_prev (&gsi2);
2250 fold_stmt (&gsi2);
2252 else
2254 gsi_replace_with_seq_vops (gsi, stmts);
2255 fold_stmt (gsi);
2257 return true;
2260 /* If the format is "%s", use strcpy if the result isn't used. */
2261 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2263 tree fn;
2264 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2266 if (!fn)
2267 return false;
2269 /* Don't crash on sprintf (str1, "%s"). */
2270 if (!orig)
2271 return false;
2273 tree orig_len = NULL_TREE;
2274 if (gimple_call_lhs (stmt))
     /* The lhs is used, so the return value (strlen of ORIG) must be
        computable; otherwise punt.  */
2276 orig_len = get_maxval_strlen (orig, 0);
2277 if (!orig_len)
2278 return false;
2281 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2282 gimple_seq stmts = NULL;
2283 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2284 gimple_seq_add_stmt_without_update (&stmts, repl);
2285 if (gimple_call_lhs (stmt))
2287 if (!useless_type_conversion_p (integer_type_node,
2288 TREE_TYPE (orig_len)))
2289 orig_len = fold_convert (integer_type_node, orig_len);
2290 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2291 gimple_seq_add_stmt_without_update (&stmts, repl);
2292 gsi_replace_with_seq_vops (gsi, stmts);
2293 /* gsi now points at the assignment to the lhs, get a
2294 stmt iterator to the strcpy call.
2295 ??? We can't use gsi_for_stmt as that doesn't work when the
2296 CFG isn't built yet. */
2297 gimple_stmt_iterator gsi2 = *gsi;
2298 gsi_prev (&gsi2);
2299 fold_stmt (&gsi2);
2301 else
2303 gsi_replace_with_seq_vops (gsi, stmts);
2304 fold_stmt (gsi);
2306 return true;
2308 return false;
2311 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2312 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2313 attempt to simplify calls with more than 4 arguments.
2315 Return true if the call was simplified in place into a strcpy (plus,
2316 when the call's lhs is used, an assignment of the known result
2317 length); return false if no simplification was possible. */
2319 static bool
2320 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2322 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2323 tree dest = gimple_call_arg (stmt, 0);
2324 tree destsize = gimple_call_arg (stmt, 1);
2325 tree fmt = gimple_call_arg (stmt, 2);
2326 tree orig = NULL_TREE;
2327 const char *fmt_str = NULL;
2329 if (gimple_call_num_args (stmt) > 4)
2330 return false;
2332 if (gimple_call_num_args (stmt) == 4)
2333 orig = gimple_call_arg (stmt, 3);
     /* The destination size must be a compile-time constant, otherwise
        we cannot prove the copy fits.  */
2335 if (!tree_fits_uhwi_p (destsize))
2336 return false;
2337 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2339 /* Check whether the format is a literal string constant. */
2340 fmt_str = c_getstr (fmt);
2341 if (fmt_str == NULL)
2342 return false;
2344 if (!init_target_chars ())
2345 return false;
2347 /* If the format doesn't contain % args or %%, use strcpy. */
2348 if (strchr (fmt_str, target_percent) == NULL)
2350 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2351 if (!fn)
2352 return false;
2354 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2355 if (orig)
2356 return false;
2358 /* We could expand this as
2359 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2360 or to
2361 memcpy (str, fmt_with_nul_at_cstm1, cst);
2362 but in the former case that might increase code size
2363 and in the latter case grow .rodata section too much.
2364 So punt for now. */
2365 size_t len = strlen (fmt_str);
2366 if (len >= destlen)
2367 return false;
2369 gimple_seq stmts = NULL;
2370 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2371 gimple_seq_add_stmt_without_update (&stmts, repl);
2372 if (gimple_call_lhs (stmt))
     /* snprintf returns the number of characters that would have been
        written; here that is simply the format length.  */
2374 repl = gimple_build_assign (gimple_call_lhs (stmt),
2375 build_int_cst (integer_type_node, len));
2376 gimple_seq_add_stmt_without_update (&stmts, repl);
2377 gsi_replace_with_seq_vops (gsi, stmts);
2378 /* gsi now points at the assignment to the lhs, get a
2379 stmt iterator to the strcpy call.
2380 ??? We can't use gsi_for_stmt as that doesn't work when the
2381 CFG isn't built yet. */
2382 gimple_stmt_iterator gsi2 = *gsi;
2383 gsi_prev (&gsi2);
2384 fold_stmt (&gsi2);
2386 else
2388 gsi_replace_with_seq_vops (gsi, stmts);
2389 fold_stmt (gsi);
2391 return true;
2394 /* If the format is "%s", use strcpy if the result isn't used. */
2395 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2397 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2398 if (!fn)
2399 return false;
2401 /* Don't crash on snprintf (str1, cst, "%s"). */
2402 if (!orig)
2403 return false;
     /* Need a constant upper bound on strlen (ORIG) to prove it fits.  */
2405 tree orig_len = get_maxval_strlen (orig, 0);
2406 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2407 return false;
2409 /* We could expand this as
2410 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2411 or to
2412 memcpy (str1, str2_with_nul_at_cstm1, cst);
2413 but in the former case that might increase code size
2414 and in the latter case grow .rodata section too much.
2415 So punt for now. */
2416 if (compare_tree_int (orig_len, destlen) >= 0)
2417 return false;
2419 /* Convert snprintf (str1, cst, "%s", str2) into
2420 strcpy (str1, str2) if strlen (str2) < cst. */
2421 gimple_seq stmts = NULL;
2422 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 if (gimple_call_lhs (stmt))
2426 if (!useless_type_conversion_p (integer_type_node,
2427 TREE_TYPE (orig_len)))
2428 orig_len = fold_convert (integer_type_node, orig_len);
2429 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2430 gimple_seq_add_stmt_without_update (&stmts, repl);
2431 gsi_replace_with_seq_vops (gsi, stmts);
2432 /* gsi now points at the assignment to the lhs, get a
2433 stmt iterator to the strcpy call.
2434 ??? We can't use gsi_for_stmt as that doesn't work when the
2435 CFG isn't built yet. */
2436 gimple_stmt_iterator gsi2 = *gsi;
2437 gsi_prev (&gsi2);
2438 fold_stmt (&gsi2);
2440 else
2442 gsi_replace_with_seq_vops (gsi, stmts);
2443 fold_stmt (gsi);
2445 return true;
2447 return false;
2450 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2451 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2452 more than 3 arguments, and ARG may be null in the 2-argument case.
2454 Return true if the call was replaced in place by an fputs/fputc call,
2455 false if no simplification was possible. FCODE is the BUILT_IN_*
2456 code of the function to be simplified. */
2458 static bool
2459 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2460 tree fp, tree fmt, tree arg,
2461 enum built_in_function fcode)
2463 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2464 tree fn_fputc, fn_fputs;
2465 const char *fmt_str = NULL;
2467 /* If the return value is used, don't do the transformation;
     fputs/fputc return values differ from fprintf's.  */
2468 if (gimple_call_lhs (stmt) != NULL_TREE)
2469 return false;
2471 /* Check whether the format is a literal string constant. */
2472 fmt_str = c_getstr (fmt);
2473 if (fmt_str == NULL)
2474 return false;
2476 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2478 /* If we're using an unlocked function, assume the other
2479 unlocked functions exist explicitly. */
2480 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2481 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2483 else
2485 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2486 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2489 if (!init_target_chars ())
2490 return false;
2492 /* If the format doesn't contain % args or %%, use fputs. */
2493 if (strchr (fmt_str, target_percent) == NULL)
2495 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2496 && arg)
2497 return false;
2499 /* If the format specifier was "", fprintf does nothing. */
2500 if (fmt_str[0] == '\0')
2502 replace_call_with_value (gsi, NULL_TREE);
2503 return true;
2506 /* When "string" doesn't contain %, replace all cases of
2507 fprintf (fp, string) with fputs (string, fp). The fputs
2508 builtin will take care of special cases like length == 1. */
2509 if (fn_fputs)
2511 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2512 replace_call_with_call_and_fold (gsi, repl);
2513 return true;
2517 /* The other optimizations can be done only on the non-va_list variants. */
2518 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2519 return false;
2521 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2522 else if (strcmp (fmt_str, target_percent_s) == 0)
2524 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2525 return false;
2526 if (fn_fputs)
2528 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2529 replace_call_with_call_and_fold (gsi, repl);
2530 return true;
2534 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2535 else if (strcmp (fmt_str, target_percent_c) == 0)
2537 if (!arg
2538 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2539 return false;
2540 if (fn_fputc)
2542 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2543 replace_call_with_call_and_fold (gsi, repl);
2544 return true;
2548 return false;
2551 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2552 FMT and ARG are the arguments to the call; we don't fold cases with
2553 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2555 Return true if the call was replaced in place by a puts/putchar call
2556 (or removed entirely), false otherwise. FCODE is the BUILT_IN_*
2557 code of the function to be simplified. */
2559 static bool
2560 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2561 tree arg, enum built_in_function fcode)
2563 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2564 tree fn_putchar, fn_puts, newarg;
2565 const char *fmt_str = NULL;
2567 /* If the return value is used, don't do the transformation;
     puts/putchar return values differ from printf's.  */
2568 if (gimple_call_lhs (stmt) != NULL_TREE)
2569 return false;
2571 /* Check whether the format is a literal string constant. */
2572 fmt_str = c_getstr (fmt);
2573 if (fmt_str == NULL)
2574 return false;
2576 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2578 /* If we're using an unlocked function, assume the other
2579 unlocked functions exist explicitly. */
2580 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2581 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2583 else
2585 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2586 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2589 if (!init_target_chars ())
2590 return false;
     /* Handle printf ("%s", str) and %-free formats together: in both
        cases the output is a known literal string STR.  */
2592 if (strcmp (fmt_str, target_percent_s) == 0
2593 || strchr (fmt_str, target_percent) == NULL)
2595 const char *str;
2597 if (strcmp (fmt_str, target_percent_s) == 0)
2599 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2600 return false;
2602 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2603 return false;
2605 str = c_getstr (arg);
2606 if (str == NULL)
2607 return false;
2609 else
2611 /* The format specifier doesn't contain any '%' characters. */
2612 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2613 && arg)
2614 return false;
2615 str = fmt_str;
2618 /* If the string was "", printf does nothing. */
2619 if (str[0] == '\0')
2621 replace_call_with_value (gsi, NULL_TREE);
2622 return true;
2625 /* If the string has length of 1, call putchar. */
2626 if (str[1] == '\0')
2628 /* Given printf("c"), (where c is any one character,)
2629 convert "c"[0] to an int and pass that to the replacement
2630 function. */
2631 newarg = build_int_cst (integer_type_node, str[0]);
2632 if (fn_putchar)
2634 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2635 replace_call_with_call_and_fold (gsi, repl);
2636 return true;
2639 else
2641 /* If the string was "string\n", call puts("string"). */
2642 size_t len = strlen (str);
2643 if ((unsigned char)str[len - 1] == target_newline
2644 && (size_t) (int) len == len
2645 && (int) len > 0)
2647 char *newstr;
2648 tree offset_node, string_cst;
2650 /* Create a NUL-terminated string that's one char shorter
2651 than the original, stripping off the trailing '\n'. */
2652 newarg = build_string_literal (len, str);
2653 string_cst = string_constant (newarg, &offset_node);
2654 gcc_checking_assert (string_cst
2655 && (TREE_STRING_LENGTH (string_cst)
2656 == (int) len)
2657 && integer_zerop (offset_node)
2658 && (unsigned char)
2659 TREE_STRING_POINTER (string_cst)[len - 1]
2660 == target_newline);
2661 /* build_string_literal creates a new STRING_CST,
2662 modify it in place to avoid double copying. */
2663 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2664 newstr[len - 1] = '\0';
2665 if (fn_puts)
2667 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2668 replace_call_with_call_and_fold (gsi, repl);
2669 return true;
2672 else
2673 /* We'd like to arrange to call fputs(string,stdout) here,
2674 but we need stdout and don't have a way to get it yet. */
2675 return false;
2679 /* The other optimizations can be done only on the non-va_list variants. */
2680 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2681 return false;
2683 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2684 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2686 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2687 return false;
2688 if (fn_puts)
2690 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2691 replace_call_with_call_and_fold (gsi, repl);
2692 return true;
2696 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2697 else if (strcmp (fmt_str, target_percent_c) == 0)
2699 if (!arg || ! useless_type_conversion_p (integer_type_node,
2700 TREE_TYPE (arg)))
2701 return false;
2702 if (fn_putchar)
2704 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2705 replace_call_with_call_and_fold (gsi, repl);
2706 return true;
2710 return false;
2715 /* Fold a call to __builtin_strlen whose argument has a string length
     that can be computed at compile time, replacing the call with that
     constant.  Return true if the call was replaced, false otherwise. */
2717 static bool
2718 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2720 gimple *stmt = gsi_stmt (*gsi);
2721 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2722 if (!len)
2723 return false;
     /* LEN may be a non-trivial expression; gimplify it before GSI so the
        replacement value is a valid gimple operand.  */
2724 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2725 replace_call_with_value (gsi, len);
2726 return true;
2729 /* Fold a call to __builtin_acc_on_device (ARG0) into the boolean
     expression ARG0 == VAL_HOST || ARG0 == VAL_DEV, where the two
     constants depend on whether this is the host or the accelerator
     (offload) compiler.  Return true if the call was replaced. */
2731 static bool
2732 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2734 /* Defer folding until we know which compiler we're in. */
2735 if (symtab->state != EXPANSION)
2736 return false;
2738 unsigned val_host = GOMP_DEVICE_HOST;
2739 unsigned val_dev = GOMP_DEVICE_NONE;
2741 #ifdef ACCEL_COMPILER
2742 val_host = GOMP_DEVICE_NOT_HOST;
2743 val_dev = ACCEL_COMPILER_acc_device;
2744 #endif
2746 location_t loc = gimple_location (gsi_stmt (*gsi));
     /* host_eq = (arg0 == val_host)  */
2748 tree host_eq = make_ssa_name (boolean_type_node);
2749 gimple *host_ass = gimple_build_assign
2750 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2751 gimple_set_location (host_ass, loc);
2752 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
     /* dev_eq = (arg0 == val_dev)  */
2754 tree dev_eq = make_ssa_name (boolean_type_node);
2755 gimple *dev_ass = gimple_build_assign
2756 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2757 gimple_set_location (dev_ass, loc);
2758 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
     /* result = host_eq | dev_eq  */
2760 tree result = make_ssa_name (boolean_type_node);
2761 gimple *result_ass = gimple_build_assign
2762 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2763 gimple_set_location (result_ass, loc);
2764 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2766 replace_call_with_value (gsi, result);
2768 return true;
2771 /* Fold the non-target builtin at *GSI and return whether any simplification
2772 was made.  Dispatches on the builtin's function code to the specialized
     gimple folders above, then falls back to the generic tree-level
     builtin folder. */
2774 static bool
2775 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2777 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2778 tree callee = gimple_call_fndecl (stmt);
2780 /* Give up for always_inline inline builtins until they are
2781 inlined. */
2782 if (avoid_folding_inline_builtin (callee))
2783 return false;
2785 unsigned n = gimple_call_num_args (stmt);
2786 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2787 switch (fcode)
2789 case BUILT_IN_BZERO:
2790 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2791 gimple_call_arg (stmt, 1));
2792 case BUILT_IN_MEMSET:
2793 return gimple_fold_builtin_memset (gsi,
2794 gimple_call_arg (stmt, 1),
2795 gimple_call_arg (stmt, 2));
     /* Note bcopy's (src, dest) argument order is swapped relative to
        the mem* functions; the final argument of the memory_op folder
        encodes the copy kind. */
2796 case BUILT_IN_BCOPY:
2797 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2798 gimple_call_arg (stmt, 0), 3);
2799 case BUILT_IN_MEMCPY:
2800 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2801 gimple_call_arg (stmt, 1), 0);
2802 case BUILT_IN_MEMPCPY:
2803 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2804 gimple_call_arg (stmt, 1), 1);
2805 case BUILT_IN_MEMMOVE:
2806 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2807 gimple_call_arg (stmt, 1), 3);
2808 case BUILT_IN_SPRINTF_CHK:
2809 case BUILT_IN_VSPRINTF_CHK:
2810 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2811 case BUILT_IN_STRCAT_CHK:
2812 return gimple_fold_builtin_strcat_chk (gsi);
2813 case BUILT_IN_STRNCAT_CHK:
2814 return gimple_fold_builtin_strncat_chk (gsi);
2815 case BUILT_IN_STRLEN:
2816 return gimple_fold_builtin_strlen (gsi);
2817 case BUILT_IN_STRCPY:
2818 return gimple_fold_builtin_strcpy (gsi,
2819 gimple_call_arg (stmt, 0),
2820 gimple_call_arg (stmt, 1));
2821 case BUILT_IN_STRNCPY:
2822 return gimple_fold_builtin_strncpy (gsi,
2823 gimple_call_arg (stmt, 0),
2824 gimple_call_arg (stmt, 1),
2825 gimple_call_arg (stmt, 2));
2826 case BUILT_IN_STRCAT:
2827 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2828 gimple_call_arg (stmt, 1));
2829 case BUILT_IN_STRNCAT:
2830 return gimple_fold_builtin_strncat (gsi);
2831 case BUILT_IN_FPUTS:
2832 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2833 gimple_call_arg (stmt, 1), false);
2834 case BUILT_IN_FPUTS_UNLOCKED:
2835 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1), true);
2837 case BUILT_IN_MEMCPY_CHK:
2838 case BUILT_IN_MEMPCPY_CHK:
2839 case BUILT_IN_MEMMOVE_CHK:
2840 case BUILT_IN_MEMSET_CHK:
2841 return gimple_fold_builtin_memory_chk (gsi,
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
2845 gimple_call_arg (stmt, 3),
2846 fcode);
2847 case BUILT_IN_STPCPY:
2848 return gimple_fold_builtin_stpcpy (gsi);
2849 case BUILT_IN_STRCPY_CHK:
2850 case BUILT_IN_STPCPY_CHK:
2851 return gimple_fold_builtin_stxcpy_chk (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2),
2855 fcode);
2856 case BUILT_IN_STRNCPY_CHK:
2857 case BUILT_IN_STPNCPY_CHK:
2858 return gimple_fold_builtin_stxncpy_chk (gsi,
2859 gimple_call_arg (stmt, 0),
2860 gimple_call_arg (stmt, 1),
2861 gimple_call_arg (stmt, 2),
2862 gimple_call_arg (stmt, 3),
2863 fcode);
2864 case BUILT_IN_SNPRINTF_CHK:
2865 case BUILT_IN_VSNPRINTF_CHK:
2866 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2867 case BUILT_IN_SNPRINTF:
2868 return gimple_fold_builtin_snprintf (gsi);
2869 case BUILT_IN_SPRINTF:
2870 return gimple_fold_builtin_sprintf (gsi);
     /* The *printf folders below only handle specific argument counts;
        the argument at which FMT/ARG sit differs between the plain and
        _chk variants (the _chk forms carry an extra flag argument). */
2871 case BUILT_IN_FPRINTF:
2872 case BUILT_IN_FPRINTF_UNLOCKED:
2873 case BUILT_IN_VFPRINTF:
2874 if (n == 2 || n == 3)
2875 return gimple_fold_builtin_fprintf (gsi,
2876 gimple_call_arg (stmt, 0),
2877 gimple_call_arg (stmt, 1),
2878 n == 3
2879 ? gimple_call_arg (stmt, 2)
2880 : NULL_TREE,
2881 fcode);
2882 break;
2883 case BUILT_IN_FPRINTF_CHK:
2884 case BUILT_IN_VFPRINTF_CHK:
2885 if (n == 3 || n == 4)
2886 return gimple_fold_builtin_fprintf (gsi,
2887 gimple_call_arg (stmt, 0),
2888 gimple_call_arg (stmt, 2),
2889 n == 4
2890 ? gimple_call_arg (stmt, 3)
2891 : NULL_TREE,
2892 fcode);
2893 break;
2894 case BUILT_IN_PRINTF:
2895 case BUILT_IN_PRINTF_UNLOCKED:
2896 case BUILT_IN_VPRINTF:
2897 if (n == 1 || n == 2)
2898 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2899 n == 2
2900 ? gimple_call_arg (stmt, 1)
2901 : NULL_TREE, fcode);
2902 break;
2903 case BUILT_IN_PRINTF_CHK:
2904 case BUILT_IN_VPRINTF_CHK:
2905 if (n == 2 || n == 3)
2906 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2907 n == 3
2908 ? gimple_call_arg (stmt, 2)
2909 : NULL_TREE, fcode);
2910 break;
2911 case BUILT_IN_ACC_ON_DEVICE:
2912 return gimple_fold_builtin_acc_on_device (gsi,
2913 gimple_call_arg (stmt, 0));
2914 default:;
2917 /* Try the generic builtin folder. */
2918 bool ignore = (gimple_call_lhs (stmt) == NULL);
2919 tree result = fold_call_stmt (stmt, ignore);
2920 if (result)
2922 if (ignore)
2923 STRIP_NOPS (result);
2924 else
2925 result = fold_convert (gimple_call_return_type (stmt), result);
2926 if (!update_call_from_tree (gsi, result))
2927 gimplify_and_update_call_from_tree (gsi, result);
2928 return true;
2931 return false;
2934 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2935 function calls to constants, where possible.  Return the constant
     replacement value, or NULL_TREE if the dimension is dynamic. */
2937 static tree
2938 fold_internal_goacc_dim (const gimple *call)
2940 int axis = get_oacc_ifn_dim_arg (call);
2941 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2942 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2943 tree result = NULL_TREE;
2945 /* If the size is 1, or we only want the size and it is not dynamic,
2946 we know the answer.  (A size of 1 forces DIM_POS to 0.)  */
2947 if (size == 1 || (!is_pos && size))
2949 tree type = TREE_TYPE (gimple_call_lhs (call));
     /* DIM_SIZE yields SIZE itself; DIM_POS yields SIZE - 1 == 0 here.  */
2950 result = build_int_cst (type, size - is_pos);
2953 return result;
2956 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
2957 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
2958 &var where var is only addressable because of such calls. */
2960 bool
2961 optimize_atomic_compare_exchange_p (gimple *stmt)
     /* Basic shape checks: six arguments, atomics inlining enabled, not
        under tsan/asan (they intercept the library calls), and the stmt
        must carry virtual operands (i.e. be a real memory access).  */
2963 if (gimple_call_num_args (stmt) != 6
2964 || !flag_inline_atomics
2965 || !optimize
2966 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
2967 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
2968 || !gimple_vdef (stmt)
2969 || !gimple_vuse (stmt))
2970 return false;
2972 tree fndecl = gimple_call_fndecl (stmt);
2973 switch (DECL_FUNCTION_CODE (fndecl))
2975 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2976 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2977 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2978 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2979 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2980 break;
2981 default:
2982 return false;
     /* The "expected" argument must be the address of a local scalar
        register-type variable so that rewriting it into SSA form is
        possible once the call no longer takes its address.  */
2985 tree expected = gimple_call_arg (stmt, 1);
2986 if (TREE_CODE (expected) != ADDR_EXPR
2987 || !SSA_VAR_P (TREE_OPERAND (expected, 0))
2988 || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (expected, 0)))
2989 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
2990 || TREE_THIS_VOLATILE (TREE_TYPE (TREE_OPERAND (expected, 0)))
2991 || TREE_CODE (TREE_TYPE (TREE_OPERAND (expected, 0))) == VECTOR_TYPE
2992 || TREE_CODE (TREE_TYPE (TREE_OPERAND (expected, 0))) == COMPLEX_TYPE)
2993 return false;
     /* WEAK must be a compile-time 0/1 so it can be encoded in a flag.  */
2995 tree weak = gimple_call_arg (stmt, 3);
2996 if (!integer_zerop (weak) && !integer_onep (weak))
2997 return false;
     /* ITYPE is the integral type of the value being exchanged (third
        parameter type of the builtin).  The target must provide a
        compare-and-swap for its mode.  */
2999 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3000 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3001 machine_mode mode = TYPE_MODE (itype);
3003 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3004 == CODE_FOR_nothing
3005 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3006 return false;
3008 if (int_size_in_bytes (TREE_TYPE (TREE_OPERAND (expected, 0)))
3009 != GET_MODE_SIZE (mode))
3010 return false;
3012 return true;
3015 /* Fold
3016 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3017 into
3018 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3019 i = IMAGPART_EXPR <t>;
3020 r = (_Bool) i;
3021 e = REALPART_EXPR <t>;
     so that E no longer has to live in addressable memory.  Assumes the
     statement satisfies optimize_atomic_compare_exchange_p.  On return
     *GSI points at the load of the old E value. */
3023 void
3024 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3026 gimple *stmt = gsi_stmt (*gsi);
3027 tree fndecl = gimple_call_fndecl (stmt);
3028 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3029 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3030 tree ctype = build_complex_type (itype);
3031 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
     /* Load the current value of E by value (it is no longer passed by
        address).  */
3032 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3033 expected);
3034 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3035 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3036 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
     /* Reinterpret E's bits as the integral type the ifn expects.  */
3038 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3039 build1 (VIEW_CONVERT_EXPR, itype,
3040 gimple_assign_lhs (g)));
3041 gsi_insert_before (gsi, g, GSI_SAME_STMT);
     /* Encode WEAK in bit 8 and the access size N in the low byte, as
        documented in the function comment (w * 256 + N).  */
3043 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3044 + int_size_in_bytes (itype);
3045 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3046 gimple_call_arg (stmt, 0),
3047 gimple_assign_lhs (g),
3048 gimple_call_arg (stmt, 2),
3049 build_int_cst (integer_type_node, flag),
3050 gimple_call_arg (stmt, 4),
3051 gimple_call_arg (stmt, 5));
3052 tree lhs = make_ssa_name (ctype);
3053 gimple_call_set_lhs (g, lhs);
     /* Transfer the virtual operands from the old call to the ifn call.  */
3054 gimple_set_vdef (g, gimple_vdef (stmt));
3055 gimple_set_vuse (g, gimple_vuse (stmt));
3056 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3057 if (gimple_call_lhs (stmt))
     /* The success flag is the imaginary part, converted to bool.  */
3059 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3060 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3061 build1 (IMAGPART_EXPR, itype, lhs));
3062 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3063 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3064 gimple_assign_lhs (g));
3066 gsi_replace (gsi, g, true);
     /* The (possibly updated) old value is the real part; store it back
        into E, view-converting if E's type differs from ITYPE.  */
3067 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3068 build1 (REALPART_EXPR, itype, lhs));
3069 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3070 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3072 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3073 VIEW_CONVERT_EXPR,
3074 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3075 gimple_assign_lhs (g)));
3076 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3078 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3079 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3080 *gsi = gsiret;
3083 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3084 doesn't fit into TYPE. The test for overflow should be regardless of
3085 -fwrapv, and even for unsigned types. */
3087 bool
3088 arith_overflowed_p (enum tree_code code, const_tree type,
3089 const_tree arg0, const_tree arg1)
     /* Compute in twice the maximal precision so that even the product of
        two maximal-width operands cannot itself overflow.  */
3091 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3092 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3093 widest2_int_cst;
3094 widest2_int warg0 = widest2_int_cst (arg0);
3095 widest2_int warg1 = widest2_int_cst (arg1);
3096 widest2_int wres;
3097 switch (code)
3099 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3100 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3101 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3102 default: gcc_unreachable ();
     /* A negative result can never be represented in an unsigned type;
        otherwise overflow iff the result needs more bits than TYPE has.  */
3104 signop sign = TYPE_SIGN (type);
3105 if (sign == UNSIGNED && wi::neg_p (wres))
3106 return true;
3107 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolved to a known fndecl; warn in
	     the dump file if the target contradicts the type inheritance
	     graph, then strip the OBJ_TYPE_REF wrapper.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization via the type inheritance
	     analysis; only done when we may insert/replace statements.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if ((gimple_call_flags (stmt) & ECF_NORETURN)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it a
			     default definition of a new temporary.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible targets at all: the call can never be
		     reached, so replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry the virtual operands over to the
			 replacement statement.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise still fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* The transforms below may replace or insert statements, which is
     not allowed when folding in-place.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* The (painfully) scalarized form of folding of internal functions.
	 SUBCODE is the arithmetic operation of the IFN_*_OVERFLOW or
	 UBSAN check, CPLX_RESULT whether the IFN returns a complex
	 value-plus-overflow-flag pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the size is unknown (all-ones) or the
	     constant offset is provably within the constant size.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the *_OVERFLOW IFNs the scalar result type is the
		 element type of the complex lhs, if any.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Constant result: overflow iff it does not fit the
			 result type.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* Non-constant result that may not be representable in
		       the result type: give up on folding.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Pack the scalar value and the overflow flag back into the
		 complex value the IFN is defined to return.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3420 /* Return true whether NAME has a use on STMT. */
3422 static bool
3423 has_use_on_stmt (tree name, gimple *stmt)
3425 imm_use_iterator iter;
3426 use_operand_p use_p;
3427 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3428 if (USE_STMT (use_p) == stmt)
3429 return true;
3430 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can only hold a comparison; anything else has to be
	 rewritten as "res != 0" or a constant condition.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Constant condition: fold the branch statically.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified expression as statements in *SEQ
	     and compare its result against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In place we may only shrink the rhs, never grow it.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same (combined) function as before: only the arguments were
	 simplified; update them in place on STMT.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Build the simplification into *SEQ, defining the original lhs,
	     and splice it in while preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize in place; return true
   if anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     verify the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (sizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to get at the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is the callback used to look up
   current values of SSA names during simplification.  Returns true iff
   the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  /* Buffer overflow warnings are collected and only emitted at the end
     if folding actually changed something (and warnings are wanted).  */
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize addresses in all argument and lhs positions.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* In-place folding may not create new statements, so pass a NULL
	 sequence in that case.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement; the above may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In place we may only shrink the rhs, never grow it.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* An input may be folded to a memory reference only if its
	       constraint allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the referenced object inside the ADDR_EXPR and
		 rebuild the address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells gimple_simplify never to look through
     the definition of an SSA name.  */
  return NULL_TREE;
}
4020 /* Valueization callback that ends up following single-use SSA edges only. */
4022 tree
4023 follow_single_use_edges (tree val)
4025 if (TREE_CODE (val) == SSA_NAME
4026 && !has_single_use (val))
4027 return NULL_TREE;
4028 return val;
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Full (non-inplace) folding, but without looking through SSA
     definitions.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
/* Like the fold_stmt overload above, but look up current values of
   SSA names via the VALUEIZE callback during simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never replace the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
4067 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4068 if EXPR is null or we don't know how.
4069 If non-null, the result always has boolean type. */
4071 static tree
4072 canonicalize_bool (tree expr, bool invert)
4074 if (!expr)
4075 return NULL_TREE;
4076 else if (invert)
4078 if (integer_nonzerop (expr))
4079 return boolean_false_node;
4080 else if (integer_zerop (expr))
4081 return boolean_true_node;
4082 else if (TREE_CODE (expr) == SSA_NAME)
4083 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4084 build_int_cst (TREE_TYPE (expr), 0));
4085 else if (COMPARISON_CLASS_P (expr))
4086 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4087 boolean_type_node,
4088 TREE_OPERAND (expr, 0),
4089 TREE_OPERAND (expr, 1));
4090 else
4091 return NULL_TREE;
4093 else
4095 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4096 return expr;
4097 if (integer_nonzerop (expr))
4098 return boolean_true_node;
4099 else if (integer_zerop (expr))
4100 return boolean_false_node;
4101 else if (TREE_CODE (expr) == SSA_NAME)
4102 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4103 build_int_cst (TREE_TYPE (expr), 0));
4104 else if (COMPARISON_CLASS_P (expr))
4105 return fold_build2 (TREE_CODE (expr),
4106 boolean_type_node,
4107 TREE_OPERAND (expr, 0),
4108 TREE_OPERAND (expr, 1));
4109 else
4110 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name is equivalent to (name != 0) and to (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name CODE 0/nonzero) keeps or inverts the defining
	     comparison depending on CODE and the constant.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
4172 /* Check to see if two boolean expressions OP1 and OP2 are logically
4173 equivalent. */
4175 static bool
4176 same_bool_result_p (const_tree op1, const_tree op2)
4178 /* Simple cases first. */
4179 if (operand_equal_p (op1, op2, 0))
4180 return true;
4182 /* Check the cases where at least one of the operands is a comparison.
4183 These are a bit smarter than operand_equal_p in that they apply some
4184 identifies on SSA_NAMEs. */
4185 if (COMPARISON_CLASS_P (op2)
4186 && same_bool_comparison_p (op1, TREE_CODE (op2),
4187 TREE_OPERAND (op2, 0),
4188 TREE_OPERAND (op2, 1)))
4189 return true;
4190 if (COMPARISON_CLASS_P (op1)
4191 && same_bool_comparison_p (op2, TREE_CODE (op1),
4192 TREE_OPERAND (op1, 0),
4193 TREE_OPERAND (op1, 1)))
4194 return true;
4196 /* Default case. */
4197 return false;
4200 /* Forward declarations for some mutually recursive functions. */
4202 static tree
4203 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4204 enum tree_code code2, tree op2a, tree op2b);
4205 static tree
4206 and_var_with_comparison (tree var, bool invert,
4207 enum tree_code code2, tree op2a, tree op2b);
4208 static tree
4209 and_var_with_comparison_1 (gimple *stmt,
4210 enum tree_code code2, tree op2a, tree op2b);
4211 static tree
4212 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4213 enum tree_code code2, tree op2a, tree op2b);
4214 static tree
4215 or_var_with_comparison (tree var, bool invert,
4216 enum tree_code code2, tree op2a, tree op2b);
4217 static tree
4218 or_var_with_comparison_1 (gimple *stmt,
4219 enum tree_code code2, tree op2a, tree op2b);
4221 /* Helper function for and_comparisons_1: try to simplify the AND of the
4222 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4223 If INVERT is true, invert the value of the VAR before doing the AND.
4224 Return NULL_EXPR if we can't simplify this to a single expression. */
4226 static tree
4227 and_var_with_comparison (tree var, bool invert,
4228 enum tree_code code2, tree op2a, tree op2b)
4230 tree t;
4231 gimple *stmt = SSA_NAME_DEF_STMT (var);
4233 /* We can only deal with variables whose definitions are assignments. */
4234 if (!is_gimple_assign (stmt))
4235 return NULL_TREE;
4237 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4238 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4239 Then we only have to consider the simpler non-inverted cases. */
4240 if (invert)
4241 t = or_var_with_comparison_1 (stmt,
4242 invert_tree_comparison (code2, false),
4243 op2a, op2b);
4244 else
4245 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4246 return canonicalize_bool (t, invert);
4249 /* Try to simplify the AND of the ssa variable defined by the assignment
4250 STMT with the comparison specified by (OP2A CODE2 OP2B).
4251 Return NULL_EXPR if we can't simplify this to a single expression. */
4253 static tree
4254 and_var_with_comparison_1 (gimple *stmt,
4255 enum tree_code code2, tree op2a, tree op2b)
4257 tree var = gimple_assign_lhs (stmt);
4258 tree true_test_var = NULL_TREE;
4259 tree false_test_var = NULL_TREE;
4260 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4262 /* Check for identities like (var AND (var == 0)) => false. */
4263 if (TREE_CODE (op2a) == SSA_NAME
4264 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4266 if ((code2 == NE_EXPR && integer_zerop (op2b))
4267 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4269 true_test_var = op2a;
4270 if (var == true_test_var)
4271 return var;
4273 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4274 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4276 false_test_var = op2a;
4277 if (var == false_test_var)
4278 return boolean_false_node;
4282 /* If the definition is a comparison, recurse on it. */
4283 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4285 tree t = and_comparisons_1 (innercode,
4286 gimple_assign_rhs1 (stmt),
4287 gimple_assign_rhs2 (stmt),
4288 code2,
4289 op2a,
4290 op2b);
4291 if (t)
4292 return t;
4295 /* If the definition is an AND or OR expression, we may be able to
4296 simplify by reassociating. */
4297 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4298 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4300 tree inner1 = gimple_assign_rhs1 (stmt);
4301 tree inner2 = gimple_assign_rhs2 (stmt);
4302 gimple *s;
4303 tree t;
4304 tree partial = NULL_TREE;
4305 bool is_and = (innercode == BIT_AND_EXPR);
4307 /* Check for boolean identities that don't require recursive examination
4308 of inner1/inner2:
4309 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4310 inner1 AND (inner1 OR inner2) => inner1
4311 !inner1 AND (inner1 AND inner2) => false
4312 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4313 Likewise for similar cases involving inner2. */
4314 if (inner1 == true_test_var)
4315 return (is_and ? var : inner1);
4316 else if (inner2 == true_test_var)
4317 return (is_and ? var : inner2);
4318 else if (inner1 == false_test_var)
4319 return (is_and
4320 ? boolean_false_node
4321 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4322 else if (inner2 == false_test_var)
4323 return (is_and
4324 ? boolean_false_node
4325 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4327 /* Next, redistribute/reassociate the AND across the inner tests.
4328 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4329 if (TREE_CODE (inner1) == SSA_NAME
4330 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4331 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4332 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4333 gimple_assign_rhs1 (s),
4334 gimple_assign_rhs2 (s),
4335 code2, op2a, op2b)))
4337 /* Handle the AND case, where we are reassociating:
4338 (inner1 AND inner2) AND (op2a code2 op2b)
4339 => (t AND inner2)
4340 If the partial result t is a constant, we win. Otherwise
4341 continue on to try reassociating with the other inner test. */
4342 if (is_and)
4344 if (integer_onep (t))
4345 return inner2;
4346 else if (integer_zerop (t))
4347 return boolean_false_node;
4350 /* Handle the OR case, where we are redistributing:
4351 (inner1 OR inner2) AND (op2a code2 op2b)
4352 => (t OR (inner2 AND (op2a code2 op2b))) */
4353 else if (integer_onep (t))
4354 return boolean_true_node;
4356 /* Save partial result for later. */
4357 partial = t;
4360 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4361 if (TREE_CODE (inner2) == SSA_NAME
4362 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4363 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4364 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4365 gimple_assign_rhs1 (s),
4366 gimple_assign_rhs2 (s),
4367 code2, op2a, op2b)))
4369 /* Handle the AND case, where we are reassociating:
4370 (inner1 AND inner2) AND (op2a code2 op2b)
4371 => (inner1 AND t) */
4372 if (is_and)
4374 if (integer_onep (t))
4375 return inner1;
4376 else if (integer_zerop (t))
4377 return boolean_false_node;
4378 /* If both are the same, we can apply the identity
4379 (x AND x) == x. */
4380 else if (partial && same_bool_result_p (t, partial))
4381 return t;
4384 /* Handle the OR case. where we are redistributing:
4385 (inner1 OR inner2) AND (op2a code2 op2b)
4386 => (t OR (inner1 AND (op2a code2 op2b)))
4387 => (t OR partial) */
4388 else
4390 if (integer_onep (t))
4391 return boolean_true_node;
4392 else if (partial)
4394 /* We already got a simplification for the other
4395 operand to the redistributed OR expression. The
4396 interesting case is when at least one is false.
4397 Or, if both are the same, we can apply the identity
4398 (x OR x) == x. */
4399 if (integer_zerop (partial))
4400 return t;
4401 else if (integer_zerop (t))
4402 return partial;
4403 else if (same_bool_result_p (t, partial))
4404 return t;
4409 return NULL_TREE;
4412 /* Try to simplify the AND of two comparisons defined by
4413 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4414 If this can be done without constructing an intermediate value,
4415 return the resulting tree; otherwise NULL_TREE is returned.
4416 This function is deliberately asymmetric as it recurses on SSA_DEFs
4417 in the first comparison but not the second. */
4419 static tree
4420 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4421 enum tree_code code2, tree op2a, tree op2b)
4423 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4425 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4426 if (operand_equal_p (op1a, op2a, 0)
4427 && operand_equal_p (op1b, op2b, 0))
4429 /* Result will be either NULL_TREE, or a combined comparison. */
4430 tree t = combine_comparisons (UNKNOWN_LOCATION,
4431 TRUTH_ANDIF_EXPR, code1, code2,
4432 truth_type, op1a, op1b);
4433 if (t)
4434 return t;
4437 /* Likewise the swapped case of the above. */
4438 if (operand_equal_p (op1a, op2b, 0)
4439 && operand_equal_p (op1b, op2a, 0))
4441 /* Result will be either NULL_TREE, or a combined comparison. */
4442 tree t = combine_comparisons (UNKNOWN_LOCATION,
4443 TRUTH_ANDIF_EXPR, code1,
4444 swap_tree_comparison (code2),
4445 truth_type, op1a, op1b);
4446 if (t)
4447 return t;
4450 /* If both comparisons are of the same value against constants, we might
4451 be able to merge them. */
4452 if (operand_equal_p (op1a, op2a, 0)
4453 && TREE_CODE (op1b) == INTEGER_CST
4454 && TREE_CODE (op2b) == INTEGER_CST)
4456 int cmp = tree_int_cst_compare (op1b, op2b);
4458 /* If we have (op1a == op1b), we should either be able to
4459 return that or FALSE, depending on whether the constant op1b
4460 also satisfies the other comparison against op2b. */
4461 if (code1 == EQ_EXPR)
4463 bool done = true;
4464 bool val;
4465 switch (code2)
4467 case EQ_EXPR: val = (cmp == 0); break;
4468 case NE_EXPR: val = (cmp != 0); break;
4469 case LT_EXPR: val = (cmp < 0); break;
4470 case GT_EXPR: val = (cmp > 0); break;
4471 case LE_EXPR: val = (cmp <= 0); break;
4472 case GE_EXPR: val = (cmp >= 0); break;
4473 default: done = false;
4475 if (done)
4477 if (val)
4478 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4479 else
4480 return boolean_false_node;
4483 /* Likewise if the second comparison is an == comparison. */
4484 else if (code2 == EQ_EXPR)
4486 bool done = true;
4487 bool val;
4488 switch (code1)
4490 case EQ_EXPR: val = (cmp == 0); break;
4491 case NE_EXPR: val = (cmp != 0); break;
4492 case LT_EXPR: val = (cmp > 0); break;
4493 case GT_EXPR: val = (cmp < 0); break;
4494 case LE_EXPR: val = (cmp >= 0); break;
4495 case GE_EXPR: val = (cmp <= 0); break;
4496 default: done = false;
4498 if (done)
4500 if (val)
4501 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4502 else
4503 return boolean_false_node;
4507 /* Same business with inequality tests. */
4508 else if (code1 == NE_EXPR)
4510 bool val;
4511 switch (code2)
4513 case EQ_EXPR: val = (cmp != 0); break;
4514 case NE_EXPR: val = (cmp == 0); break;
4515 case LT_EXPR: val = (cmp >= 0); break;
4516 case GT_EXPR: val = (cmp <= 0); break;
4517 case LE_EXPR: val = (cmp > 0); break;
4518 case GE_EXPR: val = (cmp < 0); break;
4519 default:
4520 val = false;
4522 if (val)
4523 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4525 else if (code2 == NE_EXPR)
4527 bool val;
4528 switch (code1)
4530 case EQ_EXPR: val = (cmp == 0); break;
4531 case NE_EXPR: val = (cmp != 0); break;
4532 case LT_EXPR: val = (cmp <= 0); break;
4533 case GT_EXPR: val = (cmp >= 0); break;
4534 case LE_EXPR: val = (cmp < 0); break;
4535 case GE_EXPR: val = (cmp > 0); break;
4536 default:
4537 val = false;
4539 if (val)
4540 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4543 /* Chose the more restrictive of two < or <= comparisons. */
4544 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4545 && (code2 == LT_EXPR || code2 == LE_EXPR))
4547 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4548 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4549 else
4550 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4553 /* Likewise chose the more restrictive of two > or >= comparisons. */
4554 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4555 && (code2 == GT_EXPR || code2 == GE_EXPR))
4557 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4558 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4559 else
4560 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4563 /* Check for singleton ranges. */
4564 else if (cmp == 0
4565 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4566 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4567 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4569 /* Check for disjoint ranges. */
4570 else if (cmp <= 0
4571 && (code1 == LT_EXPR || code1 == LE_EXPR)
4572 && (code2 == GT_EXPR || code2 == GE_EXPR))
4573 return boolean_false_node;
4574 else if (cmp >= 0
4575 && (code1 == GT_EXPR || code1 == GE_EXPR)
4576 && (code2 == LT_EXPR || code2 == LE_EXPR))
4577 return boolean_false_node;
4580 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4581 NAME's definition is a truth value. See if there are any simplifications
4582 that can be done against the NAME's definition. */
4583 if (TREE_CODE (op1a) == SSA_NAME
4584 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4585 && (integer_zerop (op1b) || integer_onep (op1b)))
4587 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4588 || (code1 == NE_EXPR && integer_onep (op1b)));
4589 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4590 switch (gimple_code (stmt))
4592 case GIMPLE_ASSIGN:
4593 /* Try to simplify by copy-propagating the definition. */
4594 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4596 case GIMPLE_PHI:
4597 /* If every argument to the PHI produces the same result when
4598 ANDed with the second comparison, we win.
4599 Do not do this unless the type is bool since we need a bool
4600 result here anyway. */
4601 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4603 tree result = NULL_TREE;
4604 unsigned i;
4605 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4607 tree arg = gimple_phi_arg_def (stmt, i);
4609 /* If this PHI has itself as an argument, ignore it.
4610 If all the other args produce the same result,
4611 we're still OK. */
4612 if (arg == gimple_phi_result (stmt))
4613 continue;
4614 else if (TREE_CODE (arg) == INTEGER_CST)
4616 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4618 if (!result)
4619 result = boolean_false_node;
4620 else if (!integer_zerop (result))
4621 return NULL_TREE;
4623 else if (!result)
4624 result = fold_build2 (code2, boolean_type_node,
4625 op2a, op2b);
4626 else if (!same_bool_comparison_p (result,
4627 code2, op2a, op2b))
4628 return NULL_TREE;
4630 else if (TREE_CODE (arg) == SSA_NAME
4631 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4633 tree temp;
4634 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4635 /* In simple cases we can look through PHI nodes,
4636 but we have to be careful with loops.
4637 See PR49073. */
4638 if (! dom_info_available_p (CDI_DOMINATORS)
4639 || gimple_bb (def_stmt) == gimple_bb (stmt)
4640 || dominated_by_p (CDI_DOMINATORS,
4641 gimple_bb (def_stmt),
4642 gimple_bb (stmt)))
4643 return NULL_TREE;
4644 temp = and_var_with_comparison (arg, invert, code2,
4645 op2a, op2b);
4646 if (!temp)
4647 return NULL_TREE;
4648 else if (!result)
4649 result = temp;
4650 else if (!same_bool_result_p (result, temp))
4651 return NULL_TREE;
4653 else
4654 return NULL_TREE;
4656 return result;
4659 default:
4660 break;
4663 return NULL_TREE;
4666 /* Try to simplify the AND of two comparisons, specified by
4667 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4668 If this can be simplified to a single expression (without requiring
4669 introducing more SSA variables to hold intermediate values),
4670 return the resulting tree. Otherwise return NULL_TREE.
4671 If the result expression is non-null, it has boolean type. */
4673 tree
4674 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4675 enum tree_code code2, tree op2a, tree op2b)
4677 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4678 if (t)
4679 return t;
4680 else
4681 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4684 /* Helper function for or_comparisons_1: try to simplify the OR of the
4685 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4686 If INVERT is true, invert the value of VAR before doing the OR.
4687 Return NULL_EXPR if we can't simplify this to a single expression. */
4689 static tree
4690 or_var_with_comparison (tree var, bool invert,
4691 enum tree_code code2, tree op2a, tree op2b)
4693 tree t;
4694 gimple *stmt = SSA_NAME_DEF_STMT (var);
4696 /* We can only deal with variables whose definitions are assignments. */
4697 if (!is_gimple_assign (stmt))
4698 return NULL_TREE;
4700 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4701 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4702 Then we only have to consider the simpler non-inverted cases. */
4703 if (invert)
4704 t = and_var_with_comparison_1 (stmt,
4705 invert_tree_comparison (code2, false),
4706 op2a, op2b);
4707 else
4708 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4709 return canonicalize_bool (t, invert);
4712 /* Try to simplify the OR of the ssa variable defined by the assignment
4713 STMT with the comparison specified by (OP2A CODE2 OP2B).
4714 Return NULL_EXPR if we can't simplify this to a single expression. */
4716 static tree
4717 or_var_with_comparison_1 (gimple *stmt,
4718 enum tree_code code2, tree op2a, tree op2b)
4720 tree var = gimple_assign_lhs (stmt);
4721 tree true_test_var = NULL_TREE;
4722 tree false_test_var = NULL_TREE;
4723 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4725 /* Check for identities like (var OR (var != 0)) => true . */
4726 if (TREE_CODE (op2a) == SSA_NAME
4727 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4729 if ((code2 == NE_EXPR && integer_zerop (op2b))
4730 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4732 true_test_var = op2a;
4733 if (var == true_test_var)
4734 return var;
4736 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4737 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4739 false_test_var = op2a;
4740 if (var == false_test_var)
4741 return boolean_true_node;
4745 /* If the definition is a comparison, recurse on it. */
4746 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4748 tree t = or_comparisons_1 (innercode,
4749 gimple_assign_rhs1 (stmt),
4750 gimple_assign_rhs2 (stmt),
4751 code2,
4752 op2a,
4753 op2b);
4754 if (t)
4755 return t;
4758 /* If the definition is an AND or OR expression, we may be able to
4759 simplify by reassociating. */
4760 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4761 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4763 tree inner1 = gimple_assign_rhs1 (stmt);
4764 tree inner2 = gimple_assign_rhs2 (stmt);
4765 gimple *s;
4766 tree t;
4767 tree partial = NULL_TREE;
4768 bool is_or = (innercode == BIT_IOR_EXPR);
4770 /* Check for boolean identities that don't require recursive examination
4771 of inner1/inner2:
4772 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4773 inner1 OR (inner1 AND inner2) => inner1
4774 !inner1 OR (inner1 OR inner2) => true
4775 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4777 if (inner1 == true_test_var)
4778 return (is_or ? var : inner1);
4779 else if (inner2 == true_test_var)
4780 return (is_or ? var : inner2);
4781 else if (inner1 == false_test_var)
4782 return (is_or
4783 ? boolean_true_node
4784 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4785 else if (inner2 == false_test_var)
4786 return (is_or
4787 ? boolean_true_node
4788 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4790 /* Next, redistribute/reassociate the OR across the inner tests.
4791 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4792 if (TREE_CODE (inner1) == SSA_NAME
4793 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4794 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4795 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4796 gimple_assign_rhs1 (s),
4797 gimple_assign_rhs2 (s),
4798 code2, op2a, op2b)))
4800 /* Handle the OR case, where we are reassociating:
4801 (inner1 OR inner2) OR (op2a code2 op2b)
4802 => (t OR inner2)
4803 If the partial result t is a constant, we win. Otherwise
4804 continue on to try reassociating with the other inner test. */
4805 if (is_or)
4807 if (integer_onep (t))
4808 return boolean_true_node;
4809 else if (integer_zerop (t))
4810 return inner2;
4813 /* Handle the AND case, where we are redistributing:
4814 (inner1 AND inner2) OR (op2a code2 op2b)
4815 => (t AND (inner2 OR (op2a code op2b))) */
4816 else if (integer_zerop (t))
4817 return boolean_false_node;
4819 /* Save partial result for later. */
4820 partial = t;
4823 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4824 if (TREE_CODE (inner2) == SSA_NAME
4825 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4826 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4827 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4828 gimple_assign_rhs1 (s),
4829 gimple_assign_rhs2 (s),
4830 code2, op2a, op2b)))
4832 /* Handle the OR case, where we are reassociating:
4833 (inner1 OR inner2) OR (op2a code2 op2b)
4834 => (inner1 OR t)
4835 => (t OR partial) */
4836 if (is_or)
4838 if (integer_zerop (t))
4839 return inner1;
4840 else if (integer_onep (t))
4841 return boolean_true_node;
4842 /* If both are the same, we can apply the identity
4843 (x OR x) == x. */
4844 else if (partial && same_bool_result_p (t, partial))
4845 return t;
4848 /* Handle the AND case, where we are redistributing:
4849 (inner1 AND inner2) OR (op2a code2 op2b)
4850 => (t AND (inner1 OR (op2a code2 op2b)))
4851 => (t AND partial) */
4852 else
4854 if (integer_zerop (t))
4855 return boolean_false_node;
4856 else if (partial)
4858 /* We already got a simplification for the other
4859 operand to the redistributed AND expression. The
4860 interesting case is when at least one is true.
4861 Or, if both are the same, we can apply the identity
4862 (x AND x) == x. */
4863 if (integer_onep (partial))
4864 return t;
4865 else if (integer_onep (t))
4866 return partial;
4867 else if (same_bool_result_p (t, partial))
4868 return t;
4873 return NULL_TREE;
4876 /* Try to simplify the OR of two comparisons defined by
4877 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4878 If this can be done without constructing an intermediate value,
4879 return the resulting tree; otherwise NULL_TREE is returned.
4880 This function is deliberately asymmetric as it recurses on SSA_DEFs
4881 in the first comparison but not the second. */
4883 static tree
4884 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4885 enum tree_code code2, tree op2a, tree op2b)
4887 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4889 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4890 if (operand_equal_p (op1a, op2a, 0)
4891 && operand_equal_p (op1b, op2b, 0))
4893 /* Result will be either NULL_TREE, or a combined comparison. */
4894 tree t = combine_comparisons (UNKNOWN_LOCATION,
4895 TRUTH_ORIF_EXPR, code1, code2,
4896 truth_type, op1a, op1b);
4897 if (t)
4898 return t;
4901 /* Likewise the swapped case of the above. */
4902 if (operand_equal_p (op1a, op2b, 0)
4903 && operand_equal_p (op1b, op2a, 0))
4905 /* Result will be either NULL_TREE, or a combined comparison. */
4906 tree t = combine_comparisons (UNKNOWN_LOCATION,
4907 TRUTH_ORIF_EXPR, code1,
4908 swap_tree_comparison (code2),
4909 truth_type, op1a, op1b);
4910 if (t)
4911 return t;
4914 /* If both comparisons are of the same value against constants, we might
4915 be able to merge them. */
4916 if (operand_equal_p (op1a, op2a, 0)
4917 && TREE_CODE (op1b) == INTEGER_CST
4918 && TREE_CODE (op2b) == INTEGER_CST)
4920 int cmp = tree_int_cst_compare (op1b, op2b);
4922 /* If we have (op1a != op1b), we should either be able to
4923 return that or TRUE, depending on whether the constant op1b
4924 also satisfies the other comparison against op2b. */
4925 if (code1 == NE_EXPR)
4927 bool done = true;
4928 bool val;
4929 switch (code2)
4931 case EQ_EXPR: val = (cmp == 0); break;
4932 case NE_EXPR: val = (cmp != 0); break;
4933 case LT_EXPR: val = (cmp < 0); break;
4934 case GT_EXPR: val = (cmp > 0); break;
4935 case LE_EXPR: val = (cmp <= 0); break;
4936 case GE_EXPR: val = (cmp >= 0); break;
4937 default: done = false;
4939 if (done)
4941 if (val)
4942 return boolean_true_node;
4943 else
4944 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4947 /* Likewise if the second comparison is a != comparison. */
4948 else if (code2 == NE_EXPR)
4950 bool done = true;
4951 bool val;
4952 switch (code1)
4954 case EQ_EXPR: val = (cmp == 0); break;
4955 case NE_EXPR: val = (cmp != 0); break;
4956 case LT_EXPR: val = (cmp > 0); break;
4957 case GT_EXPR: val = (cmp < 0); break;
4958 case LE_EXPR: val = (cmp >= 0); break;
4959 case GE_EXPR: val = (cmp <= 0); break;
4960 default: done = false;
4962 if (done)
4964 if (val)
4965 return boolean_true_node;
4966 else
4967 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4971 /* See if an equality test is redundant with the other comparison. */
4972 else if (code1 == EQ_EXPR)
4974 bool val;
4975 switch (code2)
4977 case EQ_EXPR: val = (cmp == 0); break;
4978 case NE_EXPR: val = (cmp != 0); break;
4979 case LT_EXPR: val = (cmp < 0); break;
4980 case GT_EXPR: val = (cmp > 0); break;
4981 case LE_EXPR: val = (cmp <= 0); break;
4982 case GE_EXPR: val = (cmp >= 0); break;
4983 default:
4984 val = false;
4986 if (val)
4987 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4989 else if (code2 == EQ_EXPR)
4991 bool val;
4992 switch (code1)
4994 case EQ_EXPR: val = (cmp == 0); break;
4995 case NE_EXPR: val = (cmp != 0); break;
4996 case LT_EXPR: val = (cmp > 0); break;
4997 case GT_EXPR: val = (cmp < 0); break;
4998 case LE_EXPR: val = (cmp >= 0); break;
4999 case GE_EXPR: val = (cmp <= 0); break;
5000 default:
5001 val = false;
5003 if (val)
5004 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5007 /* Chose the less restrictive of two < or <= comparisons. */
5008 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5009 && (code2 == LT_EXPR || code2 == LE_EXPR))
5011 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5012 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5013 else
5014 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5017 /* Likewise chose the less restrictive of two > or >= comparisons. */
5018 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5019 && (code2 == GT_EXPR || code2 == GE_EXPR))
5021 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5022 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5023 else
5024 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5027 /* Check for singleton ranges. */
5028 else if (cmp == 0
5029 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5030 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5031 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5033 /* Check for less/greater pairs that don't restrict the range at all. */
5034 else if (cmp >= 0
5035 && (code1 == LT_EXPR || code1 == LE_EXPR)
5036 && (code2 == GT_EXPR || code2 == GE_EXPR))
5037 return boolean_true_node;
5038 else if (cmp <= 0
5039 && (code1 == GT_EXPR || code1 == GE_EXPR)
5040 && (code2 == LT_EXPR || code2 == LE_EXPR))
5041 return boolean_true_node;
5044 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5045 NAME's definition is a truth value. See if there are any simplifications
5046 that can be done against the NAME's definition. */
5047 if (TREE_CODE (op1a) == SSA_NAME
5048 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5049 && (integer_zerop (op1b) || integer_onep (op1b)))
5051 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5052 || (code1 == NE_EXPR && integer_onep (op1b)));
5053 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5054 switch (gimple_code (stmt))
5056 case GIMPLE_ASSIGN:
5057 /* Try to simplify by copy-propagating the definition. */
5058 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5060 case GIMPLE_PHI:
5061 /* If every argument to the PHI produces the same result when
5062 ORed with the second comparison, we win.
5063 Do not do this unless the type is bool since we need a bool
5064 result here anyway. */
5065 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5067 tree result = NULL_TREE;
5068 unsigned i;
5069 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5071 tree arg = gimple_phi_arg_def (stmt, i);
5073 /* If this PHI has itself as an argument, ignore it.
5074 If all the other args produce the same result,
5075 we're still OK. */
5076 if (arg == gimple_phi_result (stmt))
5077 continue;
5078 else if (TREE_CODE (arg) == INTEGER_CST)
5080 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5082 if (!result)
5083 result = boolean_true_node;
5084 else if (!integer_onep (result))
5085 return NULL_TREE;
5087 else if (!result)
5088 result = fold_build2 (code2, boolean_type_node,
5089 op2a, op2b);
5090 else if (!same_bool_comparison_p (result,
5091 code2, op2a, op2b))
5092 return NULL_TREE;
5094 else if (TREE_CODE (arg) == SSA_NAME
5095 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5097 tree temp;
5098 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5099 /* In simple cases we can look through PHI nodes,
5100 but we have to be careful with loops.
5101 See PR49073. */
5102 if (! dom_info_available_p (CDI_DOMINATORS)
5103 || gimple_bb (def_stmt) == gimple_bb (stmt)
5104 || dominated_by_p (CDI_DOMINATORS,
5105 gimple_bb (def_stmt),
5106 gimple_bb (stmt)))
5107 return NULL_TREE;
5108 temp = or_var_with_comparison (arg, invert, code2,
5109 op2a, op2b);
5110 if (!temp)
5111 return NULL_TREE;
5112 else if (!result)
5113 result = temp;
5114 else if (!same_bool_result_p (result, temp))
5115 return NULL_TREE;
5117 else
5118 return NULL_TREE;
5120 return result;
5123 default:
5124 break;
5127 return NULL_TREE;
5130 /* Try to simplify the OR of two comparisons, specified by
5131 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5132 If this can be simplified to a single expression (without requiring
5133 introducing more SSA variables to hold intermediate values),
5134 return the resulting tree. Otherwise return NULL_TREE.
5135 If the result expression is non-null, it has boolean type. */
5137 tree
5138 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5139 enum tree_code code2, tree op2a, tree op2b)
5141 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5142 if (t)
5143 return t;
5144 else
5145 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5149 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5151 Either NULL_TREE, a simplified but non-constant or a constant
5152 is returned.
5154 ??? This should go into a gimple-fold-inline.h file to be eventually
5155 privatized with the single valueize function used in the various TUs
5156 to avoid the indirect function call overhead. */
5158 tree
5159 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5160 tree (*gvalueize) (tree))
5162 code_helper rcode;
5163 tree ops[3] = {};
5164 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5165 edges if there are intermediate VARYING defs. For this reason
5166 do not follow SSA edges here even though SCCVN can technically
5167 just deal fine with that. */
5168 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5170 tree res = NULL_TREE;
5171 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5172 res = ops[0];
5173 else if (mprts_hook)
5174 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5175 if (res)
5177 if (dump_file && dump_flags & TDF_DETAILS)
5179 fprintf (dump_file, "Match-and-simplified ");
5180 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5181 fprintf (dump_file, " to ");
5182 print_generic_expr (dump_file, res, 0);
5183 fprintf (dump_file, "\n");
5185 return res;
5189 location_t loc = gimple_location (stmt);
5190 switch (gimple_code (stmt))
5192 case GIMPLE_ASSIGN:
5194 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5196 switch (get_gimple_rhs_class (subcode))
5198 case GIMPLE_SINGLE_RHS:
5200 tree rhs = gimple_assign_rhs1 (stmt);
5201 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5203 if (TREE_CODE (rhs) == SSA_NAME)
5205 /* If the RHS is an SSA_NAME, return its known constant value,
5206 if any. */
5207 return (*valueize) (rhs);
5209 /* Handle propagating invariant addresses into address
5210 operations. */
5211 else if (TREE_CODE (rhs) == ADDR_EXPR
5212 && !is_gimple_min_invariant (rhs))
5214 HOST_WIDE_INT offset = 0;
5215 tree base;
5216 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5217 &offset,
5218 valueize);
5219 if (base
5220 && (CONSTANT_CLASS_P (base)
5221 || decl_address_invariant_p (base)))
5222 return build_invariant_address (TREE_TYPE (rhs),
5223 base, offset);
5225 else if (TREE_CODE (rhs) == CONSTRUCTOR
5226 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5227 && (CONSTRUCTOR_NELTS (rhs)
5228 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5230 unsigned i;
5231 tree val, *vec;
5233 vec = XALLOCAVEC (tree,
5234 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5235 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5237 val = (*valueize) (val);
5238 if (TREE_CODE (val) == INTEGER_CST
5239 || TREE_CODE (val) == REAL_CST
5240 || TREE_CODE (val) == FIXED_CST)
5241 vec[i] = val;
5242 else
5243 return NULL_TREE;
5246 return build_vector (TREE_TYPE (rhs), vec);
5248 if (subcode == OBJ_TYPE_REF)
5250 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5251 /* If callee is constant, we can fold away the wrapper. */
5252 if (is_gimple_min_invariant (val))
5253 return val;
5256 if (kind == tcc_reference)
5258 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5259 || TREE_CODE (rhs) == REALPART_EXPR
5260 || TREE_CODE (rhs) == IMAGPART_EXPR)
5261 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5263 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5264 return fold_unary_loc (EXPR_LOCATION (rhs),
5265 TREE_CODE (rhs),
5266 TREE_TYPE (rhs), val);
5268 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5269 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5271 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5272 return fold_ternary_loc (EXPR_LOCATION (rhs),
5273 TREE_CODE (rhs),
5274 TREE_TYPE (rhs), val,
5275 TREE_OPERAND (rhs, 1),
5276 TREE_OPERAND (rhs, 2));
5278 else if (TREE_CODE (rhs) == MEM_REF
5279 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5281 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5282 if (TREE_CODE (val) == ADDR_EXPR
5283 && is_gimple_min_invariant (val))
5285 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5286 unshare_expr (val),
5287 TREE_OPERAND (rhs, 1));
5288 if (tem)
5289 rhs = tem;
5292 return fold_const_aggregate_ref_1 (rhs, valueize);
5294 else if (kind == tcc_declaration)
5295 return get_symbol_constant_value (rhs);
5296 return rhs;
5299 case GIMPLE_UNARY_RHS:
5300 return NULL_TREE;
5302 case GIMPLE_BINARY_RHS:
5303 /* Translate &x + CST into an invariant form suitable for
5304 further propagation. */
5305 if (subcode == POINTER_PLUS_EXPR)
5307 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5308 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5309 if (TREE_CODE (op0) == ADDR_EXPR
5310 && TREE_CODE (op1) == INTEGER_CST)
5312 tree off = fold_convert (ptr_type_node, op1);
5313 return build_fold_addr_expr_loc
5314 (loc,
5315 fold_build2 (MEM_REF,
5316 TREE_TYPE (TREE_TYPE (op0)),
5317 unshare_expr (op0), off));
5320 /* Canonicalize bool != 0 and bool == 0 appearing after
5321 valueization. While gimple_simplify handles this
5322 it can get confused by the ~X == 1 -> X == 0 transform
5323 which we cant reduce to a SSA name or a constant
5324 (and we have no way to tell gimple_simplify to not
5325 consider those transforms in the first place). */
5326 else if (subcode == EQ_EXPR
5327 || subcode == NE_EXPR)
5329 tree lhs = gimple_assign_lhs (stmt);
5330 tree op0 = gimple_assign_rhs1 (stmt);
5331 if (useless_type_conversion_p (TREE_TYPE (lhs),
5332 TREE_TYPE (op0)))
5334 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5335 op0 = (*valueize) (op0);
5336 if (TREE_CODE (op0) == INTEGER_CST)
5337 std::swap (op0, op1);
5338 if (TREE_CODE (op1) == INTEGER_CST
5339 && ((subcode == NE_EXPR && integer_zerop (op1))
5340 || (subcode == EQ_EXPR && integer_onep (op1))))
5341 return op0;
5344 return NULL_TREE;
5346 case GIMPLE_TERNARY_RHS:
5348 /* Handle ternary operators that can appear in GIMPLE form. */
5349 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5350 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5351 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5352 return fold_ternary_loc (loc, subcode,
5353 gimple_expr_type (stmt), op0, op1, op2);
5356 default:
5357 gcc_unreachable ();
5361 case GIMPLE_CALL:
5363 tree fn;
5364 gcall *call_stmt = as_a <gcall *> (stmt);
5366 if (gimple_call_internal_p (stmt))
5368 enum tree_code subcode = ERROR_MARK;
5369 switch (gimple_call_internal_fn (stmt))
5371 case IFN_UBSAN_CHECK_ADD:
5372 subcode = PLUS_EXPR;
5373 break;
5374 case IFN_UBSAN_CHECK_SUB:
5375 subcode = MINUS_EXPR;
5376 break;
5377 case IFN_UBSAN_CHECK_MUL:
5378 subcode = MULT_EXPR;
5379 break;
5380 case IFN_BUILTIN_EXPECT:
5382 tree arg0 = gimple_call_arg (stmt, 0);
5383 tree op0 = (*valueize) (arg0);
5384 if (TREE_CODE (op0) == INTEGER_CST)
5385 return op0;
5386 return NULL_TREE;
5388 default:
5389 return NULL_TREE;
5391 tree arg0 = gimple_call_arg (stmt, 0);
5392 tree arg1 = gimple_call_arg (stmt, 1);
5393 tree op0 = (*valueize) (arg0);
5394 tree op1 = (*valueize) (arg1);
5396 if (TREE_CODE (op0) != INTEGER_CST
5397 || TREE_CODE (op1) != INTEGER_CST)
5399 switch (subcode)
5401 case MULT_EXPR:
5402 /* x * 0 = 0 * x = 0 without overflow. */
5403 if (integer_zerop (op0) || integer_zerop (op1))
5404 return build_zero_cst (TREE_TYPE (arg0));
5405 break;
5406 case MINUS_EXPR:
5407 /* y - y = 0 without overflow. */
5408 if (operand_equal_p (op0, op1, 0))
5409 return build_zero_cst (TREE_TYPE (arg0));
5410 break;
5411 default:
5412 break;
5415 tree res
5416 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5417 if (res
5418 && TREE_CODE (res) == INTEGER_CST
5419 && !TREE_OVERFLOW (res))
5420 return res;
5421 return NULL_TREE;
5424 fn = (*valueize) (gimple_call_fn (stmt));
5425 if (TREE_CODE (fn) == ADDR_EXPR
5426 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5427 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5428 && gimple_builtin_call_types_compatible_p (stmt,
5429 TREE_OPERAND (fn, 0)))
5431 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5432 tree retval;
5433 unsigned i;
5434 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5435 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5436 retval = fold_builtin_call_array (loc,
5437 gimple_call_return_type (call_stmt),
5438 fn, gimple_call_num_args (stmt), args);
5439 if (retval)
5441 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5442 STRIP_NOPS (retval);
5443 retval = fold_convert (gimple_call_return_type (call_stmt),
5444 retval);
5446 return retval;
5448 return NULL_TREE;
5451 default:
5452 return NULL_TREE;
5456 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5457 Returns NULL_TREE if folding to a constant is not possible, otherwise
5458 returns a constant according to is_gimple_min_invariant. */
5460 tree
5461 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5463 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5464 if (res && is_gimple_min_invariant (res))
5465 return res;
5466 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET, then strip the
	 address-taking so we can look at the underlying decl.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      /* Valueize the address if it is an SSA name so we can see
	 through copies to an invariant &decl.  */
      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only exact-size accesses can be accumulated into the offset;
	 a variable-extent reference loses the constant position.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  Returns the folded constant or
   NULL_TREE if the access cannot be resolved.  FROM_DECL, if non-NULL, is
   the decl the constructor was taken from (used for reference checking).  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   Returns the folded constant, a zero constant for memory not covered by
   an explicit initializer, or NULL_TREE if the access spans multiple
   fields or only partially overlaps one.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	/* A flexible array member has no known end; treated below as
	   extending to the end of the object.  */
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size SIZE (in bits) to the memory at bit OFFSET.  FROM_DECL, if non-NULL,
   is the decl CTOR was taken from.  Returns the folded constant or
   NULL_TREE when folding is not possible.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the shape of the aggregate: arrays/vectors are
	 indexed by element, everything else by field.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile accesses must not be folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare decl: use its known constant value, if any.  */
  if (DECL_P (t))
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5840 tree
5841 fold_const_aggregate_ref (tree t)
5843 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Only LTO streaming can lose the ctor.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5957 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5958 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5959 KNOWN_BINFO carries the binfo describing the true type of
5960 OBJ_TYPE_REF_OBJECT(REF).
5961 Set CAN_REFER if non-NULL to false if method
5962 is not referable or if the virtual table is ill-formed (such as rewriten
5963 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5965 tree
5966 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5967 bool *can_refer)
5969 unsigned HOST_WIDE_INT offset;
5970 tree v;
5972 v = BINFO_VTABLE (known_binfo);
5973 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5974 if (!v)
5975 return NULL_TREE;
5977 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5979 if (can_refer)
5980 *can_refer = false;
5981 return NULL_TREE;
5983 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must be exactly the size of the real part.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6106 /* Return true if CODE is an operation that when operating on signed
6107 integer types involves undefined behavior on overflow and the
6108 operation can be expressed with unsigned arithmetic. */
6110 bool
6111 arith_code_with_undefined_signed_overflow (tree_code code)
6113 switch (code)
6115 case PLUS_EXPR:
6116 case MINUS_EXPR:
6117 case MULT_EXPR:
6118 case NEGATE_EXPR:
6119 case POINTER_PLUS_EXPR:
6120 return true;
6121 default:
6122 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand to the unsigned counterpart type, collecting
     the conversion statements into STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6164 /* The valueization hook we use for the gimple_build API simplification.
6165 This makes us match fold_buildN behavior by only combining with
6166 statements in the sequence(s) we are currently building. */
6168 static tree
6169 gimple_build_valueize (tree op)
6171 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6172 return op;
6173 return NULL_TREE;
6176 /* Build the expression CODE OP0 of type TYPE with location LOC,
6177 simplifying it first if possible. Returns the built
6178 expression value and appends statements possibly defining it
6179 to SEQ. */
6181 tree
6182 gimple_build (gimple_seq *seq, location_t loc,
6183 enum tree_code code, tree type, tree op0)
6185 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6186 if (!res)
6188 if (gimple_in_ssa_p (cfun))
6189 res = make_ssa_name (type);
6190 else
6191 res = create_tmp_reg (type);
6192 gimple *stmt;
6193 if (code == REALPART_EXPR
6194 || code == IMAGPART_EXPR
6195 || code == VIEW_CONVERT_EXPR)
6196 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6197 else
6198 stmt = gimple_build_assign (res, code, op0);
6199 gimple_set_location (stmt, loc);
6200 gimple_seq_add_stmt_without_update (seq, stmt);
6202 return res;
6205 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6206 simplifying it first if possible. Returns the built
6207 expression value and appends statements possibly defining it
6208 to SEQ. */
6210 tree
6211 gimple_build (gimple_seq *seq, location_t loc,
6212 enum tree_code code, tree type, tree op0, tree op1)
6214 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6215 if (!res)
6217 if (gimple_in_ssa_p (cfun))
6218 res = make_ssa_name (type);
6219 else
6220 res = create_tmp_reg (type);
6221 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6222 gimple_set_location (stmt, loc);
6223 gimple_seq_add_stmt_without_update (seq, stmt);
6225 return res;
6228 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6229 simplifying it first if possible. Returns the built
6230 expression value and appends statements possibly defining it
6231 to SEQ. */
6233 tree
6234 gimple_build (gimple_seq *seq, location_t loc,
6235 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6237 tree res = gimple_simplify (code, type, op0, op1, op2,
6238 seq, gimple_build_valueize);
6239 if (!res)
6241 if (gimple_in_ssa_p (cfun))
6242 res = make_ssa_name (type);
6243 else
6244 res = create_tmp_reg (type);
6245 gimple *stmt;
6246 if (code == BIT_FIELD_REF)
6247 stmt = gimple_build_assign (res, code,
6248 build3 (code, type, op0, op1, op2));
6249 else
6250 stmt = gimple_build_assign (res, code, op0, op1, op2);
6251 gimple_set_location (stmt, loc);
6252 gimple_seq_add_stmt_without_update (seq, stmt);
6254 return res;
6257 /* Build the call FN (ARG0) with a result of type TYPE
6258 (or no result if TYPE is void) with location LOC,
6259 simplifying it first if possible. Returns the built
6260 expression value (or NULL_TREE if TYPE is void) and appends
6261 statements possibly defining it to SEQ. */
6263 tree
6264 gimple_build (gimple_seq *seq, location_t loc,
6265 enum built_in_function fn, tree type, tree arg0)
6267 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6268 if (!res)
6270 tree decl = builtin_decl_implicit (fn);
6271 gimple *stmt = gimple_build_call (decl, 1, arg0);
6272 if (!VOID_TYPE_P (type))
6274 if (gimple_in_ssa_p (cfun))
6275 res = make_ssa_name (type);
6276 else
6277 res = create_tmp_reg (type);
6278 gimple_call_set_lhs (stmt, res);
6280 gimple_set_location (stmt, loc);
6281 gimple_seq_add_stmt_without_update (seq, stmt);
6283 return res;
6286 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6287 (or no result if TYPE is void) with location LOC,
6288 simplifying it first if possible. Returns the built
6289 expression value (or NULL_TREE if TYPE is void) and appends
6290 statements possibly defining it to SEQ. */
6292 tree
6293 gimple_build (gimple_seq *seq, location_t loc,
6294 enum built_in_function fn, tree type, tree arg0, tree arg1)
6296 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6297 if (!res)
6299 tree decl = builtin_decl_implicit (fn);
6300 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6301 if (!VOID_TYPE_P (type))
6303 if (gimple_in_ssa_p (cfun))
6304 res = make_ssa_name (type);
6305 else
6306 res = create_tmp_reg (type);
6307 gimple_call_set_lhs (stmt, res);
6309 gimple_set_location (stmt, loc);
6310 gimple_seq_add_stmt_without_update (seq, stmt);
6312 return res;
6315 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6316 (or no result if TYPE is void) with location LOC,
6317 simplifying it first if possible. Returns the built
6318 expression value (or NULL_TREE if TYPE is void) and appends
6319 statements possibly defining it to SEQ. */
6321 tree
6322 gimple_build (gimple_seq *seq, location_t loc,
6323 enum built_in_function fn, tree type,
6324 tree arg0, tree arg1, tree arg2)
6326 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6327 seq, gimple_build_valueize);
6328 if (!res)
6330 tree decl = builtin_decl_implicit (fn);
6331 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6332 if (!VOID_TYPE_P (type))
6334 if (gimple_in_ssa_p (cfun))
6335 res = make_ssa_name (type);
6336 else
6337 res = create_tmp_reg (type);
6338 gimple_call_set_lhs (stmt, res);
6340 gimple_set_location (stmt, loc);
6341 gimple_seq_add_stmt_without_update (seq, stmt);
6343 return res;
6346 /* Build the conversion (TYPE) OP with a result of type TYPE
6347 with location LOC if such conversion is neccesary in GIMPLE,
6348 simplifying it first.
6349 Returns the built expression value and appends
6350 statements possibly defining it to SEQ. */
6352 tree
6353 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6355 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6356 return op;
6357 return gimple_build (seq, loc, NOP_EXPR, type, op);
6360 /* Build the conversion (ptrofftype) OP with a result of a type
6361 compatible with ptrofftype with location LOC if such conversion
6362 is neccesary in GIMPLE, simplifying it first.
6363 Returns the built expression value and appends
6364 statements possibly defining it to SEQ. */
6366 tree
6367 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6369 if (ptrofftype_p (TREE_TYPE (op)))
6370 return op;
6371 return gimple_convert (seq, loc, sizetype, op);
6374 /* Return true if the result of assignment STMT is known to be non-negative.
6375 If the return value is based on the assumption that signed overflow is
6376 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6377 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6379 static bool
6380 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6381 int depth)
6383 enum tree_code code = gimple_assign_rhs_code (stmt);
6384 switch (get_gimple_rhs_class (code))
6386 case GIMPLE_UNARY_RHS:
6387 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6388 gimple_expr_type (stmt),
6389 gimple_assign_rhs1 (stmt),
6390 strict_overflow_p, depth);
6391 case GIMPLE_BINARY_RHS:
6392 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6393 gimple_expr_type (stmt),
6394 gimple_assign_rhs1 (stmt),
6395 gimple_assign_rhs2 (stmt),
6396 strict_overflow_p, depth);
6397 case GIMPLE_TERNARY_RHS:
6398 return false;
6399 case GIMPLE_SINGLE_RHS:
6400 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6401 strict_overflow_p, depth);
6402 case GIMPLE_INVALID_RHS:
6403 break;
6405 gcc_unreachable ();
6408 /* Return true if return value of call STMT is known to be non-negative.
6409 If the return value is based on the assumption that signed overflow is
6410 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6411 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6413 static bool
6414 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6415 int depth)
6417 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6418 gimple_call_arg (stmt, 0) : NULL_TREE;
6419 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6420 gimple_call_arg (stmt, 1) : NULL_TREE;
6422 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6423 gimple_call_combined_fn (stmt),
6424 arg0,
6425 arg1,
6426 strict_overflow_p, depth);
6429 /* Return true if return value of call STMT is known to be non-negative.
6430 If the return value is based on the assumption that signed overflow is
6431 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6432 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6434 static bool
6435 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6436 int depth)
6438 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6440 tree arg = gimple_phi_arg_def (stmt, i);
6441 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6442 return false;
6444 return true;
6447 /* Return true if STMT is known to compute a non-negative value.
6448 If the return value is based on the assumption that signed overflow is
6449 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6450 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6452 bool
6453 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6454 int depth)
6456 switch (gimple_code (stmt))
6458 case GIMPLE_ASSIGN:
6459 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6460 depth);
6461 case GIMPLE_CALL:
6462 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6463 depth);
6464 case GIMPLE_PHI:
6465 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6466 depth);
6467 default:
6468 return false;
6472 /* Return true if the floating-point value computed by assignment STMT
6473 is known to have an integer value. We also allow +Inf, -Inf and NaN
6474 to be considered integer values. Return false for signaling NaN.
6476 DEPTH is the current nesting depth of the query. */
6478 static bool
6479 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6481 enum tree_code code = gimple_assign_rhs_code (stmt);
6482 switch (get_gimple_rhs_class (code))
6484 case GIMPLE_UNARY_RHS:
6485 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6486 gimple_assign_rhs1 (stmt), depth);
6487 case GIMPLE_BINARY_RHS:
6488 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6489 gimple_assign_rhs1 (stmt),
6490 gimple_assign_rhs2 (stmt), depth);
6491 case GIMPLE_TERNARY_RHS:
6492 return false;
6493 case GIMPLE_SINGLE_RHS:
6494 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6495 case GIMPLE_INVALID_RHS:
6496 break;
6498 gcc_unreachable ();
6501 /* Return true if the floating-point value computed by call STMT is known
6502 to have an integer value. We also allow +Inf, -Inf and NaN to be
6503 considered integer values. Return false for signaling NaN.
6505 DEPTH is the current nesting depth of the query. */
6507 static bool
6508 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6510 tree arg0 = (gimple_call_num_args (stmt) > 0
6511 ? gimple_call_arg (stmt, 0)
6512 : NULL_TREE);
6513 tree arg1 = (gimple_call_num_args (stmt) > 1
6514 ? gimple_call_arg (stmt, 1)
6515 : NULL_TREE);
6516 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6517 arg0, arg1, depth);
6520 /* Return true if the floating-point result of phi STMT is known to have
6521 an integer value. We also allow +Inf, -Inf and NaN to be considered
6522 integer values. Return false for signaling NaN.
6524 DEPTH is the current nesting depth of the query. */
6526 static bool
6527 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6529 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6531 tree arg = gimple_phi_arg_def (stmt, i);
6532 if (!integer_valued_real_single_p (arg, depth + 1))
6533 return false;
6535 return true;
6538 /* Return true if the floating-point value computed by STMT is known
6539 to have an integer value. We also allow +Inf, -Inf and NaN to be
6540 considered integer values. Return false for signaling NaN.
6542 DEPTH is the current nesting depth of the query. */
6544 bool
6545 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6547 switch (gimple_code (stmt))
6549 case GIMPLE_ASSIGN:
6550 return gimple_assign_integer_valued_real_p (stmt, depth);
6551 case GIMPLE_CALL:
6552 return gimple_call_integer_valued_real_p (stmt, depth);
6553 case GIMPLE_PHI:
6554 return gimple_phi_integer_valued_real_p (stmt, depth);
6555 default:
6556 return false;