C: hints for missing stdlib includes for macros and types
[official-gcc.git] / gcc / gimple-fold.c
blob1ed63833b732aa019a60c0ec6d499b4ac873a7ac
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-object-size.h"
44 #include "tree-ssa.h"
45 #include "tree-ssa-propagate.h"
46 #include "ipa-utils.h"
47 #include "tree-ssa-address.h"
48 #include "langhooks.h"
49 #include "gimplify-me.h"
50 #include "dbgcnt.h"
51 #include "builtins.h"
52 #include "tree-eh.h"
53 #include "gimple-match.h"
54 #include "gomp-constants.h"
55 #include "optabs-query.h"
56 #include "omp-general.h"
57 #include "ipa-chkp.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
66 /* Return true when DECL can be referenced from current unit.
67 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
68 We can get declarations that are not possible to reference for various
69 reasons:
71 1) When analyzing C++ virtual tables.
72 C++ virtual tables do have known constructors even
73 when they are keyed to other compilation unit.
74 Those tables can contain pointers to methods and vars
75 in other units. Those methods have both STATIC and EXTERNAL
76 set.
77 2) In WHOPR mode devirtualization might lead to reference
78 to method that was partitioned elsehwere.
79 In this case we have static VAR_DECL or FUNCTION_DECL
80 that has no corresponding callgraph/varpool node
81 declaring the body.
82 3) COMDAT functions referred by external vtables that
83 we devirtualize only during final compilation stage.
84 At this time we already decided that we will not output
85 the function body and thus we can't reference the symbol
86 directly. */
88 static bool
89 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
91 varpool_node *vnode;
92 struct cgraph_node *node;
93 symtab_node *snode;
95 if (DECL_ABSTRACT_P (decl))
96 return false;
98 /* We are concerned only about static/external vars and functions. */
99 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
100 || !VAR_OR_FUNCTION_DECL_P (decl))
101 return true;
103 /* Static objects can be referred only if they was not optimized out yet. */
104 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
106 /* Before we start optimizing unreachable code we can be sure all
107 static objects are defined. */
108 if (symtab->function_flags_ready)
109 return true;
110 snode = symtab_node::get (decl);
111 if (!snode || !snode->definition)
112 return false;
113 node = dyn_cast <cgraph_node *> (snode);
114 return !node || !node->global.inlined_to;
117 /* We will later output the initializer, so we can refer to it.
118 So we are concerned only when DECL comes from initializer of
119 external var or var that has been optimized out. */
120 if (!from_decl
121 || !VAR_P (from_decl)
122 || (!DECL_EXTERNAL (from_decl)
123 && (vnode = varpool_node::get (from_decl)) != NULL
124 && vnode->definition)
125 || (flag_ltrans
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->in_other_partition))
128 return true;
129 /* We are folding reference from external vtable. The vtable may reffer
130 to a symbol keyed to other compilation unit. The other compilation
131 unit may be in separate DSO and the symbol may be hidden. */
132 if (DECL_VISIBILITY_SPECIFIED (decl)
133 && DECL_EXTERNAL (decl)
134 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
135 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
136 return false;
137 /* When function is public, we always can introduce new reference.
138 Exception are the COMDAT functions where introducing a direct
139 reference imply need to include function body in the curren tunit. */
140 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
141 return true;
142 /* We have COMDAT. We are going to check if we still have definition
143 or if the definition is going to be output in other partition.
144 Bypass this when gimplifying; all needed functions will be produced.
146 As observed in PR20991 for already optimized out comdat virtual functions
147 it may be tempting to not necessarily give up because the copy will be
148 output elsewhere when corresponding vtable is output.
149 This is however not possible - ABI specify that COMDATs are output in
150 units where they are used and when the other unit was compiled with LTO
151 it is possible that vtable was kept public while the function itself
152 was privatized. */
153 if (!symtab->function_flags_ready)
154 return true;
156 snode = symtab_node::get (decl);
157 if (!snode
158 || ((!snode->definition || DECL_EXTERNAL (decl))
159 && (!snode->in_other_partition
160 || (!snode->forced_by_abi && !snode->force_output))))
161 return false;
162 node = dyn_cast <cgraph_node *> (snode);
163 return !node || !node->global.inlined_to;
166 /* Create a temporary for TYPE for a statement STMT. If the current function
167 is in SSA form, a SSA name is created. Otherwise a temporary register
168 is made. */
170 tree
171 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
173 if (gimple_in_ssa_p (cfun))
174 return make_ssa_name (type, stmt);
175 else
176 return create_tmp_reg (type);
179 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
180 acceptable form for is_gimple_min_invariant.
181 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
183 tree
184 canonicalize_constructor_val (tree cval, tree from_decl)
186 tree orig_cval = cval;
187 STRIP_NOPS (cval);
188 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
189 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
191 tree ptr = TREE_OPERAND (cval, 0);
192 if (is_gimple_min_invariant (ptr))
193 cval = build1_loc (EXPR_LOCATION (cval),
194 ADDR_EXPR, TREE_TYPE (ptr),
195 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
196 ptr,
197 fold_convert (ptr_type_node,
198 TREE_OPERAND (cval, 1))));
200 if (TREE_CODE (cval) == ADDR_EXPR)
202 tree base = NULL_TREE;
203 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
205 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
206 if (base)
207 TREE_OPERAND (cval, 0) = base;
209 else
210 base = get_base_address (TREE_OPERAND (cval, 0));
211 if (!base)
212 return NULL_TREE;
214 if (VAR_OR_FUNCTION_DECL_P (base)
215 && !can_refer_decl_in_current_unit_p (base, from_decl))
216 return NULL_TREE;
217 if (TREE_TYPE (base) == error_mark_node)
218 return NULL_TREE;
219 if (VAR_P (base))
220 TREE_ADDRESSABLE (base) = 1;
221 else if (TREE_CODE (base) == FUNCTION_DECL)
223 /* Make sure we create a cgraph node for functions we'll reference.
224 They can be non-existent if the reference comes from an entry
225 of an external vtable for example. */
226 cgraph_node::get_create (base);
228 /* Fixup types in global initializers. */
229 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
230 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
232 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
233 cval = fold_convert (TREE_TYPE (orig_cval), cval);
234 return cval;
236 if (TREE_OVERFLOW_P (cval))
237 return drop_tree_overflow (cval);
238 return orig_cval;
241 /* If SYM is a constant variable with known value, return the value.
242 NULL_TREE is returned otherwise. */
244 tree
245 get_symbol_constant_value (tree sym)
247 tree val = ctor_for_folding (sym);
248 if (val != error_mark_node)
250 if (val)
252 val = canonicalize_constructor_val (unshare_expr (val), sym);
253 if (val && is_gimple_min_invariant (val))
254 return val;
255 else
256 return NULL_TREE;
258 /* Variables declared 'const' without an initializer
259 have zero as the initializer if they may not be
260 overridden at link or run time. */
261 if (!val
262 && is_gimple_reg_type (TREE_TYPE (sym)))
263 return build_zero_cst (TREE_TYPE (sym));
266 return NULL_TREE;
271 /* Subroutine of fold_stmt. We perform several simplifications of the
272 memory reference tree EXPR and make sure to re-gimplify them properly
273 after propagation of constant addresses. IS_LHS is true if the
274 reference is supposed to be an lvalue. */
276 static tree
277 maybe_fold_reference (tree expr, bool is_lhs)
279 tree result;
281 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
282 || TREE_CODE (expr) == REALPART_EXPR
283 || TREE_CODE (expr) == IMAGPART_EXPR)
284 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
285 return fold_unary_loc (EXPR_LOCATION (expr),
286 TREE_CODE (expr),
287 TREE_TYPE (expr),
288 TREE_OPERAND (expr, 0));
289 else if (TREE_CODE (expr) == BIT_FIELD_REF
290 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
291 return fold_ternary_loc (EXPR_LOCATION (expr),
292 TREE_CODE (expr),
293 TREE_TYPE (expr),
294 TREE_OPERAND (expr, 0),
295 TREE_OPERAND (expr, 1),
296 TREE_OPERAND (expr, 2));
298 if (!is_lhs
299 && (result = fold_const_aggregate_ref (expr))
300 && is_gimple_min_invariant (result))
301 return result;
303 return NULL_TREE;
307 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
308 replacement rhs for the statement or NULL_TREE if no simplification
309 could be made. It is assumed that the operands have been previously
310 folded. */
312 static tree
313 fold_gimple_assign (gimple_stmt_iterator *si)
315 gimple *stmt = gsi_stmt (*si);
316 enum tree_code subcode = gimple_assign_rhs_code (stmt);
317 location_t loc = gimple_location (stmt);
319 tree result = NULL_TREE;
321 switch (get_gimple_rhs_class (subcode))
323 case GIMPLE_SINGLE_RHS:
325 tree rhs = gimple_assign_rhs1 (stmt);
327 if (TREE_CLOBBER_P (rhs))
328 return NULL_TREE;
330 if (REFERENCE_CLASS_P (rhs))
331 return maybe_fold_reference (rhs, false);
333 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
335 tree val = OBJ_TYPE_REF_EXPR (rhs);
336 if (is_gimple_min_invariant (val))
337 return val;
338 else if (flag_devirtualize && virtual_method_call_p (rhs))
340 bool final;
341 vec <cgraph_node *>targets
342 = possible_polymorphic_call_targets (rhs, stmt, &final);
343 if (final && targets.length () <= 1 && dbg_cnt (devirt))
345 if (dump_enabled_p ())
347 location_t loc = gimple_location_safe (stmt);
348 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
349 "resolving virtual function address "
350 "reference to function %s\n",
351 targets.length () == 1
352 ? targets[0]->name ()
353 : "NULL");
355 if (targets.length () == 1)
357 val = fold_convert (TREE_TYPE (val),
358 build_fold_addr_expr_loc
359 (loc, targets[0]->decl));
360 STRIP_USELESS_TYPE_CONVERSION (val);
362 else
363 /* We can not use __builtin_unreachable here because it
364 can not have address taken. */
365 val = build_int_cst (TREE_TYPE (val), 0);
366 return val;
371 else if (TREE_CODE (rhs) == ADDR_EXPR)
373 tree ref = TREE_OPERAND (rhs, 0);
374 tree tem = maybe_fold_reference (ref, true);
375 if (tem
376 && TREE_CODE (tem) == MEM_REF
377 && integer_zerop (TREE_OPERAND (tem, 1)))
378 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
379 else if (tem)
380 result = fold_convert (TREE_TYPE (rhs),
381 build_fold_addr_expr_loc (loc, tem));
382 else if (TREE_CODE (ref) == MEM_REF
383 && integer_zerop (TREE_OPERAND (ref, 1)))
384 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
386 if (result)
388 /* Strip away useless type conversions. Both the
389 NON_LVALUE_EXPR that may have been added by fold, and
390 "useless" type conversions that might now be apparent
391 due to propagation. */
392 STRIP_USELESS_TYPE_CONVERSION (result);
394 if (result != rhs && valid_gimple_rhs_p (result))
395 return result;
399 else if (TREE_CODE (rhs) == CONSTRUCTOR
400 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
402 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
403 unsigned i;
404 tree val;
406 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
407 if (! CONSTANT_CLASS_P (val))
408 return NULL_TREE;
410 return build_vector_from_ctor (TREE_TYPE (rhs),
411 CONSTRUCTOR_ELTS (rhs));
414 else if (DECL_P (rhs))
415 return get_symbol_constant_value (rhs);
417 break;
419 case GIMPLE_UNARY_RHS:
420 break;
422 case GIMPLE_BINARY_RHS:
423 break;
425 case GIMPLE_TERNARY_RHS:
426 result = fold_ternary_loc (loc, subcode,
427 TREE_TYPE (gimple_assign_lhs (stmt)),
428 gimple_assign_rhs1 (stmt),
429 gimple_assign_rhs2 (stmt),
430 gimple_assign_rhs3 (stmt));
432 if (result)
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
436 return result;
438 break;
440 case GIMPLE_INVALID_RHS:
441 gcc_unreachable ();
444 return NULL_TREE;
448 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
449 adjusting the replacement stmts location and virtual operands.
450 If the statement has a lhs the last stmt in the sequence is expected
451 to assign to that lhs. */
453 static void
454 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
456 gimple *stmt = gsi_stmt (*si_p);
458 if (gimple_has_location (stmt))
459 annotate_all_with_location (stmts, gimple_location (stmt));
461 /* First iterate over the replacement statements backward, assigning
462 virtual operands to their defining statements. */
463 gimple *laststore = NULL;
464 for (gimple_stmt_iterator i = gsi_last (stmts);
465 !gsi_end_p (i); gsi_prev (&i))
467 gimple *new_stmt = gsi_stmt (i);
468 if ((gimple_assign_single_p (new_stmt)
469 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
470 || (is_gimple_call (new_stmt)
471 && (gimple_call_flags (new_stmt)
472 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
474 tree vdef;
475 if (!laststore)
476 vdef = gimple_vdef (stmt);
477 else
478 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
479 gimple_set_vdef (new_stmt, vdef);
480 if (vdef && TREE_CODE (vdef) == SSA_NAME)
481 SSA_NAME_DEF_STMT (vdef) = new_stmt;
482 laststore = new_stmt;
486 /* Second iterate over the statements forward, assigning virtual
487 operands to their uses. */
488 tree reaching_vuse = gimple_vuse (stmt);
489 for (gimple_stmt_iterator i = gsi_start (stmts);
490 !gsi_end_p (i); gsi_next (&i))
492 gimple *new_stmt = gsi_stmt (i);
493 /* If the new statement possibly has a VUSE, update it with exact SSA
494 name we know will reach this one. */
495 if (gimple_has_mem_ops (new_stmt))
496 gimple_set_vuse (new_stmt, reaching_vuse);
497 gimple_set_modified (new_stmt, true);
498 if (gimple_vdef (new_stmt))
499 reaching_vuse = gimple_vdef (new_stmt);
502 /* If the new sequence does not do a store release the virtual
503 definition of the original statement. */
504 if (reaching_vuse
505 && reaching_vuse == gimple_vuse (stmt))
507 tree vdef = gimple_vdef (stmt);
508 if (vdef
509 && TREE_CODE (vdef) == SSA_NAME)
511 unlink_stmt_vdef (stmt);
512 release_ssa_name (vdef);
516 /* Finally replace the original statement with the sequence. */
517 gsi_replace_with_seq (si_p, stmts, false);
520 /* Convert EXPR into a GIMPLE value suitable for substitution on the
521 RHS of an assignment. Insert the necessary statements before
522 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
523 is replaced. If the call is expected to produces a result, then it
524 is replaced by an assignment of the new RHS to the result variable.
525 If the result is to be ignored, then the call is replaced by a
526 GIMPLE_NOP. A proper VDEF chain is retained by making the first
527 VUSE and the last VDEF of the whole sequence be the same as the replaced
528 statement and using new SSA names for stores in between. */
530 void
531 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
533 tree lhs;
534 gimple *stmt, *new_stmt;
535 gimple_stmt_iterator i;
536 gimple_seq stmts = NULL;
538 stmt = gsi_stmt (*si_p);
540 gcc_assert (is_gimple_call (stmt));
542 push_gimplify_context (gimple_in_ssa_p (cfun));
544 lhs = gimple_call_lhs (stmt);
545 if (lhs == NULL_TREE)
547 gimplify_and_add (expr, &stmts);
548 /* We can end up with folding a memcpy of an empty class assignment
549 which gets optimized away by C++ gimplification. */
550 if (gimple_seq_empty_p (stmts))
552 pop_gimplify_context (NULL);
553 if (gimple_in_ssa_p (cfun))
555 unlink_stmt_vdef (stmt);
556 release_defs (stmt);
558 gsi_replace (si_p, gimple_build_nop (), false);
559 return;
562 else
564 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
565 new_stmt = gimple_build_assign (lhs, tmp);
566 i = gsi_last (stmts);
567 gsi_insert_after_without_update (&i, new_stmt,
568 GSI_CONTINUE_LINKING);
571 pop_gimplify_context (NULL);
573 gsi_replace_with_seq_vops (si_p, stmts);
577 /* Replace the call at *GSI with the gimple value VAL. */
579 void
580 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
582 gimple *stmt = gsi_stmt (*gsi);
583 tree lhs = gimple_call_lhs (stmt);
584 gimple *repl;
585 if (lhs)
587 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
588 val = fold_convert (TREE_TYPE (lhs), val);
589 repl = gimple_build_assign (lhs, val);
591 else
592 repl = gimple_build_nop ();
593 tree vdef = gimple_vdef (stmt);
594 if (vdef && TREE_CODE (vdef) == SSA_NAME)
596 unlink_stmt_vdef (stmt);
597 release_ssa_name (vdef);
599 gsi_replace (gsi, repl, false);
602 /* Replace the call at *GSI with the new call REPL and fold that
603 again. */
605 static void
606 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
608 gimple *stmt = gsi_stmt (*gsi);
609 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
610 gimple_set_location (repl, gimple_location (stmt));
611 if (gimple_vdef (stmt)
612 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
614 gimple_set_vdef (repl, gimple_vdef (stmt));
615 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
617 if (gimple_vuse (stmt))
618 gimple_set_vuse (repl, gimple_vuse (stmt));
619 gsi_replace (gsi, repl, false);
620 fold_stmt (gsi);
623 /* Return true if VAR is a VAR_DECL or a component thereof. */
625 static bool
626 var_decl_component_p (tree var)
628 tree inner = var;
629 while (handled_component_p (inner))
630 inner = TREE_OPERAND (inner, 0);
631 return SSA_VAR_P (inner);
634 /* If the SIZE argument representing the size of an object is in a range
635 of values of which exactly one is valid (and that is zero), return
636 true, otherwise false. */
638 static bool
639 size_must_be_zero_p (tree size)
641 if (integer_zerop (size))
642 return true;
644 if (TREE_CODE (size) != SSA_NAME)
645 return false;
647 wide_int min, max;
648 enum value_range_type rtype = get_range_info (size, &min, &max);
649 if (rtype != VR_ANTI_RANGE)
650 return false;
652 tree type = TREE_TYPE (size);
653 int prec = TYPE_PRECISION (type);
655 wide_int wone = wi::one (prec);
657 /* Compute the value of SSIZE_MAX, the largest positive value that
658 can be stored in ssize_t, the signed counterpart of size_t. */
659 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
661 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
664 /* Fold function call to builtin mem{{,p}cpy,move}. Return
665 false if no simplification can be made.
666 If ENDP is 0, return DEST (like memcpy).
667 If ENDP is 1, return DEST+LEN (like mempcpy).
668 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
669 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
670 (memmove). */
672 static bool
673 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
674 tree dest, tree src, int endp)
676 gimple *stmt = gsi_stmt (*gsi);
677 tree lhs = gimple_call_lhs (stmt);
678 tree len = gimple_call_arg (stmt, 2);
679 tree destvar, srcvar;
680 location_t loc = gimple_location (stmt);
682 /* If the LEN parameter is a constant zero or in range where
683 the only valid value is zero, return DEST. */
684 if (size_must_be_zero_p (len))
686 gimple *repl;
687 if (gimple_call_lhs (stmt))
688 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
689 else
690 repl = gimple_build_nop ();
691 tree vdef = gimple_vdef (stmt);
692 if (vdef && TREE_CODE (vdef) == SSA_NAME)
694 unlink_stmt_vdef (stmt);
695 release_ssa_name (vdef);
697 gsi_replace (gsi, repl, false);
698 return true;
701 /* If SRC and DEST are the same (and not volatile), return
702 DEST{,+LEN,+LEN-1}. */
703 if (operand_equal_p (src, dest, 0))
705 unlink_stmt_vdef (stmt);
706 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
707 release_ssa_name (gimple_vdef (stmt));
708 if (!lhs)
710 gsi_replace (gsi, gimple_build_nop (), false);
711 return true;
713 goto done;
715 else
717 tree srctype, desttype;
718 unsigned int src_align, dest_align;
719 tree off0;
721 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
722 pointers as wide integer) and also may result in huge function
723 size because of inlined bounds copy. Thus don't inline for
724 functions we want to instrument. */
725 if (flag_check_pointer_bounds
726 && chkp_instrumentable_p (cfun->decl)
727 /* Even if data may contain pointers we can inline if copy
728 less than a pointer size. */
729 && (!tree_fits_uhwi_p (len)
730 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
731 return false;
733 /* Build accesses at offset zero with a ref-all character type. */
734 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
735 ptr_mode, true), 0);
737 /* If we can perform the copy efficiently with first doing all loads
738 and then all stores inline it that way. Currently efficiently
739 means that we can load all the memory into a single integer
740 register which is what MOVE_MAX gives us. */
741 src_align = get_pointer_alignment (src);
742 dest_align = get_pointer_alignment (dest);
743 if (tree_fits_uhwi_p (len)
744 && compare_tree_int (len, MOVE_MAX) <= 0
745 /* ??? Don't transform copies from strings with known length this
746 confuses the tree-ssa-strlen.c. This doesn't handle
747 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
748 reason. */
749 && !c_strlen (src, 2))
751 unsigned ilen = tree_to_uhwi (len);
752 if (pow2p_hwi (ilen))
754 scalar_int_mode mode;
755 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
756 if (type
757 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
758 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
759 /* If the destination pointer is not aligned we must be able
760 to emit an unaligned store. */
761 && (dest_align >= GET_MODE_ALIGNMENT (mode)
762 || !targetm.slow_unaligned_access (mode, dest_align)
763 || (optab_handler (movmisalign_optab, mode)
764 != CODE_FOR_nothing)))
766 tree srctype = type;
767 tree desttype = type;
768 if (src_align < GET_MODE_ALIGNMENT (mode))
769 srctype = build_aligned_type (type, src_align);
770 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
771 tree tem = fold_const_aggregate_ref (srcmem);
772 if (tem)
773 srcmem = tem;
774 else if (src_align < GET_MODE_ALIGNMENT (mode)
775 && targetm.slow_unaligned_access (mode, src_align)
776 && (optab_handler (movmisalign_optab, mode)
777 == CODE_FOR_nothing))
778 srcmem = NULL_TREE;
779 if (srcmem)
781 gimple *new_stmt;
782 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
784 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
785 srcmem
786 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
787 new_stmt);
788 gimple_assign_set_lhs (new_stmt, srcmem);
789 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
790 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
792 if (dest_align < GET_MODE_ALIGNMENT (mode))
793 desttype = build_aligned_type (type, dest_align);
794 new_stmt
795 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
796 dest, off0),
797 srcmem);
798 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
799 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
800 if (gimple_vdef (new_stmt)
801 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
802 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
803 if (!lhs)
805 gsi_replace (gsi, new_stmt, false);
806 return true;
808 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
809 goto done;
815 if (endp == 3)
817 /* Both DEST and SRC must be pointer types.
818 ??? This is what old code did. Is the testing for pointer types
819 really mandatory?
821 If either SRC is readonly or length is 1, we can use memcpy. */
822 if (!dest_align || !src_align)
823 return false;
824 if (readonly_data_expr (src)
825 || (tree_fits_uhwi_p (len)
826 && (MIN (src_align, dest_align) / BITS_PER_UNIT
827 >= tree_to_uhwi (len))))
829 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
830 if (!fn)
831 return false;
832 gimple_call_set_fndecl (stmt, fn);
833 gimple_call_set_arg (stmt, 0, dest);
834 gimple_call_set_arg (stmt, 1, src);
835 fold_stmt (gsi);
836 return true;
839 /* If *src and *dest can't overlap, optimize into memcpy as well. */
840 if (TREE_CODE (src) == ADDR_EXPR
841 && TREE_CODE (dest) == ADDR_EXPR)
843 tree src_base, dest_base, fn;
844 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
845 HOST_WIDE_INT maxsize;
847 srcvar = TREE_OPERAND (src, 0);
848 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
849 if (src_base == NULL)
850 src_base = srcvar;
851 destvar = TREE_OPERAND (dest, 0);
852 dest_base = get_addr_base_and_unit_offset (destvar,
853 &dest_offset);
854 if (dest_base == NULL)
855 dest_base = destvar;
856 if (tree_fits_uhwi_p (len))
857 maxsize = tree_to_uhwi (len);
858 else
859 maxsize = -1;
860 if (SSA_VAR_P (src_base)
861 && SSA_VAR_P (dest_base))
863 if (operand_equal_p (src_base, dest_base, 0)
864 && ranges_overlap_p (src_offset, maxsize,
865 dest_offset, maxsize))
866 return false;
868 else if (TREE_CODE (src_base) == MEM_REF
869 && TREE_CODE (dest_base) == MEM_REF)
871 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
872 TREE_OPERAND (dest_base, 0), 0))
873 return false;
874 offset_int off = mem_ref_offset (src_base) + src_offset;
875 if (!wi::fits_shwi_p (off))
876 return false;
877 src_offset = off.to_shwi ();
879 off = mem_ref_offset (dest_base) + dest_offset;
880 if (!wi::fits_shwi_p (off))
881 return false;
882 dest_offset = off.to_shwi ();
883 if (ranges_overlap_p (src_offset, maxsize,
884 dest_offset, maxsize))
885 return false;
887 else
888 return false;
890 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
891 if (!fn)
892 return false;
893 gimple_call_set_fndecl (stmt, fn);
894 gimple_call_set_arg (stmt, 0, dest);
895 gimple_call_set_arg (stmt, 1, src);
896 fold_stmt (gsi);
897 return true;
900 /* If the destination and source do not alias optimize into
901 memcpy as well. */
902 if ((is_gimple_min_invariant (dest)
903 || TREE_CODE (dest) == SSA_NAME)
904 && (is_gimple_min_invariant (src)
905 || TREE_CODE (src) == SSA_NAME))
907 ao_ref destr, srcr;
908 ao_ref_init_from_ptr_and_size (&destr, dest, len);
909 ao_ref_init_from_ptr_and_size (&srcr, src, len);
910 if (!refs_may_alias_p_1 (&destr, &srcr, false))
912 tree fn;
913 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
914 if (!fn)
915 return false;
916 gimple_call_set_fndecl (stmt, fn);
917 gimple_call_set_arg (stmt, 0, dest);
918 gimple_call_set_arg (stmt, 1, src);
919 fold_stmt (gsi);
920 return true;
924 return false;
927 if (!tree_fits_shwi_p (len))
928 return false;
929 /* FIXME:
930 This logic lose for arguments like (type *)malloc (sizeof (type)),
931 since we strip the casts of up to VOID return value from malloc.
932 Perhaps we ought to inherit type from non-VOID argument here? */
933 STRIP_NOPS (src);
934 STRIP_NOPS (dest);
935 if (!POINTER_TYPE_P (TREE_TYPE (src))
936 || !POINTER_TYPE_P (TREE_TYPE (dest)))
937 return false;
938 /* In the following try to find a type that is most natural to be
939 used for the memcpy source and destination and that allows
940 the most optimization when memcpy is turned into a plain assignment
941 using that type. In theory we could always use a char[len] type
942 but that only gains us that the destination and source possibly
943 no longer will have their address taken. */
944 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
945 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
947 tree tem = TREE_OPERAND (src, 0);
948 STRIP_NOPS (tem);
949 if (tem != TREE_OPERAND (src, 0))
950 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
952 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
954 tree tem = TREE_OPERAND (dest, 0);
955 STRIP_NOPS (tem);
956 if (tem != TREE_OPERAND (dest, 0))
957 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
959 srctype = TREE_TYPE (TREE_TYPE (src));
960 if (TREE_CODE (srctype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
963 srctype = TREE_TYPE (srctype);
964 STRIP_NOPS (src);
965 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
967 desttype = TREE_TYPE (TREE_TYPE (dest));
968 if (TREE_CODE (desttype) == ARRAY_TYPE
969 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
971 desttype = TREE_TYPE (desttype);
972 STRIP_NOPS (dest);
973 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
975 if (TREE_ADDRESSABLE (srctype)
976 || TREE_ADDRESSABLE (desttype))
977 return false;
979 /* Make sure we are not copying using a floating-point mode or
980 a type whose size possibly does not match its precision. */
981 if (FLOAT_MODE_P (TYPE_MODE (desttype))
982 || TREE_CODE (desttype) == BOOLEAN_TYPE
983 || TREE_CODE (desttype) == ENUMERAL_TYPE)
984 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
985 if (FLOAT_MODE_P (TYPE_MODE (srctype))
986 || TREE_CODE (srctype) == BOOLEAN_TYPE
987 || TREE_CODE (srctype) == ENUMERAL_TYPE)
988 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
989 if (!srctype)
990 srctype = desttype;
991 if (!desttype)
992 desttype = srctype;
993 if (!srctype)
994 return false;
996 src_align = get_pointer_alignment (src);
997 dest_align = get_pointer_alignment (dest);
998 if (dest_align < TYPE_ALIGN (desttype)
999 || src_align < TYPE_ALIGN (srctype))
1000 return false;
1002 destvar = dest;
1003 STRIP_NOPS (destvar);
1004 if (TREE_CODE (destvar) == ADDR_EXPR
1005 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1006 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1007 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1008 else
1009 destvar = NULL_TREE;
1011 srcvar = src;
1012 STRIP_NOPS (srcvar);
1013 if (TREE_CODE (srcvar) == ADDR_EXPR
1014 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1015 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1017 if (!destvar
1018 || src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1020 srcvar, off0);
1021 else if (!STRICT_ALIGNMENT)
1023 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1024 src_align);
1025 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1027 else
1028 srcvar = NULL_TREE;
1030 else
1031 srcvar = NULL_TREE;
1033 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1034 return false;
1036 if (srcvar == NULL_TREE)
1038 STRIP_NOPS (src);
1039 if (src_align >= TYPE_ALIGN (desttype))
1040 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1041 else
1043 if (STRICT_ALIGNMENT)
1044 return false;
1045 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1046 src_align);
1047 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1050 else if (destvar == NULL_TREE)
1052 STRIP_NOPS (dest);
1053 if (dest_align >= TYPE_ALIGN (srctype))
1054 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1055 else
1057 if (STRICT_ALIGNMENT)
1058 return false;
1059 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1060 dest_align);
1061 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1065 gimple *new_stmt;
1066 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1068 tree tem = fold_const_aggregate_ref (srcvar);
1069 if (tem)
1070 srcvar = tem;
1071 if (! is_gimple_min_invariant (srcvar))
1073 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1074 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1075 new_stmt);
1076 gimple_assign_set_lhs (new_stmt, srcvar);
1077 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1078 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1081 new_stmt = gimple_build_assign (destvar, srcvar);
1082 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1083 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1084 if (gimple_vdef (new_stmt)
1085 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1086 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1087 if (!lhs)
1089 gsi_replace (gsi, new_stmt, false);
1090 return true;
1092 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1095 done:
1096 gimple_seq stmts = NULL;
1097 if (endp == 0 || endp == 3)
1098 len = NULL_TREE;
1099 else if (endp == 2)
1100 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1101 ssize_int (1));
1102 if (endp == 2 || endp == 1)
1104 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1105 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1106 TREE_TYPE (dest), dest, len);
1109 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1110 gimple *repl = gimple_build_assign (lhs, dest);
1111 gsi_replace (gsi, repl, false);
1112 return true;
1115 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1116 to built-in memcmp (a, b, len). */
1118 static bool
1119 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1121 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1123 if (!fn)
1124 return false;
1126 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1128 gimple *stmt = gsi_stmt (*gsi);
1129 tree a = gimple_call_arg (stmt, 0);
1130 tree b = gimple_call_arg (stmt, 1);
1131 tree len = gimple_call_arg (stmt, 2);
1133 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1134 replace_call_with_call_and_fold (gsi, repl);
1136 return true;
1139 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1140 to built-in memmove (dest, src, len). */
1142 static bool
1143 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1145 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1147 if (!fn)
1148 return false;
1150 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1151 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1152 len) into memmove (dest, src, len). */
1154 gimple *stmt = gsi_stmt (*gsi);
1155 tree src = gimple_call_arg (stmt, 0);
1156 tree dest = gimple_call_arg (stmt, 1);
1157 tree len = gimple_call_arg (stmt, 2);
1159 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1160 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1161 replace_call_with_call_and_fold (gsi, repl);
1163 return true;
1166 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1167 to built-in memset (dest, 0, len). */
1169 static bool
1170 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1172 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1174 if (!fn)
1175 return false;
1177 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1179 gimple *stmt = gsi_stmt (*gsi);
1180 tree dest = gimple_call_arg (stmt, 0);
1181 tree len = gimple_call_arg (stmt, 1);
1183 gimple_seq seq = NULL;
1184 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1185 gimple_seq_add_stmt_without_update (&seq, repl);
1186 gsi_replace_with_seq_vops (gsi, seq);
1187 fold_stmt (gsi);
1189 return true;
1192 /* Fold function call to builtin memset or bzero at *GSI setting the
1193 memory of size LEN to VAL. Return whether a simplification was made. */
1195 static bool
1196 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1198 gimple *stmt = gsi_stmt (*gsi);
1199 tree etype;
1200 unsigned HOST_WIDE_INT length, cval;
1202 /* If the LEN parameter is zero, return DEST. */
1203 if (integer_zerop (len))
1205 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1206 return true;
1209 if (! tree_fits_uhwi_p (len))
1210 return false;
1212 if (TREE_CODE (c) != INTEGER_CST)
1213 return false;
1215 tree dest = gimple_call_arg (stmt, 0);
1216 tree var = dest;
1217 if (TREE_CODE (var) != ADDR_EXPR)
1218 return false;
1220 var = TREE_OPERAND (var, 0);
1221 if (TREE_THIS_VOLATILE (var))
1222 return false;
1224 etype = TREE_TYPE (var);
1225 if (TREE_CODE (etype) == ARRAY_TYPE)
1226 etype = TREE_TYPE (etype);
1228 if (!INTEGRAL_TYPE_P (etype)
1229 && !POINTER_TYPE_P (etype))
1230 return NULL_TREE;
1232 if (! var_decl_component_p (var))
1233 return NULL_TREE;
1235 length = tree_to_uhwi (len);
1236 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1237 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1238 return NULL_TREE;
1240 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1241 return NULL_TREE;
1243 if (integer_zerop (c))
1244 cval = 0;
1245 else
1247 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1248 return NULL_TREE;
1250 cval = TREE_INT_CST_LOW (c);
1251 cval &= 0xff;
1252 cval |= cval << 8;
1253 cval |= cval << 16;
1254 cval |= (cval << 31) << 1;
1257 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1258 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1259 gimple_set_vuse (store, gimple_vuse (stmt));
1260 tree vdef = gimple_vdef (stmt);
1261 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1263 gimple_set_vdef (store, gimple_vdef (stmt));
1264 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1266 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1267 if (gimple_call_lhs (stmt))
1269 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1270 gsi_replace (gsi, asgn, false);
1272 else
1274 gimple_stmt_iterator gsi2 = *gsi;
1275 gsi_prev (gsi);
1276 gsi_remove (&gsi2, true);
1279 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables (allocated lazily on first
   SSA name seen; the caller frees it).
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  /* Recurse on the pointer feeding the zero-index ARRAY_REF.  */
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      /* VAL becomes array size minus one (room for the NUL).  */
	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Lower *MINLEN to VAL when VAL is a smaller constant (only
	 relevant for TYPE > 0 where a range is being computed).  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* Raise *MAXLEN to VAL; both must be constants to be
		 comparable.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0: all determined lengths must agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Both arms of the conditional must satisfy the walk.  */
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	      && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
		{
		  /* In fuzzy mode an undetermined arm pins *MAXLEN to
		     the all-ones "unknown" sentinel instead of failing.  */
		  if (fuzzy)
		    *maxlen = build_all_ones_cst (size_type_node);
		  else
		    return false;
		}
	    }
	}
	return true;

      default:
	return false;
    }
}
1467 /* Determine the minimum and maximum value or string length that ARG
1468 refers to and store each in the first two elements of MINMAXLEN.
1469 For expressions that point to strings of unknown lengths that are
1470 character arrays, use the upper bound of the array as the maximum
1471 length. For example, given an expression like 'x ? array : "xyz"'
1472 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1473 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1474 stored in array.
1475 Return true if the range of the string lengths has been obtained
1476 from the upper bound of an array at the end of a struct. Such
1477 an array may hold a string that's longer than its upper bound
1478 due to it being used as a poor-man's flexible array member. */
1480 bool
1481 get_range_strlen (tree arg, tree minmaxlen[2])
1483 bitmap visited = NULL;
1485 minmaxlen[0] = NULL_TREE;
1486 minmaxlen[1] = NULL_TREE;
1488 bool flexarray = false;
1489 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1491 if (visited)
1492 BITMAP_FREE (visited);
1494 return flexarray;
1497 tree
1498 get_maxval_strlen (tree arg, int type)
1500 bitmap visited = NULL;
1501 tree len[2] = { NULL_TREE, NULL_TREE };
1503 bool dummy;
1504 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1505 len[1] = NULL_TREE;
1506 if (visited)
1507 BITMAP_FREE (visited);
1509 return len[1];
1513 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1514 If LEN is not NULL, it represents the length of the string to be
1515 copied. Return NULL_TREE if no simplification can be made. */
1517 static bool
1518 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1519 tree dest, tree src)
1521 location_t loc = gimple_location (gsi_stmt (*gsi));
1522 tree fn;
1524 /* If SRC and DEST are the same (and not volatile), return DEST. */
1525 if (operand_equal_p (src, dest, 0))
1527 replace_call_with_value (gsi, dest);
1528 return true;
1531 if (optimize_function_for_size_p (cfun))
1532 return false;
1534 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1535 if (!fn)
1536 return false;
1538 tree len = get_maxval_strlen (src, 0);
1539 if (!len)
1540 return false;
1542 len = fold_convert_loc (loc, size_type_node, len);
1543 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1544 len = force_gimple_operand_gsi (gsi, len, true,
1545 NULL_TREE, true, GSI_SAME_STMT);
1546 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1547 replace_call_with_call_and_fold (gsi, repl);
1548 return true;
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN.  Diagnoses output truncation with -Wstringop-truncation and, when
   LEN and the source length are known constants with LEN >= strlen (SRC)
   + 1, rewrites the call as memcpy.  Return true if the call was
   simplified, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      gcall *call = as_a <gcall *> (stmt);

      /* Warn about the lack of nul termination: the result is not
	 a (nul-terminated) string.  */
      tree slen = get_maxval_strlen (src, 0);
      if (slen && !integer_zerop (slen))
	warning_at (loc, OPT_Wstringop_truncation,
		    "%G%qD destination unchanged after copying no bytes "
		    "from a string of length %E",
		    call, fndecl, slen);
      else
	warning_at (loc, OPT_Wstringop_truncation,
		    "%G%qD destination unchanged after copying no bytes",
		    call, fndecl);

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy without a terminating
     nul.  */
  if (tree_int_cst_lt (len, slen))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      gcall *call = as_a <gcall *> (stmt);

      warning_at (loc, OPT_Wstringop_truncation,
		  (tree_int_cst_equal (size_one_node, len)
		   ? G_("%G%qD output truncated copying %E byte "
			"from a string of length %E")
		   : G_("%G%qD output truncated copying %E bytes "
			"from a string of length %E")),
		  call, fndecl, len, slen);
    }
  else if (tree_int_cst_equal (len, slen))
    {
      /* LEN == strlen (SRC): the nul is not copied.  Peel back the
	 destination expression to the underlying decl so the
	 "nonstring" attribute can suppress the warning.  */
      tree decl = dest;
      if (TREE_CODE (decl) == SSA_NAME)
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (decl);
	  if (is_gimple_assign (def_stmt))
	    {
	      tree_code code = gimple_assign_rhs_code (def_stmt);
	      if (code == ADDR_EXPR || code == VAR_DECL)
		decl = gimple_assign_rhs1 (def_stmt);
	    }
	}

      if (TREE_CODE (decl) == ADDR_EXPR)
	decl = TREE_OPERAND (decl, 0);

      if (TREE_CODE (decl) == COMPONENT_REF)
	decl = TREE_OPERAND (decl, 1);

      tree fndecl = gimple_call_fndecl (stmt);
      gcall *call = as_a <gcall *> (stmt);

      if (!DECL_P (decl)
	  || !lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
	warning_at (loc, OPT_Wstringop_truncation,
		    (tree_int_cst_equal (size_one_node, len)
		     ? G_("%G%qD output truncated before terminating nul "
			  "copying %E byte from a string of the same "
			  "length")
		     : G_("%G%qD output truncated before terminating nul "
			  "copying %E bytes from a string of the same "
			  "length")),
		    call, fndecl, len);
    }

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr (IS_STRRCHR selects
   which).  If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true on simplification.  */

static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a lhs there is nothing to replace.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: compute the result at compile time using
     the host strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: the result is STR plus the constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen rewrite below only applies when searching for the nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1751 /* Fold function call to builtin strstr.
1752 If both arguments are constant, evaluate and fold the result,
1753 additionally fold strstr (x, "") into x and strstr (x, "c")
1754 into strchr (x, 'c'). */
1755 static bool
1756 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1758 gimple *stmt = gsi_stmt (*gsi);
1759 tree haystack = gimple_call_arg (stmt, 0);
1760 tree needle = gimple_call_arg (stmt, 1);
1761 const char *p, *q;
1763 if (!gimple_call_lhs (stmt))
1764 return false;
1766 q = c_getstr (needle);
1767 if (q == NULL)
1768 return false;
1770 if ((p = c_getstr (haystack)))
1772 const char *r = strstr (p, q);
1774 if (r == NULL)
1776 replace_call_with_value (gsi, integer_zero_node);
1777 return true;
1780 tree len = build_int_cst (size_type_node, r - p);
1781 gimple_seq stmts = NULL;
1782 gimple *new_stmt
1783 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1784 haystack, len);
1785 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1786 gsi_replace_with_seq_vops (gsi, stmts);
1787 return true;
1790 /* For strstr (x, "") return x. */
1791 if (q[0] == '\0')
1793 replace_call_with_value (gsi, haystack);
1794 return true;
1797 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1798 if (q[1] == '\0')
1800 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1801 if (strchr_fn)
1803 tree c = build_int_cst (integer_type_node, q[0]);
1804 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1805 replace_call_with_call_and_fold (gsi, repl);
1806 return true;
1810 return false;
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   When SRC is known to be "", the call is replaced by DST.  Otherwise,
   when the source length is a known constant and the block is being
   optimized for speed, strcat (dst, src) is split into
   strlen (dst) followed by memcpy (dst + strlen (dst), src, len + 1).

   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy split grows code; only do it for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* The call's result (DST) is still used: assign it to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1905 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1906 are the arguments to the call. */
1908 static bool
1909 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1911 gimple *stmt = gsi_stmt (*gsi);
1912 tree dest = gimple_call_arg (stmt, 0);
1913 tree src = gimple_call_arg (stmt, 1);
1914 tree size = gimple_call_arg (stmt, 2);
1915 tree fn;
1916 const char *p;
1919 p = c_getstr (src);
1920 /* If the SRC parameter is "", return DEST. */
1921 if (p && *p == '\0')
1923 replace_call_with_value (gsi, dest);
1924 return true;
1927 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1928 return false;
1930 /* If __builtin_strcat_chk is used, assume strcat is available. */
1931 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1932 if (!fn)
1933 return false;
1935 gimple *repl = gimple_build_call (fn, 2, dest, src);
1936 replace_call_with_call_and_fold (gsi, repl);
1937 return true;
/* Simplify a call to the strncat builtin.  Diagnoses suspicious bounds
   with -Wstringop-overflow and, when the bound is known to be at least
   the source length, rewrites the call as strcat.  Return true on
   simplification.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Both the bound and the source string must be known constants for
     the checks and the strcat rewrite below.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress further warnings for this statement once one has
	     been issued.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2027 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2028 LEN, and SIZE. */
2030 static bool
2031 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2033 gimple *stmt = gsi_stmt (*gsi);
2034 tree dest = gimple_call_arg (stmt, 0);
2035 tree src = gimple_call_arg (stmt, 1);
2036 tree len = gimple_call_arg (stmt, 2);
2037 tree size = gimple_call_arg (stmt, 3);
2038 tree fn;
2039 const char *p;
2041 p = c_getstr (src);
2042 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2043 if ((p && *p == '\0')
2044 || integer_zerop (len))
2046 replace_call_with_value (gsi, dest);
2047 return true;
2050 if (! tree_fits_uhwi_p (size))
2051 return false;
2053 if (! integer_all_onesp (size))
2055 tree src_len = c_strlen (src, 1);
2056 if (src_len
2057 && tree_fits_uhwi_p (src_len)
2058 && tree_fits_uhwi_p (len)
2059 && ! tree_int_cst_lt (len, src_len))
2061 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2062 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2063 if (!fn)
2064 return false;
2066 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2067 replace_call_with_call_and_fold (gsi, repl);
2068 return true;
2070 return false;
2073 /* If __builtin_strncat_chk is used, assume strncat is available. */
2074 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2075 if (!fn)
2076 return false;
2078 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2079 replace_call_with_call_and_fold (gsi, repl);
2080 return true;
2083 /* Build and append gimple statements to STMTS that would load a first
2084 character of a memory location identified by STR. LOC is location
2085 of the statement. */
2087 static tree
2088 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2090 tree var;
2092 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2093 tree cst_uchar_ptr_node
2094 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2095 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2097 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2098 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2099 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2101 gimple_assign_set_lhs (stmt, var);
2102 gimple_seq_add_stmt_without_update (stmts, stmt);
2104 return var;
2107 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2108 FCODE is the name of the builtin. */
2110 static bool
2111 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2113 gimple *stmt = gsi_stmt (*gsi);
2114 tree callee = gimple_call_fndecl (stmt);
2115 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2117 tree type = integer_type_node;
2118 tree str1 = gimple_call_arg (stmt, 0);
2119 tree str2 = gimple_call_arg (stmt, 1);
2120 tree lhs = gimple_call_lhs (stmt);
2121 HOST_WIDE_INT length = -1;
2123 /* Handle strncmp and strncasecmp functions. */
2124 if (gimple_call_num_args (stmt) == 3)
2126 tree len = gimple_call_arg (stmt, 2);
2127 if (tree_fits_uhwi_p (len))
2128 length = tree_to_uhwi (len);
2131 /* If the LEN parameter is zero, return zero. */
2132 if (length == 0)
2134 replace_call_with_value (gsi, integer_zero_node);
2135 return true;
2138 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2139 if (operand_equal_p (str1, str2, 0))
2141 replace_call_with_value (gsi, integer_zero_node);
2142 return true;
2145 const char *p1 = c_getstr (str1);
2146 const char *p2 = c_getstr (str2);
2148 /* For known strings, return an immediate value. */
2149 if (p1 && p2)
2151 int r = 0;
2152 bool known_result = false;
2154 switch (fcode)
2156 case BUILT_IN_STRCMP:
2158 r = strcmp (p1, p2);
2159 known_result = true;
2160 break;
2162 case BUILT_IN_STRNCMP:
2164 if (length == -1)
2165 break;
2166 r = strncmp (p1, p2, length);
2167 known_result = true;
2168 break;
2170 /* Only handleable situation is where the string are equal (result 0),
2171 which is already handled by operand_equal_p case. */
2172 case BUILT_IN_STRCASECMP:
2173 break;
2174 case BUILT_IN_STRNCASECMP:
2176 if (length == -1)
2177 break;
2178 r = strncmp (p1, p2, length);
2179 if (r == 0)
2180 known_result = true;
2181 break;;
2183 default:
2184 gcc_unreachable ();
2187 if (known_result)
2189 replace_call_with_value (gsi, build_cmp_result (type, r));
2190 return true;
2194 bool nonzero_length = length >= 1
2195 || fcode == BUILT_IN_STRCMP
2196 || fcode == BUILT_IN_STRCASECMP;
2198 location_t loc = gimple_location (stmt);
2200 /* If the second arg is "", return *(const unsigned char*)arg1. */
2201 if (p2 && *p2 == '\0' && nonzero_length)
2203 gimple_seq stmts = NULL;
2204 tree var = gimple_load_first_char (loc, str1, &stmts);
2205 if (lhs)
2207 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2208 gimple_seq_add_stmt_without_update (&stmts, stmt);
2211 gsi_replace_with_seq_vops (gsi, stmts);
2212 return true;
2215 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2216 if (p1 && *p1 == '\0' && nonzero_length)
2218 gimple_seq stmts = NULL;
2219 tree var = gimple_load_first_char (loc, str2, &stmts);
2221 if (lhs)
2223 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2224 stmt = gimple_build_assign (c, NOP_EXPR, var);
2225 gimple_seq_add_stmt_without_update (&stmts, stmt);
2227 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2228 gimple_seq_add_stmt_without_update (&stmts, stmt);
2231 gsi_replace_with_seq_vops (gsi, stmts);
2232 return true;
2235 /* If len parameter is one, return an expression corresponding to
2236 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2237 if (fcode == BUILT_IN_STRNCMP && length == 1)
2239 gimple_seq stmts = NULL;
2240 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2241 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2243 if (lhs)
2245 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2246 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2247 gimple_seq_add_stmt_without_update (&stmts, convert1);
2249 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2250 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2251 gimple_seq_add_stmt_without_update (&stmts, convert2);
2253 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2254 gimple_seq_add_stmt_without_update (&stmts, stmt);
2257 gsi_replace_with_seq_vops (gsi, stmts);
2258 return true;
2261 /* If length is larger than the length of one constant string,
2262 replace strncmp with corresponding strcmp */
2263 if (fcode == BUILT_IN_STRNCMP
2264 && length > 0
2265 && ((p2 && (size_t) length > strlen (p2))
2266 || (p1 && (size_t) length > strlen (p1))))
2268 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2269 if (!fn)
2270 return false;
2271 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2272 replace_call_with_call_and_fold (gsi, repl);
2273 return true;
2276 return false;
2279 /* Fold a call to the memchr pointed by GSI iterator. */
2281 static bool
2282 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2284 gimple *stmt = gsi_stmt (*gsi);
2285 tree lhs = gimple_call_lhs (stmt);
2286 tree arg1 = gimple_call_arg (stmt, 0);
2287 tree arg2 = gimple_call_arg (stmt, 1);
2288 tree len = gimple_call_arg (stmt, 2);
2290 /* If the LEN parameter is zero, return zero. */
2291 if (integer_zerop (len))
2293 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2294 return true;
2297 char c;
2298 if (TREE_CODE (arg2) != INTEGER_CST
2299 || !tree_fits_uhwi_p (len)
2300 || !target_char_cst_p (arg2, &c))
2301 return false;
2303 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2304 unsigned HOST_WIDE_INT string_length;
2305 const char *p1 = c_getstr (arg1, &string_length);
2307 if (p1)
2309 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2310 if (r == NULL)
2312 if (length <= string_length)
2314 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2315 return true;
2318 else
2320 unsigned HOST_WIDE_INT offset = r - p1;
2321 gimple_seq stmts = NULL;
2322 if (lhs != NULL_TREE)
2324 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2325 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2326 arg1, offset_cst);
2327 gimple_seq_add_stmt_without_update (&stmts, stmt);
2329 else
2330 gimple_seq_add_stmt_without_update (&stmts,
2331 gimple_build_nop ());
2333 gsi_replace_with_seq_vops (gsi, stmts);
2334 return true;
2338 return false;
2341 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2342 to the call. IGNORE is true if the value returned
2343 by the builtin will be ignored. UNLOCKED is true is true if this
2344 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2345 the known length of the string. Return NULL_TREE if no simplification
2346 was possible. */
2348 static bool
2349 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2350 tree arg0, tree arg1,
2351 bool unlocked)
2353 gimple *stmt = gsi_stmt (*gsi);
2355 /* If we're using an unlocked function, assume the other unlocked
2356 functions exist explicitly. */
2357 tree const fn_fputc = (unlocked
2358 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2359 : builtin_decl_implicit (BUILT_IN_FPUTC));
2360 tree const fn_fwrite = (unlocked
2361 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2362 : builtin_decl_implicit (BUILT_IN_FWRITE));
2364 /* If the return value is used, don't do the transformation. */
2365 if (gimple_call_lhs (stmt))
2366 return false;
2368 /* Get the length of the string passed to fputs. If the length
2369 can't be determined, punt. */
2370 tree len = get_maxval_strlen (arg0, 0);
2371 if (!len
2372 || TREE_CODE (len) != INTEGER_CST)
2373 return false;
2375 switch (compare_tree_int (len, 1))
2377 case -1: /* length is 0, delete the call entirely . */
2378 replace_call_with_value (gsi, integer_zero_node);
2379 return true;
2381 case 0: /* length is 1, call fputc. */
2383 const char *p = c_getstr (arg0);
2384 if (p != NULL)
2386 if (!fn_fputc)
2387 return false;
2389 gimple *repl = gimple_build_call (fn_fputc, 2,
2390 build_int_cst
2391 (integer_type_node, p[0]), arg1);
2392 replace_call_with_call_and_fold (gsi, repl);
2393 return true;
2396 /* FALLTHROUGH */
2397 case 1: /* length is greater than 1, call fwrite. */
2399 /* If optimizing for size keep fputs. */
2400 if (optimize_function_for_size_p (cfun))
2401 return false;
2402 /* New argument list transforming fputs(string, stream) to
2403 fwrite(string, 1, len, stream). */
2404 if (!fn_fwrite)
2405 return false;
2407 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2408 size_one_node, len, arg1);
2409 replace_call_with_call_and_fold (gsi, repl);
2410 return true;
2412 default:
2413 gcc_unreachable ();
2415 return false;
2418 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2419 DEST, SRC, LEN, and SIZE are the arguments to the call.
2420 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2421 code of the builtin. If MAXLEN is not NULL, it is maximum length
2422 passed as third argument. */
2424 static bool
2425 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2426 tree dest, tree src, tree len, tree size,
2427 enum built_in_function fcode)
2429 gimple *stmt = gsi_stmt (*gsi);
2430 location_t loc = gimple_location (stmt);
2431 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2432 tree fn;
2434 /* If SRC and DEST are the same (and not volatile), return DEST
2435 (resp. DEST+LEN for __mempcpy_chk). */
2436 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2438 if (fcode != BUILT_IN_MEMPCPY_CHK)
2440 replace_call_with_value (gsi, dest);
2441 return true;
2443 else
2445 gimple_seq stmts = NULL;
2446 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2447 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2448 TREE_TYPE (dest), dest, len);
2449 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2450 replace_call_with_value (gsi, temp);
2451 return true;
2455 if (! tree_fits_uhwi_p (size))
2456 return false;
2458 tree maxlen = get_maxval_strlen (len, 2);
2459 if (! integer_all_onesp (size))
2461 if (! tree_fits_uhwi_p (len))
2463 /* If LEN is not constant, try MAXLEN too.
2464 For MAXLEN only allow optimizing into non-_ocs function
2465 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2466 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2468 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2470 /* (void) __mempcpy_chk () can be optimized into
2471 (void) __memcpy_chk (). */
2472 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2473 if (!fn)
2474 return false;
2476 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2477 replace_call_with_call_and_fold (gsi, repl);
2478 return true;
2480 return false;
2483 else
2484 maxlen = len;
2486 if (tree_int_cst_lt (size, maxlen))
2487 return false;
2490 fn = NULL_TREE;
2491 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2492 mem{cpy,pcpy,move,set} is available. */
2493 switch (fcode)
2495 case BUILT_IN_MEMCPY_CHK:
2496 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2497 break;
2498 case BUILT_IN_MEMPCPY_CHK:
2499 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2500 break;
2501 case BUILT_IN_MEMMOVE_CHK:
2502 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2503 break;
2504 case BUILT_IN_MEMSET_CHK:
2505 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2506 break;
2507 default:
2508 break;
2511 if (!fn)
2512 return false;
2514 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2515 replace_call_with_call_and_fold (gsi, repl);
2516 return true;
2519 /* Fold a call to the __st[rp]cpy_chk builtin.
2520 DEST, SRC, and SIZE are the arguments to the call.
2521 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2522 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2523 strings passed as second argument. */
2525 static bool
2526 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2527 tree dest,
2528 tree src, tree size,
2529 enum built_in_function fcode)
2531 gimple *stmt = gsi_stmt (*gsi);
2532 location_t loc = gimple_location (stmt);
2533 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2534 tree len, fn;
2536 /* If SRC and DEST are the same (and not volatile), return DEST. */
2537 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2539 replace_call_with_value (gsi, dest);
2540 return true;
2543 if (! tree_fits_uhwi_p (size))
2544 return false;
2546 tree maxlen = get_maxval_strlen (src, 1);
2547 if (! integer_all_onesp (size))
2549 len = c_strlen (src, 1);
2550 if (! len || ! tree_fits_uhwi_p (len))
2552 /* If LEN is not constant, try MAXLEN too.
2553 For MAXLEN only allow optimizing into non-_ocs function
2554 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2555 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2557 if (fcode == BUILT_IN_STPCPY_CHK)
2559 if (! ignore)
2560 return false;
2562 /* If return value of __stpcpy_chk is ignored,
2563 optimize into __strcpy_chk. */
2564 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2565 if (!fn)
2566 return false;
2568 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2569 replace_call_with_call_and_fold (gsi, repl);
2570 return true;
2573 if (! len || TREE_SIDE_EFFECTS (len))
2574 return false;
2576 /* If c_strlen returned something, but not a constant,
2577 transform __strcpy_chk into __memcpy_chk. */
2578 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2579 if (!fn)
2580 return false;
2582 gimple_seq stmts = NULL;
2583 len = gimple_convert (&stmts, loc, size_type_node, len);
2584 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2585 build_int_cst (size_type_node, 1));
2586 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2587 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2588 replace_call_with_call_and_fold (gsi, repl);
2589 return true;
2592 else
2593 maxlen = len;
2595 if (! tree_int_cst_lt (maxlen, size))
2596 return false;
2599 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2600 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2601 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2602 if (!fn)
2603 return false;
2605 gimple *repl = gimple_build_call (fn, 2, dest, src);
2606 replace_call_with_call_and_fold (gsi, repl);
2607 return true;
2610 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2611 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2612 length passed as third argument. IGNORE is true if return value can be
2613 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2615 static bool
2616 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2617 tree dest, tree src,
2618 tree len, tree size,
2619 enum built_in_function fcode)
2621 gimple *stmt = gsi_stmt (*gsi);
2622 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2623 tree fn;
2625 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2627 /* If return value of __stpncpy_chk is ignored,
2628 optimize into __strncpy_chk. */
2629 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2630 if (fn)
2632 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2633 replace_call_with_call_and_fold (gsi, repl);
2634 return true;
2638 if (! tree_fits_uhwi_p (size))
2639 return false;
2641 tree maxlen = get_maxval_strlen (len, 2);
2642 if (! integer_all_onesp (size))
2644 if (! tree_fits_uhwi_p (len))
2646 /* If LEN is not constant, try MAXLEN too.
2647 For MAXLEN only allow optimizing into non-_ocs function
2648 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2649 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2650 return false;
2652 else
2653 maxlen = len;
2655 if (tree_int_cst_lt (size, maxlen))
2656 return false;
2659 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2660 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2661 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2662 if (!fn)
2663 return false;
2665 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2666 replace_call_with_call_and_fold (gsi, repl);
2667 return true;
2670 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2671 Return NULL_TREE if no simplification can be made. */
2673 static bool
2674 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2676 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2677 location_t loc = gimple_location (stmt);
2678 tree dest = gimple_call_arg (stmt, 0);
2679 tree src = gimple_call_arg (stmt, 1);
2680 tree fn, len, lenp1;
2682 /* If the result is unused, replace stpcpy with strcpy. */
2683 if (gimple_call_lhs (stmt) == NULL_TREE)
2685 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2686 if (!fn)
2687 return false;
2688 gimple_call_set_fndecl (stmt, fn);
2689 fold_stmt (gsi);
2690 return true;
2693 len = c_strlen (src, 1);
2694 if (!len
2695 || TREE_CODE (len) != INTEGER_CST)
2696 return false;
2698 if (optimize_function_for_size_p (cfun)
2699 /* If length is zero it's small enough. */
2700 && !integer_zerop (len))
2701 return false;
2703 /* If the source has a known length replace stpcpy with memcpy. */
2704 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2705 if (!fn)
2706 return false;
2708 gimple_seq stmts = NULL;
2709 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2710 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2711 tem, build_int_cst (size_type_node, 1));
2712 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2713 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2714 gimple_set_vuse (repl, gimple_vuse (stmt));
2715 gimple_set_vdef (repl, gimple_vdef (stmt));
2716 if (gimple_vdef (repl)
2717 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2718 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2719 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2720 /* Replace the result with dest + len. */
2721 stmts = NULL;
2722 tem = gimple_convert (&stmts, loc, sizetype, len);
2723 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2724 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2725 POINTER_PLUS_EXPR, dest, tem);
2726 gsi_replace (gsi, ret, false);
2727 /* Finally fold the memcpy call. */
2728 gimple_stmt_iterator gsi2 = *gsi;
2729 gsi_prev (&gsi2);
2730 fold_stmt (&gsi2);
2731 return true;
2734 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2735 NULL_TREE if a normal call should be emitted rather than expanding
2736 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2737 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2738 passed as second argument. */
2740 static bool
2741 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2742 enum built_in_function fcode)
2744 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2745 tree dest, size, len, fn, fmt, flag;
2746 const char *fmt_str;
2748 /* Verify the required arguments in the original call. */
2749 if (gimple_call_num_args (stmt) < 5)
2750 return false;
2752 dest = gimple_call_arg (stmt, 0);
2753 len = gimple_call_arg (stmt, 1);
2754 flag = gimple_call_arg (stmt, 2);
2755 size = gimple_call_arg (stmt, 3);
2756 fmt = gimple_call_arg (stmt, 4);
2758 if (! tree_fits_uhwi_p (size))
2759 return false;
2761 if (! integer_all_onesp (size))
2763 tree maxlen = get_maxval_strlen (len, 2);
2764 if (! tree_fits_uhwi_p (len))
2766 /* If LEN is not constant, try MAXLEN too.
2767 For MAXLEN only allow optimizing into non-_ocs function
2768 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2769 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2770 return false;
2772 else
2773 maxlen = len;
2775 if (tree_int_cst_lt (size, maxlen))
2776 return false;
2779 if (!init_target_chars ())
2780 return false;
2782 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2783 or if format doesn't contain % chars or is "%s". */
2784 if (! integer_zerop (flag))
2786 fmt_str = c_getstr (fmt);
2787 if (fmt_str == NULL)
2788 return false;
2789 if (strchr (fmt_str, target_percent) != NULL
2790 && strcmp (fmt_str, target_percent_s))
2791 return false;
2794 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2795 available. */
2796 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2797 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2798 if (!fn)
2799 return false;
2801 /* Replace the called function and the first 5 argument by 3 retaining
2802 trailing varargs. */
2803 gimple_call_set_fndecl (stmt, fn);
2804 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2805 gimple_call_set_arg (stmt, 0, dest);
2806 gimple_call_set_arg (stmt, 1, len);
2807 gimple_call_set_arg (stmt, 2, fmt);
2808 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2809 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2810 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2811 fold_stmt (gsi);
2812 return true;
2815 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2816 Return NULL_TREE if a normal call should be emitted rather than
2817 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2818 or BUILT_IN_VSPRINTF_CHK. */
2820 static bool
2821 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2822 enum built_in_function fcode)
2824 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2825 tree dest, size, len, fn, fmt, flag;
2826 const char *fmt_str;
2827 unsigned nargs = gimple_call_num_args (stmt);
2829 /* Verify the required arguments in the original call. */
2830 if (nargs < 4)
2831 return false;
2832 dest = gimple_call_arg (stmt, 0);
2833 flag = gimple_call_arg (stmt, 1);
2834 size = gimple_call_arg (stmt, 2);
2835 fmt = gimple_call_arg (stmt, 3);
2837 if (! tree_fits_uhwi_p (size))
2838 return false;
2840 len = NULL_TREE;
2842 if (!init_target_chars ())
2843 return false;
2845 /* Check whether the format is a literal string constant. */
2846 fmt_str = c_getstr (fmt);
2847 if (fmt_str != NULL)
2849 /* If the format doesn't contain % args or %%, we know the size. */
2850 if (strchr (fmt_str, target_percent) == 0)
2852 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2853 len = build_int_cstu (size_type_node, strlen (fmt_str));
2855 /* If the format is "%s" and first ... argument is a string literal,
2856 we know the size too. */
2857 else if (fcode == BUILT_IN_SPRINTF_CHK
2858 && strcmp (fmt_str, target_percent_s) == 0)
2860 tree arg;
2862 if (nargs == 5)
2864 arg = gimple_call_arg (stmt, 4);
2865 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2867 len = c_strlen (arg, 1);
2868 if (! len || ! tree_fits_uhwi_p (len))
2869 len = NULL_TREE;
2875 if (! integer_all_onesp (size))
2877 if (! len || ! tree_int_cst_lt (len, size))
2878 return false;
2881 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2882 or if format doesn't contain % chars or is "%s". */
2883 if (! integer_zerop (flag))
2885 if (fmt_str == NULL)
2886 return false;
2887 if (strchr (fmt_str, target_percent) != NULL
2888 && strcmp (fmt_str, target_percent_s))
2889 return false;
2892 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2893 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2894 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2895 if (!fn)
2896 return false;
2898 /* Replace the called function and the first 4 argument by 2 retaining
2899 trailing varargs. */
2900 gimple_call_set_fndecl (stmt, fn);
2901 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2902 gimple_call_set_arg (stmt, 0, dest);
2903 gimple_call_set_arg (stmt, 1, fmt);
2904 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2905 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2906 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2907 fold_stmt (gsi);
2908 return true;
2911 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2912 ORIG may be null if this is a 2-argument call. We don't attempt to
2913 simplify calls with more than 3 arguments.
2915 Return true if simplification was possible, otherwise false. */
2917 bool
2918 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2920 gimple *stmt = gsi_stmt (*gsi);
2921 tree dest = gimple_call_arg (stmt, 0);
2922 tree fmt = gimple_call_arg (stmt, 1);
2923 tree orig = NULL_TREE;
2924 const char *fmt_str = NULL;
2926 /* Verify the required arguments in the original call. We deal with two
2927 types of sprintf() calls: 'sprintf (str, fmt)' and
2928 'sprintf (dest, "%s", orig)'. */
2929 if (gimple_call_num_args (stmt) > 3)
2930 return false;
2932 if (gimple_call_num_args (stmt) == 3)
2933 orig = gimple_call_arg (stmt, 2);
2935 /* Check whether the format is a literal string constant. */
2936 fmt_str = c_getstr (fmt);
2937 if (fmt_str == NULL)
2938 return false;
2940 if (!init_target_chars ())
2941 return false;
2943 /* If the format doesn't contain % args or %%, use strcpy. */
2944 if (strchr (fmt_str, target_percent) == NULL)
2946 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2948 if (!fn)
2949 return false;
2951 /* Don't optimize sprintf (buf, "abc", ptr++). */
2952 if (orig)
2953 return false;
2955 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2956 'format' is known to contain no % formats. */
2957 gimple_seq stmts = NULL;
2958 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2959 gimple_seq_add_stmt_without_update (&stmts, repl);
2960 if (gimple_call_lhs (stmt))
2962 repl = gimple_build_assign (gimple_call_lhs (stmt),
2963 build_int_cst (integer_type_node,
2964 strlen (fmt_str)));
2965 gimple_seq_add_stmt_without_update (&stmts, repl);
2966 gsi_replace_with_seq_vops (gsi, stmts);
2967 /* gsi now points at the assignment to the lhs, get a
2968 stmt iterator to the memcpy call.
2969 ??? We can't use gsi_for_stmt as that doesn't work when the
2970 CFG isn't built yet. */
2971 gimple_stmt_iterator gsi2 = *gsi;
2972 gsi_prev (&gsi2);
2973 fold_stmt (&gsi2);
2975 else
2977 gsi_replace_with_seq_vops (gsi, stmts);
2978 fold_stmt (gsi);
2980 return true;
2983 /* If the format is "%s", use strcpy if the result isn't used. */
2984 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2986 tree fn;
2987 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2989 if (!fn)
2990 return false;
2992 /* Don't crash on sprintf (str1, "%s"). */
2993 if (!orig)
2994 return false;
2996 tree orig_len = NULL_TREE;
2997 if (gimple_call_lhs (stmt))
2999 orig_len = get_maxval_strlen (orig, 0);
3000 if (!orig_len)
3001 return false;
3004 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3005 gimple_seq stmts = NULL;
3006 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3007 gimple_seq_add_stmt_without_update (&stmts, repl);
3008 if (gimple_call_lhs (stmt))
3010 if (!useless_type_conversion_p (integer_type_node,
3011 TREE_TYPE (orig_len)))
3012 orig_len = fold_convert (integer_type_node, orig_len);
3013 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3014 gimple_seq_add_stmt_without_update (&stmts, repl);
3015 gsi_replace_with_seq_vops (gsi, stmts);
3016 /* gsi now points at the assignment to the lhs, get a
3017 stmt iterator to the memcpy call.
3018 ??? We can't use gsi_for_stmt as that doesn't work when the
3019 CFG isn't built yet. */
3020 gimple_stmt_iterator gsi2 = *gsi;
3021 gsi_prev (&gsi2);
3022 fold_stmt (&gsi2);
3024 else
3026 gsi_replace_with_seq_vops (gsi, stmts);
3027 fold_stmt (gsi);
3029 return true;
3031 return false;
3034 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3035 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3036 attempt to simplify calls with more than 4 arguments.
3038 Return true if simplification was possible, otherwise false. */
3040 bool
3041 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3043 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3044 tree dest = gimple_call_arg (stmt, 0);
3045 tree destsize = gimple_call_arg (stmt, 1);
3046 tree fmt = gimple_call_arg (stmt, 2);
3047 tree orig = NULL_TREE;
3048 const char *fmt_str = NULL;
3050 if (gimple_call_num_args (stmt) > 4)
3051 return false;
3053 if (gimple_call_num_args (stmt) == 4)
3054 orig = gimple_call_arg (stmt, 3);
3056 if (!tree_fits_uhwi_p (destsize))
3057 return false;
3058 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3060 /* Check whether the format is a literal string constant. */
3061 fmt_str = c_getstr (fmt);
3062 if (fmt_str == NULL)
3063 return false;
3065 if (!init_target_chars ())
3066 return false;
3068 /* If the format doesn't contain % args or %%, use strcpy. */
3069 if (strchr (fmt_str, target_percent) == NULL)
3071 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3072 if (!fn)
3073 return false;
3075 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3076 if (orig)
3077 return false;
3079 /* We could expand this as
3080 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3081 or to
3082 memcpy (str, fmt_with_nul_at_cstm1, cst);
3083 but in the former case that might increase code size
3084 and in the latter case grow .rodata section too much.
3085 So punt for now. */
3086 size_t len = strlen (fmt_str);
3087 if (len >= destlen)
3088 return false;
3090 gimple_seq stmts = NULL;
3091 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3092 gimple_seq_add_stmt_without_update (&stmts, repl);
3093 if (gimple_call_lhs (stmt))
3095 repl = gimple_build_assign (gimple_call_lhs (stmt),
3096 build_int_cst (integer_type_node, len));
3097 gimple_seq_add_stmt_without_update (&stmts, repl);
3098 gsi_replace_with_seq_vops (gsi, stmts);
3099 /* gsi now points at the assignment to the lhs, get a
3100 stmt iterator to the memcpy call.
3101 ??? We can't use gsi_for_stmt as that doesn't work when the
3102 CFG isn't built yet. */
3103 gimple_stmt_iterator gsi2 = *gsi;
3104 gsi_prev (&gsi2);
3105 fold_stmt (&gsi2);
3107 else
3109 gsi_replace_with_seq_vops (gsi, stmts);
3110 fold_stmt (gsi);
3112 return true;
3115 /* If the format is "%s", use strcpy if the result isn't used. */
3116 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3118 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3119 if (!fn)
3120 return false;
3122 /* Don't crash on snprintf (str1, cst, "%s"). */
3123 if (!orig)
3124 return false;
3126 tree orig_len = get_maxval_strlen (orig, 0);
3127 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3128 return false;
3130 /* We could expand this as
3131 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3132 or to
3133 memcpy (str1, str2_with_nul_at_cstm1, cst);
3134 but in the former case that might increase code size
3135 and in the latter case grow .rodata section too much.
3136 So punt for now. */
3137 if (compare_tree_int (orig_len, destlen) >= 0)
3138 return false;
3140 /* Convert snprintf (str1, cst, "%s", str2) into
3141 strcpy (str1, str2) if strlen (str2) < cst. */
3142 gimple_seq stmts = NULL;
3143 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3144 gimple_seq_add_stmt_without_update (&stmts, repl);
3145 if (gimple_call_lhs (stmt))
3147 if (!useless_type_conversion_p (integer_type_node,
3148 TREE_TYPE (orig_len)))
3149 orig_len = fold_convert (integer_type_node, orig_len);
3150 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3151 gimple_seq_add_stmt_without_update (&stmts, repl);
3152 gsi_replace_with_seq_vops (gsi, stmts);
3153 /* gsi now points at the assignment to the lhs, get a
3154 stmt iterator to the memcpy call.
3155 ??? We can't use gsi_for_stmt as that doesn't work when the
3156 CFG isn't built yet. */
3157 gimple_stmt_iterator gsi2 = *gsi;
3158 gsi_prev (&gsi2);
3159 fold_stmt (&gsi2);
3161 else
3163 gsi_replace_with_seq_vops (gsi, stmts);
3164 fold_stmt (gsi);
3166 return true;
3168 return false;
3171 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3172 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3173 more than 3 arguments, and ARG may be null in the 2-argument case.
3175 Return NULL_TREE if no simplification was possible, otherwise return the
3176 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3177 code of the function to be simplified. */
3179 static bool
3180 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3181 tree fp, tree fmt, tree arg,
3182 enum built_in_function fcode)
3184 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3185 tree fn_fputc, fn_fputs;
3186 const char *fmt_str = NULL;
3188 /* If the return value is used, don't do the transformation. */
3189 if (gimple_call_lhs (stmt) != NULL_TREE)
3190 return false;
3192 /* Check whether the format is a literal string constant. */
3193 fmt_str = c_getstr (fmt);
3194 if (fmt_str == NULL)
3195 return false;
3197 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3199 /* If we're using an unlocked function, assume the other
3200 unlocked functions exist explicitly. */
3201 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3202 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3204 else
3206 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3207 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3210 if (!init_target_chars ())
3211 return false;
3213 /* If the format doesn't contain % args or %%, use strcpy. */
3214 if (strchr (fmt_str, target_percent) == NULL)
3216 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3217 && arg)
3218 return false;
3220 /* If the format specifier was "", fprintf does nothing. */
3221 if (fmt_str[0] == '\0')
3223 replace_call_with_value (gsi, NULL_TREE);
3224 return true;
3227 /* When "string" doesn't contain %, replace all cases of
3228 fprintf (fp, string) with fputs (string, fp). The fputs
3229 builtin will take care of special cases like length == 1. */
3230 if (fn_fputs)
3232 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3233 replace_call_with_call_and_fold (gsi, repl);
3234 return true;
3238 /* The other optimizations can be done only on the non-va_list variants. */
3239 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3240 return false;
3242 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3243 else if (strcmp (fmt_str, target_percent_s) == 0)
3245 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3246 return false;
3247 if (fn_fputs)
3249 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3250 replace_call_with_call_and_fold (gsi, repl);
3251 return true;
3255 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3256 else if (strcmp (fmt_str, target_percent_c) == 0)
3258 if (!arg
3259 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3260 return false;
3261 if (fn_fputc)
3263 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3264 replace_call_with_call_and_fold (gsi, repl);
3265 return true;
3269 return false;
3272 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3273 FMT and ARG are the arguments to the call; we don't fold cases with
3274 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3276 Return NULL_TREE if no simplification was possible, otherwise return the
3277 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3278 code of the function to be simplified. */
3280 static bool
3281 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3282 tree arg, enum built_in_function fcode)
3284 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3285 tree fn_putchar, fn_puts, newarg;
3286 const char *fmt_str = NULL;
3288 /* If the return value is used, don't do the transformation. */
3289 if (gimple_call_lhs (stmt) != NULL_TREE)
3290 return false;
3292 /* Check whether the format is a literal string constant. */
3293 fmt_str = c_getstr (fmt);
3294 if (fmt_str == NULL)
3295 return false;
3297 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3299 /* If we're using an unlocked function, assume the other
3300 unlocked functions exist explicitly. */
3301 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3302 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3304 else
3306 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3307 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3310 if (!init_target_chars ())
3311 return false;
3313 if (strcmp (fmt_str, target_percent_s) == 0
3314 || strchr (fmt_str, target_percent) == NULL)
3316 const char *str;
3318 if (strcmp (fmt_str, target_percent_s) == 0)
3320 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3321 return false;
3323 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3324 return false;
3326 str = c_getstr (arg);
3327 if (str == NULL)
3328 return false;
3330 else
3332 /* The format specifier doesn't contain any '%' characters. */
3333 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3334 && arg)
3335 return false;
3336 str = fmt_str;
3339 /* If the string was "", printf does nothing. */
3340 if (str[0] == '\0')
3342 replace_call_with_value (gsi, NULL_TREE);
3343 return true;
3346 /* If the string has length of 1, call putchar. */
3347 if (str[1] == '\0')
3349 /* Given printf("c"), (where c is any one character,)
3350 convert "c"[0] to an int and pass that to the replacement
3351 function. */
3352 newarg = build_int_cst (integer_type_node, str[0]);
3353 if (fn_putchar)
3355 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3356 replace_call_with_call_and_fold (gsi, repl);
3357 return true;
3360 else
3362 /* If the string was "string\n", call puts("string"). */
3363 size_t len = strlen (str);
3364 if ((unsigned char)str[len - 1] == target_newline
3365 && (size_t) (int) len == len
3366 && (int) len > 0)
3368 char *newstr;
3369 tree offset_node, string_cst;
3371 /* Create a NUL-terminated string that's one char shorter
3372 than the original, stripping off the trailing '\n'. */
3373 newarg = build_string_literal (len, str);
3374 string_cst = string_constant (newarg, &offset_node);
3375 gcc_checking_assert (string_cst
3376 && (TREE_STRING_LENGTH (string_cst)
3377 == (int) len)
3378 && integer_zerop (offset_node)
3379 && (unsigned char)
3380 TREE_STRING_POINTER (string_cst)[len - 1]
3381 == target_newline);
3382 /* build_string_literal creates a new STRING_CST,
3383 modify it in place to avoid double copying. */
3384 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3385 newstr[len - 1] = '\0';
3386 if (fn_puts)
3388 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3389 replace_call_with_call_and_fold (gsi, repl);
3390 return true;
3393 else
3394 /* We'd like to arrange to call fputs(string,stdout) here,
3395 but we need stdout and don't have a way to get it yet. */
3396 return false;
3400 /* The other optimizations can be done only on the non-va_list variants. */
3401 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3402 return false;
3404 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3405 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3407 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3408 return false;
3409 if (fn_puts)
3411 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3412 replace_call_with_call_and_fold (gsi, repl);
3413 return true;
3417 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3418 else if (strcmp (fmt_str, target_percent_c) == 0)
3420 if (!arg || ! useless_type_conversion_p (integer_type_node,
3421 TREE_TYPE (arg)))
3422 return false;
3423 if (fn_putchar)
3425 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3426 replace_call_with_call_and_fold (gsi, repl);
3427 return true;
3431 return false;
3436 /* Fold a call to __builtin_strlen with known length LEN. */
3438 static bool
3439 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3441 gimple *stmt = gsi_stmt (*gsi);
3442 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3443 if (!len)
3444 return false;
3445 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3446 replace_call_with_value (gsi, len);
3447 return true;
3450 /* Fold a call to __builtin_acc_on_device. */
3452 static bool
3453 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3455 /* Defer folding until we know which compiler we're in. */
3456 if (symtab->state != EXPANSION)
3457 return false;
3459 unsigned val_host = GOMP_DEVICE_HOST;
3460 unsigned val_dev = GOMP_DEVICE_NONE;
3462 #ifdef ACCEL_COMPILER
3463 val_host = GOMP_DEVICE_NOT_HOST;
3464 val_dev = ACCEL_COMPILER_acc_device;
3465 #endif
3467 location_t loc = gimple_location (gsi_stmt (*gsi));
3469 tree host_eq = make_ssa_name (boolean_type_node);
3470 gimple *host_ass = gimple_build_assign
3471 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3472 gimple_set_location (host_ass, loc);
3473 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3475 tree dev_eq = make_ssa_name (boolean_type_node);
3476 gimple *dev_ass = gimple_build_assign
3477 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3478 gimple_set_location (dev_ass, loc);
3479 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3481 tree result = make_ssa_name (boolean_type_node);
3482 gimple *result_ass = gimple_build_assign
3483 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3484 gimple_set_location (result_ass, loc);
3485 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3487 replace_call_with_value (gsi, result);
3489 return true;
3492 /* Fold realloc (0, n) -> malloc (n). */
3494 static bool
3495 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3497 gimple *stmt = gsi_stmt (*gsi);
3498 tree arg = gimple_call_arg (stmt, 0);
3499 tree size = gimple_call_arg (stmt, 1);
3501 if (operand_equal_p (arg, null_pointer_node, 0))
3503 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3504 if (fn_malloc)
3506 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3507 replace_call_with_call_and_fold (gsi, repl);
3508 return true;
3511 return false;
3514 /* Fold the non-target builtin at *GSI and return whether any simplification
3515 was made. */
3517 static bool
3518 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3520 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3521 tree callee = gimple_call_fndecl (stmt);
3523 /* Give up for always_inline inline builtins until they are
3524 inlined. */
3525 if (avoid_folding_inline_builtin (callee))
3526 return false;
3528 unsigned n = gimple_call_num_args (stmt);
3529 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3530 switch (fcode)
3532 case BUILT_IN_BCMP:
3533 return gimple_fold_builtin_bcmp (gsi);
3534 case BUILT_IN_BCOPY:
3535 return gimple_fold_builtin_bcopy (gsi);
3536 case BUILT_IN_BZERO:
3537 return gimple_fold_builtin_bzero (gsi);
3539 case BUILT_IN_MEMSET:
3540 return gimple_fold_builtin_memset (gsi,
3541 gimple_call_arg (stmt, 1),
3542 gimple_call_arg (stmt, 2));
3543 case BUILT_IN_MEMCPY:
3544 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3545 gimple_call_arg (stmt, 1), 0);
3546 case BUILT_IN_MEMPCPY:
3547 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3548 gimple_call_arg (stmt, 1), 1);
3549 case BUILT_IN_MEMMOVE:
3550 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3551 gimple_call_arg (stmt, 1), 3);
3552 case BUILT_IN_SPRINTF_CHK:
3553 case BUILT_IN_VSPRINTF_CHK:
3554 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3555 case BUILT_IN_STRCAT_CHK:
3556 return gimple_fold_builtin_strcat_chk (gsi);
3557 case BUILT_IN_STRNCAT_CHK:
3558 return gimple_fold_builtin_strncat_chk (gsi);
3559 case BUILT_IN_STRLEN:
3560 return gimple_fold_builtin_strlen (gsi);
3561 case BUILT_IN_STRCPY:
3562 return gimple_fold_builtin_strcpy (gsi,
3563 gimple_call_arg (stmt, 0),
3564 gimple_call_arg (stmt, 1));
3565 case BUILT_IN_STRNCPY:
3566 return gimple_fold_builtin_strncpy (gsi,
3567 gimple_call_arg (stmt, 0),
3568 gimple_call_arg (stmt, 1),
3569 gimple_call_arg (stmt, 2));
3570 case BUILT_IN_STRCAT:
3571 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3572 gimple_call_arg (stmt, 1));
3573 case BUILT_IN_STRNCAT:
3574 return gimple_fold_builtin_strncat (gsi);
3575 case BUILT_IN_INDEX:
3576 case BUILT_IN_STRCHR:
3577 return gimple_fold_builtin_strchr (gsi, false);
3578 case BUILT_IN_RINDEX:
3579 case BUILT_IN_STRRCHR:
3580 return gimple_fold_builtin_strchr (gsi, true);
3581 case BUILT_IN_STRSTR:
3582 return gimple_fold_builtin_strstr (gsi);
3583 case BUILT_IN_STRCMP:
3584 case BUILT_IN_STRCASECMP:
3585 case BUILT_IN_STRNCMP:
3586 case BUILT_IN_STRNCASECMP:
3587 return gimple_fold_builtin_string_compare (gsi);
3588 case BUILT_IN_MEMCHR:
3589 return gimple_fold_builtin_memchr (gsi);
3590 case BUILT_IN_FPUTS:
3591 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3592 gimple_call_arg (stmt, 1), false);
3593 case BUILT_IN_FPUTS_UNLOCKED:
3594 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3595 gimple_call_arg (stmt, 1), true);
3596 case BUILT_IN_MEMCPY_CHK:
3597 case BUILT_IN_MEMPCPY_CHK:
3598 case BUILT_IN_MEMMOVE_CHK:
3599 case BUILT_IN_MEMSET_CHK:
3600 return gimple_fold_builtin_memory_chk (gsi,
3601 gimple_call_arg (stmt, 0),
3602 gimple_call_arg (stmt, 1),
3603 gimple_call_arg (stmt, 2),
3604 gimple_call_arg (stmt, 3),
3605 fcode);
3606 case BUILT_IN_STPCPY:
3607 return gimple_fold_builtin_stpcpy (gsi);
3608 case BUILT_IN_STRCPY_CHK:
3609 case BUILT_IN_STPCPY_CHK:
3610 return gimple_fold_builtin_stxcpy_chk (gsi,
3611 gimple_call_arg (stmt, 0),
3612 gimple_call_arg (stmt, 1),
3613 gimple_call_arg (stmt, 2),
3614 fcode);
3615 case BUILT_IN_STRNCPY_CHK:
3616 case BUILT_IN_STPNCPY_CHK:
3617 return gimple_fold_builtin_stxncpy_chk (gsi,
3618 gimple_call_arg (stmt, 0),
3619 gimple_call_arg (stmt, 1),
3620 gimple_call_arg (stmt, 2),
3621 gimple_call_arg (stmt, 3),
3622 fcode);
3623 case BUILT_IN_SNPRINTF_CHK:
3624 case BUILT_IN_VSNPRINTF_CHK:
3625 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3627 case BUILT_IN_FPRINTF:
3628 case BUILT_IN_FPRINTF_UNLOCKED:
3629 case BUILT_IN_VFPRINTF:
3630 if (n == 2 || n == 3)
3631 return gimple_fold_builtin_fprintf (gsi,
3632 gimple_call_arg (stmt, 0),
3633 gimple_call_arg (stmt, 1),
3634 n == 3
3635 ? gimple_call_arg (stmt, 2)
3636 : NULL_TREE,
3637 fcode);
3638 break;
3639 case BUILT_IN_FPRINTF_CHK:
3640 case BUILT_IN_VFPRINTF_CHK:
3641 if (n == 3 || n == 4)
3642 return gimple_fold_builtin_fprintf (gsi,
3643 gimple_call_arg (stmt, 0),
3644 gimple_call_arg (stmt, 2),
3645 n == 4
3646 ? gimple_call_arg (stmt, 3)
3647 : NULL_TREE,
3648 fcode);
3649 break;
3650 case BUILT_IN_PRINTF:
3651 case BUILT_IN_PRINTF_UNLOCKED:
3652 case BUILT_IN_VPRINTF:
3653 if (n == 1 || n == 2)
3654 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3655 n == 2
3656 ? gimple_call_arg (stmt, 1)
3657 : NULL_TREE, fcode);
3658 break;
3659 case BUILT_IN_PRINTF_CHK:
3660 case BUILT_IN_VPRINTF_CHK:
3661 if (n == 2 || n == 3)
3662 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3663 n == 3
3664 ? gimple_call_arg (stmt, 2)
3665 : NULL_TREE, fcode);
3666 break;
3667 case BUILT_IN_ACC_ON_DEVICE:
3668 return gimple_fold_builtin_acc_on_device (gsi,
3669 gimple_call_arg (stmt, 0));
3670 case BUILT_IN_REALLOC:
3671 return gimple_fold_builtin_realloc (gsi);
3673 default:;
3676 /* Try the generic builtin folder. */
3677 bool ignore = (gimple_call_lhs (stmt) == NULL);
3678 tree result = fold_call_stmt (stmt, ignore);
3679 if (result)
3681 if (ignore)
3682 STRIP_NOPS (result);
3683 else
3684 result = fold_convert (gimple_call_return_type (stmt), result);
3685 if (!update_call_from_tree (gsi, result))
3686 gimplify_and_update_call_from_tree (gsi, result);
3687 return true;
3690 return false;
3693 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3694 function calls to constants, where possible. */
3696 static tree
3697 fold_internal_goacc_dim (const gimple *call)
3699 int axis = oacc_get_ifn_dim_arg (call);
3700 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3701 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3702 tree result = NULL_TREE;
3704 /* If the size is 1, or we only want the size and it is not dynamic,
3705 we know the answer. */
3706 if (size == 1 || (!is_pos && size))
3708 tree type = TREE_TYPE (gimple_call_lhs (call));
3709 result = build_int_cst (type, size - is_pos);
3712 return result;
3715 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3716 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3717 &var where var is only addressable because of such calls. */
3719 bool
3720 optimize_atomic_compare_exchange_p (gimple *stmt)
3722 if (gimple_call_num_args (stmt) != 6
3723 || !flag_inline_atomics
3724 || !optimize
3725 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3726 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3727 || !gimple_vdef (stmt)
3728 || !gimple_vuse (stmt))
3729 return false;
3731 tree fndecl = gimple_call_fndecl (stmt);
3732 switch (DECL_FUNCTION_CODE (fndecl))
3734 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3735 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3736 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3737 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3738 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3739 break;
3740 default:
3741 return false;
3744 tree expected = gimple_call_arg (stmt, 1);
3745 if (TREE_CODE (expected) != ADDR_EXPR
3746 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3747 return false;
3749 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3750 if (!is_gimple_reg_type (etype)
3751 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3752 || TREE_THIS_VOLATILE (etype)
3753 || VECTOR_TYPE_P (etype)
3754 || TREE_CODE (etype) == COMPLEX_TYPE
3755 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3756 might not preserve all the bits. See PR71716. */
3757 || SCALAR_FLOAT_TYPE_P (etype)
3758 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3759 return false;
3761 tree weak = gimple_call_arg (stmt, 3);
3762 if (!integer_zerop (weak) && !integer_onep (weak))
3763 return false;
3765 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3766 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3767 machine_mode mode = TYPE_MODE (itype);
3769 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3770 == CODE_FOR_nothing
3771 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3772 return false;
3774 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3775 return false;
3777 return true;
3780 /* Fold
3781 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3782 into
3783 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3784 i = IMAGPART_EXPR <t>;
3785 r = (_Bool) i;
3786 e = REALPART_EXPR <t>; */
3788 void
3789 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3791 gimple *stmt = gsi_stmt (*gsi);
3792 tree fndecl = gimple_call_fndecl (stmt);
3793 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3794 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3795 tree ctype = build_complex_type (itype);
3796 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3797 bool throws = false;
3798 edge e = NULL;
3799 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3800 expected);
3801 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3802 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3803 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3805 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3806 build1 (VIEW_CONVERT_EXPR, itype,
3807 gimple_assign_lhs (g)));
3808 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3810 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3811 + int_size_in_bytes (itype);
3812 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3813 gimple_call_arg (stmt, 0),
3814 gimple_assign_lhs (g),
3815 gimple_call_arg (stmt, 2),
3816 build_int_cst (integer_type_node, flag),
3817 gimple_call_arg (stmt, 4),
3818 gimple_call_arg (stmt, 5));
3819 tree lhs = make_ssa_name (ctype);
3820 gimple_call_set_lhs (g, lhs);
3821 gimple_set_vdef (g, gimple_vdef (stmt));
3822 gimple_set_vuse (g, gimple_vuse (stmt));
3823 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3824 tree oldlhs = gimple_call_lhs (stmt);
3825 if (stmt_can_throw_internal (stmt))
3827 throws = true;
3828 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3830 gimple_call_set_nothrow (as_a <gcall *> (g),
3831 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3832 gimple_call_set_lhs (stmt, NULL_TREE);
3833 gsi_replace (gsi, g, true);
3834 if (oldlhs)
3836 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3837 build1 (IMAGPART_EXPR, itype, lhs));
3838 if (throws)
3840 gsi_insert_on_edge_immediate (e, g);
3841 *gsi = gsi_for_stmt (g);
3843 else
3844 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3845 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3846 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3848 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3849 build1 (REALPART_EXPR, itype, lhs));
3850 if (throws && oldlhs == NULL_TREE)
3852 gsi_insert_on_edge_immediate (e, g);
3853 *gsi = gsi_for_stmt (g);
3855 else
3856 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3857 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3859 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3860 VIEW_CONVERT_EXPR,
3861 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3862 gimple_assign_lhs (g)));
3863 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3865 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3866 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3867 *gsi = gsiret;
3870 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3871 doesn't fit into TYPE. The test for overflow should be regardless of
3872 -fwrapv, and even for unsigned types. */
3874 bool
3875 arith_overflowed_p (enum tree_code code, const_tree type,
3876 const_tree arg0, const_tree arg1)
3878 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3879 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3880 widest2_int_cst;
3881 widest2_int warg0 = widest2_int_cst (arg0);
3882 widest2_int warg1 = widest2_int_cst (arg1);
3883 widest2_int wres;
3884 switch (code)
3886 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3887 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3888 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3889 default: gcc_unreachable ();
3891 signop sign = TYPE_SIGN (type);
3892 if (sign == UNSIGNED && wi::neg_p (wres))
3893 return true;
3894 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3897 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3898 The statement may be replaced by another statement, e.g., if the call
3899 simplifies to a constant value. Return true if any changes were made.
3900 It is assumed that the operands have been previously folded. */
3902 static bool
3903 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3905 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3906 tree callee;
3907 bool changed = false;
3908 unsigned i;
3910 /* Fold *& in call arguments. */
3911 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3912 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3914 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3915 if (tmp)
3917 gimple_call_set_arg (stmt, i, tmp);
3918 changed = true;
3922 /* Check for virtual calls that became direct calls. */
3923 callee = gimple_call_fn (stmt);
3924 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3926 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3928 if (dump_file && virtual_method_call_p (callee)
3929 && !possible_polymorphic_call_target_p
3930 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3931 (OBJ_TYPE_REF_EXPR (callee)))))
3933 fprintf (dump_file,
3934 "Type inheritance inconsistent devirtualization of ");
3935 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3936 fprintf (dump_file, " to ");
3937 print_generic_expr (dump_file, callee, TDF_SLIM);
3938 fprintf (dump_file, "\n");
3941 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3942 changed = true;
3944 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3946 bool final;
3947 vec <cgraph_node *>targets
3948 = possible_polymorphic_call_targets (callee, stmt, &final);
3949 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3951 tree lhs = gimple_call_lhs (stmt);
3952 if (dump_enabled_p ())
3954 location_t loc = gimple_location_safe (stmt);
3955 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3956 "folding virtual function call to %s\n",
3957 targets.length () == 1
3958 ? targets[0]->name ()
3959 : "__builtin_unreachable");
3961 if (targets.length () == 1)
3963 tree fndecl = targets[0]->decl;
3964 gimple_call_set_fndecl (stmt, fndecl);
3965 changed = true;
3966 /* If changing the call to __cxa_pure_virtual
3967 or similar noreturn function, adjust gimple_call_fntype
3968 too. */
3969 if (gimple_call_noreturn_p (stmt)
3970 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3971 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3972 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3973 == void_type_node))
3974 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3975 /* If the call becomes noreturn, remove the lhs. */
3976 if (lhs
3977 && gimple_call_noreturn_p (stmt)
3978 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3979 || should_remove_lhs_p (lhs)))
3981 if (TREE_CODE (lhs) == SSA_NAME)
3983 tree var = create_tmp_var (TREE_TYPE (lhs));
3984 tree def = get_or_create_ssa_default_def (cfun, var);
3985 gimple *new_stmt = gimple_build_assign (lhs, def);
3986 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3988 gimple_call_set_lhs (stmt, NULL_TREE);
3990 maybe_remove_unused_call_args (cfun, stmt);
3992 else
3994 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3995 gimple *new_stmt = gimple_build_call (fndecl, 0);
3996 gimple_set_location (new_stmt, gimple_location (stmt));
3997 /* If the call had a SSA name as lhs morph that into
3998 an uninitialized value. */
3999 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4001 tree var = create_tmp_var (TREE_TYPE (lhs));
4002 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4003 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4004 set_ssa_default_def (cfun, var, lhs);
4006 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4007 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4008 gsi_replace (gsi, new_stmt, false);
4009 return true;
4015 /* Check for indirect calls that became direct calls, and then
4016 no longer require a static chain. */
4017 if (gimple_call_chain (stmt))
4019 tree fn = gimple_call_fndecl (stmt);
4020 if (fn && !DECL_STATIC_CHAIN (fn))
4022 gimple_call_set_chain (stmt, NULL);
4023 changed = true;
4025 else
4027 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4028 if (tmp)
4030 gimple_call_set_chain (stmt, tmp);
4031 changed = true;
4036 if (inplace)
4037 return changed;
4039 /* Check for builtins that CCP can handle using information not
4040 available in the generic fold routines. */
4041 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4043 if (gimple_fold_builtin (gsi))
4044 changed = true;
4046 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4048 changed |= targetm.gimple_fold_builtin (gsi);
4050 else if (gimple_call_internal_p (stmt))
4052 enum tree_code subcode = ERROR_MARK;
4053 tree result = NULL_TREE;
4054 bool cplx_result = false;
4055 tree overflow = NULL_TREE;
4056 switch (gimple_call_internal_fn (stmt))
4058 case IFN_BUILTIN_EXPECT:
4059 result = fold_builtin_expect (gimple_location (stmt),
4060 gimple_call_arg (stmt, 0),
4061 gimple_call_arg (stmt, 1),
4062 gimple_call_arg (stmt, 2));
4063 break;
4064 case IFN_UBSAN_OBJECT_SIZE:
4066 tree offset = gimple_call_arg (stmt, 1);
4067 tree objsize = gimple_call_arg (stmt, 2);
4068 if (integer_all_onesp (objsize)
4069 || (TREE_CODE (offset) == INTEGER_CST
4070 && TREE_CODE (objsize) == INTEGER_CST
4071 && tree_int_cst_le (offset, objsize)))
4073 replace_call_with_value (gsi, NULL_TREE);
4074 return true;
4077 break;
4078 case IFN_UBSAN_PTR:
4079 if (integer_zerop (gimple_call_arg (stmt, 1)))
4081 replace_call_with_value (gsi, NULL_TREE);
4082 return true;
4084 break;
4085 case IFN_UBSAN_BOUNDS:
4087 tree index = gimple_call_arg (stmt, 1);
4088 tree bound = gimple_call_arg (stmt, 2);
4089 if (TREE_CODE (index) == INTEGER_CST
4090 && TREE_CODE (bound) == INTEGER_CST)
4092 index = fold_convert (TREE_TYPE (bound), index);
4093 if (TREE_CODE (index) == INTEGER_CST
4094 && tree_int_cst_le (index, bound))
4096 replace_call_with_value (gsi, NULL_TREE);
4097 return true;
4101 break;
4102 case IFN_GOACC_DIM_SIZE:
4103 case IFN_GOACC_DIM_POS:
4104 result = fold_internal_goacc_dim (stmt);
4105 break;
4106 case IFN_UBSAN_CHECK_ADD:
4107 subcode = PLUS_EXPR;
4108 break;
4109 case IFN_UBSAN_CHECK_SUB:
4110 subcode = MINUS_EXPR;
4111 break;
4112 case IFN_UBSAN_CHECK_MUL:
4113 subcode = MULT_EXPR;
4114 break;
4115 case IFN_ADD_OVERFLOW:
4116 subcode = PLUS_EXPR;
4117 cplx_result = true;
4118 break;
4119 case IFN_SUB_OVERFLOW:
4120 subcode = MINUS_EXPR;
4121 cplx_result = true;
4122 break;
4123 case IFN_MUL_OVERFLOW:
4124 subcode = MULT_EXPR;
4125 cplx_result = true;
4126 break;
4127 default:
4128 break;
4130 if (subcode != ERROR_MARK)
4132 tree arg0 = gimple_call_arg (stmt, 0);
4133 tree arg1 = gimple_call_arg (stmt, 1);
4134 tree type = TREE_TYPE (arg0);
4135 if (cplx_result)
4137 tree lhs = gimple_call_lhs (stmt);
4138 if (lhs == NULL_TREE)
4139 type = NULL_TREE;
4140 else
4141 type = TREE_TYPE (TREE_TYPE (lhs));
4143 if (type == NULL_TREE)
4145 /* x = y + 0; x = y - 0; x = y * 0; */
4146 else if (integer_zerop (arg1))
4147 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4148 /* x = 0 + y; x = 0 * y; */
4149 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4150 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4151 /* x = y - y; */
4152 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4153 result = integer_zero_node;
4154 /* x = y * 1; x = 1 * y; */
4155 else if (subcode == MULT_EXPR && integer_onep (arg1))
4156 result = arg0;
4157 else if (subcode == MULT_EXPR && integer_onep (arg0))
4158 result = arg1;
4159 else if (TREE_CODE (arg0) == INTEGER_CST
4160 && TREE_CODE (arg1) == INTEGER_CST)
4162 if (cplx_result)
4163 result = int_const_binop (subcode, fold_convert (type, arg0),
4164 fold_convert (type, arg1));
4165 else
4166 result = int_const_binop (subcode, arg0, arg1);
4167 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4169 if (cplx_result)
4170 overflow = build_one_cst (type);
4171 else
4172 result = NULL_TREE;
4175 if (result)
4177 if (result == integer_zero_node)
4178 result = build_zero_cst (type);
4179 else if (cplx_result && TREE_TYPE (result) != type)
4181 if (TREE_CODE (result) == INTEGER_CST)
4183 if (arith_overflowed_p (PLUS_EXPR, type, result,
4184 integer_zero_node))
4185 overflow = build_one_cst (type);
4187 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4188 && TYPE_UNSIGNED (type))
4189 || (TYPE_PRECISION (type)
4190 < (TYPE_PRECISION (TREE_TYPE (result))
4191 + (TYPE_UNSIGNED (TREE_TYPE (result))
4192 && !TYPE_UNSIGNED (type)))))
4193 result = NULL_TREE;
4194 if (result)
4195 result = fold_convert (type, result);
4200 if (result)
4202 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4203 result = drop_tree_overflow (result);
4204 if (cplx_result)
4206 if (overflow == NULL_TREE)
4207 overflow = build_zero_cst (TREE_TYPE (result));
4208 tree ctype = build_complex_type (TREE_TYPE (result));
4209 if (TREE_CODE (result) == INTEGER_CST
4210 && TREE_CODE (overflow) == INTEGER_CST)
4211 result = build_complex (ctype, result, overflow);
4212 else
4213 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4214 ctype, result, overflow);
4216 if (!update_call_from_tree (gsi, result))
4217 gimplify_and_update_call_from_tree (gsi, result);
4218 changed = true;
4222 return changed;
4226 /* Return true whether NAME has a use on STMT. */
4228 static bool
4229 has_use_on_stmt (tree name, gimple *stmt)
4231 imm_use_iterator iter;
4232 use_operand_p use_p;
4233 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4234 if (USE_STMT (use_p) == stmt)
4235 return true;
4236 return false;
4239 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4240 gimple_simplify.
4242 Replaces *GSI with the simplification result in RCODE and OPS
4243 and the associated statements in *SEQ. Does the replacement
4244 according to INPLACE and returns true if the operation succeeded. */
4246 static bool
4247 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4248 code_helper rcode, tree *ops,
4249 gimple_seq *seq, bool inplace)
4251 gimple *stmt = gsi_stmt (*gsi);
4253 /* Play safe and do not allow abnormals to be mentioned in
4254 newly created statements. See also maybe_push_res_to_seq.
4255 As an exception allow such uses if there was a use of the
4256 same SSA name on the old stmt. */
4257 if ((TREE_CODE (ops[0]) == SSA_NAME
4258 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4259 && !has_use_on_stmt (ops[0], stmt))
4260 || (ops[1]
4261 && TREE_CODE (ops[1]) == SSA_NAME
4262 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4263 && !has_use_on_stmt (ops[1], stmt))
4264 || (ops[2]
4265 && TREE_CODE (ops[2]) == SSA_NAME
4266 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4267 && !has_use_on_stmt (ops[2], stmt))
4268 || (COMPARISON_CLASS_P (ops[0])
4269 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4270 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4271 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4272 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4273 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4274 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4275 return false;
4277 /* Don't insert new statements when INPLACE is true, even if we could
4278 reuse STMT for the final statement. */
4279 if (inplace && !gimple_seq_empty_p (*seq))
4280 return false;
/* Case 1: STMT is a GIMPLE_COND.  Rewrite its condition from the
   simplification result: a comparison code directly, an SSA name as
   NAME != 0, a constant as an always-true/false condition, or (when
   not INPLACE) force the result to a new SSA name compared to zero.  */
4282 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4284 gcc_assert (rcode.is_tree_code ());
4285 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4286 /* GIMPLE_CONDs condition may not throw. */
4287 && (!flag_exceptions
4288 || !cfun->can_throw_non_call_exceptions
4289 || !operation_could_trap_p (rcode,
4290 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4291 false, NULL_TREE)))
4292 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4293 else if (rcode == SSA_NAME)
4294 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4295 build_zero_cst (TREE_TYPE (ops[0])));
4296 else if (rcode == INTEGER_CST)
4298 if (integer_zerop (ops[0]))
4299 gimple_cond_make_false (cond_stmt)
4300 else
4301 gimple_cond_make_true (cond_stmt);
4303 else if (!inplace)
4305 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4306 ops, seq);
4307 if (!res)
4308 return false;
4309 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4310 build_zero_cst (TREE_TYPE (res)));
4312 else
4313 return false;
4314 if (dump_file && (dump_flags & TDF_DETAILS))
4316 fprintf (dump_file, "gimple_simplified to ");
4317 if (!gimple_seq_empty_p (*seq))
4318 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4319 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4320 0, TDF_SLIM);
4322 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4323 return true;
/* Case 2: STMT is an assignment and the simplification produced a tree
   code; replace just the RHS.  INPLACE only allowed when the new RHS
   does not need more operand slots than the old statement has.  */
4325 else if (is_gimple_assign (stmt)
4326 && rcode.is_tree_code ())
4328 if (!inplace
4329 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4331 maybe_build_generic_op (rcode,
4332 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4333 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4334 if (dump_file && (dump_flags & TDF_DETAILS))
4336 fprintf (dump_file, "gimple_simplified to ");
4337 if (!gimple_seq_empty_p (*seq))
4338 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4339 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4340 0, TDF_SLIM);
4342 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4343 return true;
/* Case 3: the result is the same combined function STMT already calls;
   only the argument values changed, so update them in place.  */
4346 else if (rcode.is_fn_code ()
4347 && gimple_call_combined_fn (stmt) == rcode)
4349 unsigned i;
4350 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4352 gcc_assert (ops[i] != NULL_TREE);
4353 gimple_call_set_arg (stmt, i, ops[i]);
/* Sanity check: no leftover operand beyond the call's arguments.  */
4355 if (i < 3)
4356 gcc_assert (ops[i] == NULL_TREE);
4357 if (dump_file && (dump_flags & TDF_DETAILS))
4359 fprintf (dump_file, "gimple_simplified to ");
4360 if (!gimple_seq_empty_p (*seq))
4361 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4362 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4364 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4365 return true;
/* Case 4: otherwise materialize the result into *SEQ targeting STMT's
   LHS and replace the whole statement.  Only valid when not INPLACE
   and STMT has an LHS to receive the value.  */
4367 else if (!inplace)
4369 if (gimple_has_lhs (stmt))
4371 tree lhs = gimple_get_lhs (stmt);
4372 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4373 ops, seq, lhs))
4374 return false;
4375 if (dump_file && (dump_flags & TDF_DETAILS))
4377 fprintf (dump_file, "gimple_simplified to ");
4378 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4380 gsi_replace_with_seq_vops (gsi, *seq);
4381 return true;
4383 else
4384 gcc_unreachable ();
4387 return false;
4390 /* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; it is rewritten in place.
   Returns true if anything was changed.  */
4392 static bool
4393 maybe_canonicalize_mem_ref_addr (tree *t)
4395 bool res = false;
/* Look through an outer ADDR_EXPR so the rules below apply to the
   referenced object.  */
4397 if (TREE_CODE (*t) == ADDR_EXPR)
4398 t = &TREE_OPERAND (*t, 0);
4400 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4401 generic vector extension. The actual vector referenced is
4402 view-converted to an array type for this purpose. If the index
4403 is constant the canonical representation in the middle-end is a
4404 BIT_FIELD_REF so re-write the former to the latter here. */
4405 if (TREE_CODE (*t) == ARRAY_REF
4406 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4407 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4408 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4410 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4411 if (VECTOR_TYPE_P (vtype))
4413 tree low = array_ref_low_bound (*t);
4414 if (TREE_CODE (low) == INTEGER_CST)
4416 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit position of the accessed element and make sure
   the access does not extend past the vector.  */
4418 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4419 wi::to_widest (low));
4420 idx = wi::mul (idx, wi::to_widest
4421 (TYPE_SIZE (TREE_TYPE (*t))));
4422 widest_int ext
4423 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4424 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4426 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4427 TREE_TYPE (*t),
4428 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4429 TYPE_SIZE (TREE_TYPE (*t)),
4430 wide_int_to_tree (bitsizetype, idx));
4431 res = true;
/* Strip handled components to get at the innermost base reference.  */
4438 while (handled_component_p (*t))
4439 t = &TREE_OPERAND (*t, 0);
4441 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4442 of invariant addresses into a SSA name MEM_REF address. */
4443 if (TREE_CODE (*t) == MEM_REF
4444 || TREE_CODE (*t) == TARGET_MEM_REF)
4446 tree addr = TREE_OPERAND (*t, 0);
4447 if (TREE_CODE (addr) == ADDR_EXPR
4448 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4449 || handled_component_p (TREE_OPERAND (addr, 0))))
4451 tree base;
4452 HOST_WIDE_INT coffset;
4453 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4454 &coffset);
4455 if (!base)
4456 gcc_unreachable ();
/* Fold the constant offset into the MEM_REF's offset operand.  */
4458 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4459 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4460 TREE_OPERAND (*t, 1),
4461 size_int (coffset));
4462 res = true;
4464 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4465 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4468 /* Canonicalize back MEM_REFs to plain reference trees if the object
4469 accessed is a decl that has the same access semantics as the MEM_REF. */
4470 if (TREE_CODE (*t) == MEM_REF
4471 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4472 && integer_zerop (TREE_OPERAND (*t, 1))
4473 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4475 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4476 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4477 if (/* Same volatile qualification. */
4478 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4479 /* Same TBAA behavior with -fstrict-aliasing. */
4480 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4481 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4482 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4483 /* Same alignment. */
4484 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4485 /* We have to look out here to not drop a required conversion
4486 from the rhs to the lhs if *t appears on the lhs or vice-versa
4487 if it appears on the rhs. Thus require strict type
4488 compatibility. */
4489 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4491 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4492 res = true;
4496 /* Canonicalize TARGET_MEM_REF in particular with respect to
4497 the indexes becoming constant. */
4498 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4500 tree tem = maybe_fold_tmr (*t);
4501 if (tem)
4503 *t = tem;
4504 res = true;
4508 return res;
4511 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4512 distinguishes both cases.  VALUEIZE is used to look through SSA
   definitions during pattern-based simplification.  Returns true if
   the statement was changed.  */
4514 static bool
4515 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4517 bool changed = false;
4518 gimple *stmt = gsi_stmt (*gsi);
4519 bool nowarning = gimple_no_warning_p (stmt);
4520 unsigned i;
/* Defer overflow warnings; they are re-enabled (and possibly emitted)
   at the end depending on whether folding changed anything.  */
4521 fold_defer_overflow_warnings ();
4523 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4524 after propagation.
4525 ??? This shouldn't be done in generic folding but in the
4526 propagation helpers which also know whether an address was
4527 propagated.
4528 Also canonicalize operand order. */
4529 switch (gimple_code (stmt))
4531 case GIMPLE_ASSIGN:
4532 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4534 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4535 if ((REFERENCE_CLASS_P (*rhs)
4536 || TREE_CODE (*rhs) == ADDR_EXPR)
4537 && maybe_canonicalize_mem_ref_addr (rhs))
4538 changed = true;
4539 tree *lhs = gimple_assign_lhs_ptr (stmt);
4540 if (REFERENCE_CLASS_P (*lhs)
4541 && maybe_canonicalize_mem_ref_addr (lhs))
4542 changed = true;
4544 else
4546 /* Canonicalize operand order. */
4547 enum tree_code code = gimple_assign_rhs_code (stmt);
4548 if (TREE_CODE_CLASS (code) == tcc_comparison
4549 || commutative_tree_code (code)
4550 || commutative_ternary_tree_code (code))
4552 tree rhs1 = gimple_assign_rhs1 (stmt);
4553 tree rhs2 = gimple_assign_rhs2 (stmt);
4554 if (tree_swap_operands_p (rhs1, rhs2))
4556 gimple_assign_set_rhs1 (stmt, rhs2);
4557 gimple_assign_set_rhs2 (stmt, rhs1);
/* Swapping comparison operands requires mirroring the code.  */
4558 if (TREE_CODE_CLASS (code) == tcc_comparison)
4559 gimple_assign_set_rhs_code (stmt,
4560 swap_tree_comparison (code));
4561 changed = true;
4565 break;
4566 case GIMPLE_CALL:
4568 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4570 tree *arg = gimple_call_arg_ptr (stmt, i);
4571 if (REFERENCE_CLASS_P (*arg)
4572 && maybe_canonicalize_mem_ref_addr (arg))
4573 changed = true;
4575 tree *lhs = gimple_call_lhs_ptr (stmt);
4576 if (*lhs
4577 && REFERENCE_CLASS_P (*lhs)
4578 && maybe_canonicalize_mem_ref_addr (lhs))
4579 changed = true;
4580 break;
4582 case GIMPLE_ASM:
4584 gasm *asm_stmt = as_a <gasm *> (stmt);
4585 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4587 tree link = gimple_asm_output_op (asm_stmt, i);
4588 tree op = TREE_VALUE (link);
4589 if (REFERENCE_CLASS_P (op)
4590 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4591 changed = true;
4593 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4595 tree link = gimple_asm_input_op (asm_stmt, i);
4596 tree op = TREE_VALUE (link);
4597 if ((REFERENCE_CLASS_P (op)
4598 || TREE_CODE (op) == ADDR_EXPR)
4599 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4600 changed = true;
4603 break;
4604 case GIMPLE_DEBUG:
4605 if (gimple_debug_bind_p (stmt))
4607 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4608 if (*val
4609 && (REFERENCE_CLASS_P (*val)
4610 || TREE_CODE (*val) == ADDR_EXPR)
4611 && maybe_canonicalize_mem_ref_addr (val))
4612 changed = true;
4614 break;
4615 case GIMPLE_COND:
4617 /* Canonicalize operand order. */
4618 tree lhs = gimple_cond_lhs (stmt);
4619 tree rhs = gimple_cond_rhs (stmt);
4620 if (tree_swap_operands_p (lhs, rhs))
4622 gcond *gc = as_a <gcond *> (stmt);
4623 gimple_cond_set_lhs (gc, rhs);
4624 gimple_cond_set_rhs (gc, lhs);
4625 gimple_cond_set_code (gc,
4626 swap_tree_comparison (gimple_cond_code (gc)));
4627 changed = true;
4630 default:;
4633 /* Dispatch to pattern-based folding. */
4634 if (!inplace
4635 || is_gimple_assign (stmt)
4636 || gimple_code (stmt) == GIMPLE_COND)
4638 gimple_seq seq = NULL;
4639 code_helper rcode;
4640 tree ops[3] = {};
4641 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4642 valueize, valueize))
4644 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4645 changed = true;
4646 else
4647 gimple_seq_discard (seq);
/* Re-fetch STMT; pattern folding may have replaced the statement.  */
4651 stmt = gsi_stmt (*gsi);
4653 /* Fold the main computation performed by the statement. */
4654 switch (gimple_code (stmt))
4656 case GIMPLE_ASSIGN:
4658 /* Try to canonicalize for boolean-typed X the comparisons
4659 X == 0, X == 1, X != 0, and X != 1. */
4660 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4661 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4663 tree lhs = gimple_assign_lhs (stmt);
4664 tree op1 = gimple_assign_rhs1 (stmt);
4665 tree op2 = gimple_assign_rhs2 (stmt);
4666 tree type = TREE_TYPE (op1);
4668 /* Check whether the comparison operands are of the same boolean
4669 type as the result type is.
4670 Check that second operand is an integer-constant with value
4671 one or zero. */
4672 if (TREE_CODE (op2) == INTEGER_CST
4673 && (integer_zerop (op2) || integer_onep (op2))
4674 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4676 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4677 bool is_logical_not = false;
4679 /* X == 0 and X != 1 is a logical-not of X
4680 X == 1 and X != 0 is X */
4681 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4682 || (cmp_code == NE_EXPR && integer_onep (op2)))
4683 is_logical_not = true;
4685 if (is_logical_not == false)
4686 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4687 /* Only for one-bit precision typed X the transformation
4688 !X -> ~X is valid. */
4689 else if (TYPE_PRECISION (type) == 1)
4690 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4691 /* Otherwise we use !X -> X ^ 1. */
4692 else
4693 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4694 build_int_cst (type, 1));
4695 changed = true;
4696 break;
/* Generic RHS folding; only install the new RHS when not INPLACE or
   when it needs no more operand slots than the current statement.  */
4700 unsigned old_num_ops = gimple_num_ops (stmt);
4701 tree lhs = gimple_assign_lhs (stmt);
4702 tree new_rhs = fold_gimple_assign (gsi);
4703 if (new_rhs
4704 && !useless_type_conversion_p (TREE_TYPE (lhs),
4705 TREE_TYPE (new_rhs)))
4706 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4707 if (new_rhs
4708 && (!inplace
4709 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4711 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4712 changed = true;
4714 break;
4717 case GIMPLE_CALL:
4718 changed |= gimple_fold_call (gsi, inplace);
4719 break;
4721 case GIMPLE_ASM:
4722 /* Fold *& in asm operands. */
4724 gasm *asm_stmt = as_a <gasm *> (stmt);
4725 size_t noutputs;
4726 const char **oconstraints;
4727 const char *constraint;
4728 bool allows_mem, allows_reg;
4730 noutputs = gimple_asm_noutputs (asm_stmt);
4731 oconstraints = XALLOCAVEC (const char *, noutputs);
4733 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4735 tree link = gimple_asm_output_op (asm_stmt, i);
4736 tree op = TREE_VALUE (link);
4737 oconstraints[i]
4738 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4739 if (REFERENCE_CLASS_P (op)
4740 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4742 TREE_VALUE (link) = op;
4743 changed = true;
4746 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4748 tree link = gimple_asm_input_op (asm_stmt, i);
4749 tree op = TREE_VALUE (link);
/* Inputs may only be folded to memory if the constraint allows
   memory but not a register.  */
4750 constraint
4751 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4752 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4753 oconstraints, &allows_mem, &allows_reg);
4754 if (REFERENCE_CLASS_P (op)
4755 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4756 != NULL_TREE)
4758 TREE_VALUE (link) = op;
4759 changed = true;
4763 break;
4765 case GIMPLE_DEBUG:
4766 if (gimple_debug_bind_p (stmt))
4768 tree val = gimple_debug_bind_get_value (stmt);
4769 if (val
4770 && REFERENCE_CLASS_P (val))
4772 tree tem = maybe_fold_reference (val, false);
4773 if (tem)
4775 gimple_debug_bind_set_value (stmt, tem);
4776 changed = true;
4779 else if (val
4780 && TREE_CODE (val) == ADDR_EXPR)
4782 tree ref = TREE_OPERAND (val, 0);
4783 tree tem = maybe_fold_reference (ref, false);
4784 if (tem)
4786 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4787 gimple_debug_bind_set_value (stmt, tem);
4788 changed = true;
4792 break;
4794 case GIMPLE_RETURN:
4796 greturn *ret_stmt = as_a<greturn *> (stmt);
4797 tree ret = gimple_return_retval(ret_stmt);
4799 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4801 tree val = valueize (ret);
4802 if (val && val != ret
4803 && may_propagate_copy (ret, val))
4805 gimple_return_set_retval (ret_stmt, val);
4806 changed = true;
4810 break;
4812 default:;
/* Re-fetch STMT again; the folding above may have replaced it.  */
4815 stmt = gsi_stmt (*gsi);
4817 /* Fold *& on the lhs. */
4818 if (gimple_has_lhs (stmt))
4820 tree lhs = gimple_get_lhs (stmt);
4821 if (lhs && REFERENCE_CLASS_P (lhs))
4823 tree new_lhs = maybe_fold_reference (lhs, true);
4824 if (new_lhs)
4826 gimple_set_lhs (stmt, new_lhs);
4827 changed = true;
4832 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4833 return changed;
4836 /* Valueization callback that ends up not following SSA edges:
   always returns NULL_TREE so gimple_simplify never looks through
   SSA definitions.  */
4838 tree
4839 no_follow_ssa_edges (tree)
4841 return NULL_TREE;
4844 /* Valueization callback that ends up following single-use SSA edges only:
   refuses (returns NULL_TREE for) SSA names with more than one use,
   passes everything else through unchanged.  */
4846 tree
4847 follow_single_use_edges (tree val)
4849 if (TREE_CODE (val) == SSA_NAME
4850 && !has_single_use (val))
4851 return NULL_TREE;
4852 return val;
4855 /* Fold the statement pointed to by GSI. In some cases, this function may
4856 replace the whole statement with a new one. Returns true iff folding
4857 makes any changes.
4858 The statement pointed to by GSI should be in valid gimple form but may
4859 be in unfolded state as resulting from for example constant propagation
4860 which can produce *&x = 0. */
4862 bool
4863 fold_stmt (gimple_stmt_iterator *gsi)
/* Not in-place, and do not look through SSA edges.  */
4865 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
/* As fold_stmt above, but with a caller-supplied SSA name valueization
   callback VALUEIZE controlling how SSA edges are followed.  */
4868 bool
4869 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4871 return fold_stmt_1 (gsi, false, valueize);
4874 /* Perform the minimal folding on statement *GSI. Only operations like
4875 *&x created by constant propagation are handled. The statement cannot
4876 be replaced with a new one. Return true if the statement was
4877 changed, false otherwise.
4878 The statement *GSI should be in valid gimple form but may
4879 be in unfolded state as resulting from for example constant propagation
4880 which can produce *&x = 0. */
4882 bool
4883 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4885 gimple *stmt = gsi_stmt (*gsi);
4886 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
/* In-place folding must never replace the statement object itself.  */
4887 gcc_assert (gsi_stmt (*gsi) == stmt);
4888 return changed;
4891 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4892 if EXPR is null or we don't know how.
4893 If non-null, the result always has boolean type. */
4895 static tree
4896 canonicalize_bool (tree expr, bool invert)
4898 if (!expr)
4899 return NULL_TREE;
4900 else if (invert)
/* Inverted case: fold constants directly, turn an SSA name into
   NAME == 0, and invert the code of a comparison.  */
4902 if (integer_nonzerop (expr))
4903 return boolean_false_node;
4904 else if (integer_zerop (expr))
4905 return boolean_true_node;
4906 else if (TREE_CODE (expr) == SSA_NAME)
4907 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4908 build_int_cst (TREE_TYPE (expr), 0));
4909 else if (COMPARISON_CLASS_P (expr))
/* HONOR_NANS is not considered here (invert_tree_comparison with
   honor_nans == false); matches the other uses in this file.  */
4910 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4911 boolean_type_node,
4912 TREE_OPERAND (expr, 0),
4913 TREE_OPERAND (expr, 1));
4914 else
4915 return NULL_TREE;
4917 else
/* Non-inverted case: already-boolean trees pass through; otherwise
   constants fold, SSA names become NAME != 0, comparisons are
   rebuilt with boolean type.  */
4919 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4920 return expr;
4921 if (integer_nonzerop (expr))
4922 return boolean_true_node;
4923 else if (integer_zerop (expr))
4924 return boolean_false_node;
4925 else if (TREE_CODE (expr) == SSA_NAME)
4926 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4927 build_int_cst (TREE_TYPE (expr), 0));
4928 else if (COMPARISON_CLASS_P (expr))
4929 return fold_build2 (TREE_CODE (expr),
4930 boolean_type_node,
4931 TREE_OPERAND (expr, 0),
4932 TREE_OPERAND (expr, 1));
4933 else
4934 return NULL_TREE;
4938 /* Check to see if a boolean expression EXPR is logically equivalent to the
4939 comparison (OP1 CODE OP2). Check for various identities involving
4940 SSA_NAMEs. */
4942 static bool
4943 same_bool_comparison_p (const_tree expr, enum tree_code code,
4944 const_tree op1, const_tree op2)
4946 gimple *s;
4948 /* The obvious case. */
4949 if (TREE_CODE (expr) == code
4950 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4951 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4952 return true;
4954 /* Check for comparing (name, name != 0) and the case where expr
4955 is an SSA_NAME with a definition matching the comparison. */
4956 if (TREE_CODE (expr) == SSA_NAME
4957 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4959 if (operand_equal_p (expr, op1, 0))
4960 return ((code == NE_EXPR && integer_zerop (op2))
4961 || (code == EQ_EXPR && integer_nonzerop (op2)))
4962 s = SSA_NAME_DEF_STMT (expr);
4963 if (is_gimple_assign (s)
4964 && gimple_assign_rhs_code (s) == code
4965 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4966 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4967 return true;
4970 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4971 of name is a comparison, recurse. */
4972 if (TREE_CODE (op1) == SSA_NAME
4973 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4975 s = SSA_NAME_DEF_STMT (op1);
4976 if (is_gimple_assign (s)
4977 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4979 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == nonzero) keeps the comparison's sense;
   (name == 0) / (name != nonzero) inverts it.  */
4980 if ((c == NE_EXPR && integer_zerop (op2))
4981 || (c == EQ_EXPR && integer_nonzerop (op2)))
4982 return same_bool_comparison_p (expr, c,
4983 gimple_assign_rhs1 (s),
4984 gimple_assign_rhs2 (s));
4985 if ((c == EQ_EXPR && integer_zerop (op2))
4986 || (c == NE_EXPR && integer_nonzerop (op2)))
4987 return same_bool_comparison_p (expr,
4988 invert_tree_comparison (c, false),
4989 gimple_assign_rhs1 (s),
4990 gimple_assign_rhs2 (s));
4993 return false;
4996 /* Check to see if two boolean expressions OP1 and OP2 are logically
4997 equivalent. */
4999 static bool
5000 same_bool_result_p (const_tree op1, const_tree op2)
5002 /* Simple cases first. */
5003 if (operand_equal_p (op1, op2, 0))
5004 return true;
5006 /* Check the cases where at least one of the operands is a comparison.
5007 These are a bit smarter than operand_equal_p in that they apply some
5008 identities on SSA_NAMEs. */
5009 if (COMPARISON_CLASS_P (op2)
5010 && same_bool_comparison_p (op1, TREE_CODE (op2),
5011 TREE_OPERAND (op2, 0),
5012 TREE_OPERAND (op2, 1)))
5013 return true;
5014 if (COMPARISON_CLASS_P (op1)
5015 && same_bool_comparison_p (op2, TREE_CODE (op1),
5016 TREE_OPERAND (op1, 0),
5017 TREE_OPERAND (op1, 1)))
5018 return true;
5020 /* Default case. */
5021 return false;
5024 /* Forward declarations for some mutually recursive functions. */
5026 static tree
5027 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5028 enum tree_code code2, tree op2a, tree op2b);
5029 static tree
5030 and_var_with_comparison (tree var, bool invert,
5031 enum tree_code code2, tree op2a, tree op2b);
5032 static tree
5033 and_var_with_comparison_1 (gimple *stmt,
5034 enum tree_code code2, tree op2a, tree op2b);
5035 static tree
5036 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5037 enum tree_code code2, tree op2a, tree op2b);
5038 static tree
5039 or_var_with_comparison (tree var, bool invert,
5040 enum tree_code code2, tree op2a, tree op2b);
5041 static tree
5042 or_var_with_comparison_1 (gimple *stmt,
5043 enum tree_code code2, tree op2a, tree op2b);
5045 /* Helper function for and_comparisons_1: try to simplify the AND of the
5046 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5047 If INVERT is true, invert the value of the VAR before doing the AND.
5048 Return NULL_EXPR if we can't simplify this to a single expression. */
5050 static tree
5051 and_var_with_comparison (tree var, bool invert,
5052 enum tree_code code2, tree op2a, tree op2b)
5054 tree t;
5055 gimple *stmt = SSA_NAME_DEF_STMT (var);
5057 /* We can only deal with variables whose definitions are assignments. */
5058 if (!is_gimple_assign (stmt))
5059 return NULL_TREE;
5061 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5062 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5063 Then we only have to consider the simpler non-inverted cases. */
5064 if (invert)
5065 t = or_var_with_comparison_1 (stmt,
5066 invert_tree_comparison (code2, false),
5067 op2a, op2b);
5068 else
5069 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool undoes the outer negation for the inverted case
   and guarantees a boolean-typed (or NULL) result.  */
5070 return canonicalize_bool (t, invert);
5073 /* Try to simplify the AND of the ssa variable defined by the assignment
5074 STMT with the comparison specified by (OP2A CODE2 OP2B).
5075 Return NULL_EXPR if we can't simplify this to a single expression. */
5077 static tree
5078 and_var_with_comparison_1 (gimple *stmt,
5079 enum tree_code code2, tree op2a, tree op2b)
5081 tree var = gimple_assign_lhs (stmt);
5082 tree true_test_var = NULL_TREE;
5083 tree false_test_var = NULL_TREE;
5084 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5086 /* Check for identities like (var AND (var == 0)) => false. */
5087 if (TREE_CODE (op2a) == SSA_NAME
5088 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5090 if ((code2 == NE_EXPR && integer_zerop (op2b))
5091 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
/* The comparison tests OP2A for truth.  */
5093 true_test_var = op2a;
5094 if (var == true_test_var)
5095 return var;
5097 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5098 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
/* The comparison tests OP2A for falsehood.  */
5100 false_test_var = op2a;
5101 if (var == false_test_var)
5102 return boolean_false_node;
5106 /* If the definition is a comparison, recurse on it. */
5107 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5109 tree t = and_comparisons_1 (innercode,
5110 gimple_assign_rhs1 (stmt),
5111 gimple_assign_rhs2 (stmt),
5112 code2,
5113 op2a,
5114 op2b);
5115 if (t)
5116 return t;
5119 /* If the definition is an AND or OR expression, we may be able to
5120 simplify by reassociating. */
5121 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5122 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5124 tree inner1 = gimple_assign_rhs1 (stmt);
5125 tree inner2 = gimple_assign_rhs2 (stmt);
5126 gimple *s;
5127 tree t;
5128 tree partial = NULL_TREE;
5129 bool is_and = (innercode == BIT_AND_EXPR);
5131 /* Check for boolean identities that don't require recursive examination
5132 of inner1/inner2:
5133 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5134 inner1 AND (inner1 OR inner2) => inner1
5135 !inner1 AND (inner1 AND inner2) => false
5136 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5137 Likewise for similar cases involving inner2. */
5138 if (inner1 == true_test_var)
5139 return (is_and ? var : inner1);
5140 else if (inner2 == true_test_var)
5141 return (is_and ? var : inner2);
5142 else if (inner1 == false_test_var)
5143 return (is_and
5144 ? boolean_false_node
5145 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5146 else if (inner2 == false_test_var)
5147 return (is_and
5148 ? boolean_false_node
5149 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5151 /* Next, redistribute/reassociate the AND across the inner tests.
5152 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5153 if (TREE_CODE (inner1) == SSA_NAME
5154 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5155 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5156 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5157 gimple_assign_rhs1 (s),
5158 gimple_assign_rhs2 (s),
5159 code2, op2a, op2b)))
5161 /* Handle the AND case, where we are reassociating:
5162 (inner1 AND inner2) AND (op2a code2 op2b)
5163 => (t AND inner2)
5164 If the partial result t is a constant, we win. Otherwise
5165 continue on to try reassociating with the other inner test. */
5166 if (is_and)
5168 if (integer_onep (t))
5169 return inner2;
5170 else if (integer_zerop (t))
5171 return boolean_false_node;
5174 /* Handle the OR case, where we are redistributing:
5175 (inner1 OR inner2) AND (op2a code2 op2b)
5176 => (t OR (inner2 AND (op2a code2 op2b))) */
5177 else if (integer_onep (t))
5178 return boolean_true_node;
5180 /* Save partial result for later. */
5181 partial = t;
5184 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5185 if (TREE_CODE (inner2) == SSA_NAME
5186 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5187 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5188 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5189 gimple_assign_rhs1 (s),
5190 gimple_assign_rhs2 (s),
5191 code2, op2a, op2b)))
5193 /* Handle the AND case, where we are reassociating:
5194 (inner1 AND inner2) AND (op2a code2 op2b)
5195 => (inner1 AND t) */
5196 if (is_and)
5198 if (integer_onep (t))
5199 return inner1;
5200 else if (integer_zerop (t))
5201 return boolean_false_node;
5202 /* If both are the same, we can apply the identity
5203 (x AND x) == x. */
5204 else if (partial && same_bool_result_p (t, partial))
5205 return t;
5208 /* Handle the OR case, where we are redistributing:
5209 (inner1 OR inner2) AND (op2a code2 op2b)
5210 => (t OR (inner1 AND (op2a code2 op2b)))
5211 => (t OR partial) */
5212 else
5214 if (integer_onep (t))
5215 return boolean_true_node;
5216 else if (partial)
5218 /* We already got a simplification for the other
5219 operand to the redistributed OR expression. The
5220 interesting case is when at least one is false.
5221 Or, if both are the same, we can apply the identity
5222 (x OR x) == x. */
5223 if (integer_zerop (partial))
5224 return t;
5225 else if (integer_zerop (t))
5226 return partial;
5227 else if (same_bool_result_p (t, partial))
5228 return t;
5233 return NULL_TREE;
5236 /* Try to simplify the AND of two comparisons defined by
5237 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5238 If this can be done without constructing an intermediate value,
5239 return the resulting tree; otherwise NULL_TREE is returned.
5240 This function is deliberately asymmetric as it recurses on SSA_DEFs
5241 in the first comparison but not the second. */
5243 static tree
5244 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5245 enum tree_code code2, tree op2a, tree op2b)
5247 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5249 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5250 if (operand_equal_p (op1a, op2a, 0)
5251 && operand_equal_p (op1b, op2b, 0))
5253 /* Result will be either NULL_TREE, or a combined comparison. */
5254 tree t = combine_comparisons (UNKNOWN_LOCATION,
5255 TRUTH_ANDIF_EXPR, code1, code2,
5256 truth_type, op1a, op1b);
5257 if (t)
5258 return t;
5261 /* Likewise the swapped case of the above. */
5262 if (operand_equal_p (op1a, op2b, 0)
5263 && operand_equal_p (op1b, op2a, 0))
5265 /* Result will be either NULL_TREE, or a combined comparison. */
5266 tree t = combine_comparisons (UNKNOWN_LOCATION,
5267 TRUTH_ANDIF_EXPR, code1,
5268 swap_tree_comparison (code2),
5269 truth_type, op1a, op1b);
5270 if (t)
5271 return t;
5274 /* If both comparisons are of the same value against constants, we might
5275 be able to merge them. */
5276 if (operand_equal_p (op1a, op2a, 0)
5277 && TREE_CODE (op1b) == INTEGER_CST
5278 && TREE_CODE (op2b) == INTEGER_CST)
5280 int cmp = tree_int_cst_compare (op1b, op2b);
5282 /* If we have (op1a == op1b), we should either be able to
5283 return that or FALSE, depending on whether the constant op1b
5284 also satisfies the other comparison against op2b. */
5285 if (code1 == EQ_EXPR)
5287 bool done = true;
5288 bool val;
5289 switch (code2)
5291 case EQ_EXPR: val = (cmp == 0); break;
5292 case NE_EXPR: val = (cmp != 0); break;
5293 case LT_EXPR: val = (cmp < 0); break;
5294 case GT_EXPR: val = (cmp > 0); break;
5295 case LE_EXPR: val = (cmp <= 0); break;
5296 case GE_EXPR: val = (cmp >= 0); break;
5297 default: done = false;
5299 if (done)
5301 if (val)
5302 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5303 else
5304 return boolean_false_node;
5307 /* Likewise if the second comparison is an == comparison. */
5308 else if (code2 == EQ_EXPR)
5310 bool done = true;
5311 bool val;
5312 switch (code1)
5314 case EQ_EXPR: val = (cmp == 0); break;
5315 case NE_EXPR: val = (cmp != 0); break;
5316 case LT_EXPR: val = (cmp > 0); break;
5317 case GT_EXPR: val = (cmp < 0); break;
5318 case LE_EXPR: val = (cmp >= 0); break;
5319 case GE_EXPR: val = (cmp <= 0); break;
5320 default: done = false;
5322 if (done)
5324 if (val)
5325 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5326 else
5327 return boolean_false_node;
5331 /* Same business with inequality tests. */
5332 else if (code1 == NE_EXPR)
5334 bool val;
5335 switch (code2)
5337 case EQ_EXPR: val = (cmp != 0); break;
5338 case NE_EXPR: val = (cmp == 0); break;
5339 case LT_EXPR: val = (cmp >= 0); break;
5340 case GT_EXPR: val = (cmp <= 0); break;
5341 case LE_EXPR: val = (cmp > 0); break;
5342 case GE_EXPR: val = (cmp < 0); break;
5343 default:
5344 val = false;
5346 if (val)
5347 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5349 else if (code2 == NE_EXPR)
5351 bool val;
5352 switch (code1)
5354 case EQ_EXPR: val = (cmp == 0); break;
5355 case NE_EXPR: val = (cmp != 0); break;
5356 case LT_EXPR: val = (cmp <= 0); break;
5357 case GT_EXPR: val = (cmp >= 0); break;
5358 case LE_EXPR: val = (cmp < 0); break;
5359 case GE_EXPR: val = (cmp > 0); break;
5360 default:
5361 val = false;
5363 if (val)
5364 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5367 /* Chose the more restrictive of two < or <= comparisons. */
5368 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5369 && (code2 == LT_EXPR || code2 == LE_EXPR))
5371 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5372 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5373 else
5374 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5377 /* Likewise chose the more restrictive of two > or >= comparisons. */
5378 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5379 && (code2 == GT_EXPR || code2 == GE_EXPR))
5381 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5382 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5383 else
5384 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5387 /* Check for singleton ranges. */
5388 else if (cmp == 0
5389 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5390 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5391 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5393 /* Check for disjoint ranges. */
5394 else if (cmp <= 0
5395 && (code1 == LT_EXPR || code1 == LE_EXPR)
5396 && (code2 == GT_EXPR || code2 == GE_EXPR))
5397 return boolean_false_node;
5398 else if (cmp >= 0
5399 && (code1 == GT_EXPR || code1 == GE_EXPR)
5400 && (code2 == LT_EXPR || code2 == LE_EXPR))
5401 return boolean_false_node;
5404 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5405 NAME's definition is a truth value. See if there are any simplifications
5406 that can be done against the NAME's definition. */
5407 if (TREE_CODE (op1a) == SSA_NAME
5408 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5409 && (integer_zerop (op1b) || integer_onep (op1b)))
5411 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5412 || (code1 == NE_EXPR && integer_onep (op1b)));
5413 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5414 switch (gimple_code (stmt))
5416 case GIMPLE_ASSIGN:
5417 /* Try to simplify by copy-propagating the definition. */
5418 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5420 case GIMPLE_PHI:
5421 /* If every argument to the PHI produces the same result when
5422 ANDed with the second comparison, we win.
5423 Do not do this unless the type is bool since we need a bool
5424 result here anyway. */
5425 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5427 tree result = NULL_TREE;
5428 unsigned i;
5429 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5431 tree arg = gimple_phi_arg_def (stmt, i);
5433 /* If this PHI has itself as an argument, ignore it.
5434 If all the other args produce the same result,
5435 we're still OK. */
5436 if (arg == gimple_phi_result (stmt))
5437 continue;
5438 else if (TREE_CODE (arg) == INTEGER_CST)
5440 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5442 if (!result)
5443 result = boolean_false_node;
5444 else if (!integer_zerop (result))
5445 return NULL_TREE;
5447 else if (!result)
5448 result = fold_build2 (code2, boolean_type_node,
5449 op2a, op2b);
5450 else if (!same_bool_comparison_p (result,
5451 code2, op2a, op2b))
5452 return NULL_TREE;
5454 else if (TREE_CODE (arg) == SSA_NAME
5455 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5457 tree temp;
5458 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5459 /* In simple cases we can look through PHI nodes,
5460 but we have to be careful with loops.
5461 See PR49073. */
5462 if (! dom_info_available_p (CDI_DOMINATORS)
5463 || gimple_bb (def_stmt) == gimple_bb (stmt)
5464 || dominated_by_p (CDI_DOMINATORS,
5465 gimple_bb (def_stmt),
5466 gimple_bb (stmt)))
5467 return NULL_TREE;
5468 temp = and_var_with_comparison (arg, invert, code2,
5469 op2a, op2b);
5470 if (!temp)
5471 return NULL_TREE;
5472 else if (!result)
5473 result = temp;
5474 else if (!same_bool_result_p (result, temp))
5475 return NULL_TREE;
5477 else
5478 return NULL_TREE;
5480 return result;
5483 default:
5484 break;
5487 return NULL_TREE;
5490 /* Try to simplify the AND of two comparisons, specified by
5491 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5492 If this can be simplified to a single expression (without requiring
5493 introducing more SSA variables to hold intermediate values),
5494 return the resulting tree. Otherwise return NULL_TREE.
5495 If the result expression is non-null, it has boolean type. */
5497 tree
5498 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5499 enum tree_code code2, tree op2a, tree op2b)
5501 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5502 if (t)
5503 return t;
5504 else
5505 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5508 /* Helper function for or_comparisons_1: try to simplify the OR of the
5509 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5510 If INVERT is true, invert the value of VAR before doing the OR.
5511 Return NULL_EXPR if we can't simplify this to a single expression. */
5513 static tree
5514 or_var_with_comparison (tree var, bool invert,
5515 enum tree_code code2, tree op2a, tree op2b)
5517 tree t;
5518 gimple *stmt = SSA_NAME_DEF_STMT (var);
5520 /* We can only deal with variables whose definitions are assignments. */
5521 if (!is_gimple_assign (stmt))
5522 return NULL_TREE;
5524 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5525 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5526 Then we only have to consider the simpler non-inverted cases. */
5527 if (invert)
5528 t = and_var_with_comparison_1 (stmt,
5529 invert_tree_comparison (code2, false),
5530 op2a, op2b);
5531 else
5532 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5533 return canonicalize_bool (t, invert);
5536 /* Try to simplify the OR of the ssa variable defined by the assignment
5537 STMT with the comparison specified by (OP2A CODE2 OP2B).
5538 Return NULL_EXPR if we can't simplify this to a single expression. */
5540 static tree
5541 or_var_with_comparison_1 (gimple *stmt,
5542 enum tree_code code2, tree op2a, tree op2b)
5544 tree var = gimple_assign_lhs (stmt);
5545 tree true_test_var = NULL_TREE;
5546 tree false_test_var = NULL_TREE;
5547 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5549 /* Check for identities like (var OR (var != 0)) => true . */
5550 if (TREE_CODE (op2a) == SSA_NAME
5551 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5553 if ((code2 == NE_EXPR && integer_zerop (op2b))
5554 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5556 true_test_var = op2a;
5557 if (var == true_test_var)
5558 return var;
5560 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5561 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5563 false_test_var = op2a;
5564 if (var == false_test_var)
5565 return boolean_true_node;
5569 /* If the definition is a comparison, recurse on it. */
5570 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5572 tree t = or_comparisons_1 (innercode,
5573 gimple_assign_rhs1 (stmt),
5574 gimple_assign_rhs2 (stmt),
5575 code2,
5576 op2a,
5577 op2b);
5578 if (t)
5579 return t;
5582 /* If the definition is an AND or OR expression, we may be able to
5583 simplify by reassociating. */
5584 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5585 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5587 tree inner1 = gimple_assign_rhs1 (stmt);
5588 tree inner2 = gimple_assign_rhs2 (stmt);
5589 gimple *s;
5590 tree t;
5591 tree partial = NULL_TREE;
5592 bool is_or = (innercode == BIT_IOR_EXPR);
5594 /* Check for boolean identities that don't require recursive examination
5595 of inner1/inner2:
5596 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5597 inner1 OR (inner1 AND inner2) => inner1
5598 !inner1 OR (inner1 OR inner2) => true
5599 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5601 if (inner1 == true_test_var)
5602 return (is_or ? var : inner1);
5603 else if (inner2 == true_test_var)
5604 return (is_or ? var : inner2);
5605 else if (inner1 == false_test_var)
5606 return (is_or
5607 ? boolean_true_node
5608 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5609 else if (inner2 == false_test_var)
5610 return (is_or
5611 ? boolean_true_node
5612 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5614 /* Next, redistribute/reassociate the OR across the inner tests.
5615 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5616 if (TREE_CODE (inner1) == SSA_NAME
5617 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5618 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5619 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5620 gimple_assign_rhs1 (s),
5621 gimple_assign_rhs2 (s),
5622 code2, op2a, op2b)))
5624 /* Handle the OR case, where we are reassociating:
5625 (inner1 OR inner2) OR (op2a code2 op2b)
5626 => (t OR inner2)
5627 If the partial result t is a constant, we win. Otherwise
5628 continue on to try reassociating with the other inner test. */
5629 if (is_or)
5631 if (integer_onep (t))
5632 return boolean_true_node;
5633 else if (integer_zerop (t))
5634 return inner2;
5637 /* Handle the AND case, where we are redistributing:
5638 (inner1 AND inner2) OR (op2a code2 op2b)
5639 => (t AND (inner2 OR (op2a code op2b))) */
5640 else if (integer_zerop (t))
5641 return boolean_false_node;
5643 /* Save partial result for later. */
5644 partial = t;
5647 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5648 if (TREE_CODE (inner2) == SSA_NAME
5649 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5650 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5651 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5652 gimple_assign_rhs1 (s),
5653 gimple_assign_rhs2 (s),
5654 code2, op2a, op2b)))
5656 /* Handle the OR case, where we are reassociating:
5657 (inner1 OR inner2) OR (op2a code2 op2b)
5658 => (inner1 OR t)
5659 => (t OR partial) */
5660 if (is_or)
5662 if (integer_zerop (t))
5663 return inner1;
5664 else if (integer_onep (t))
5665 return boolean_true_node;
5666 /* If both are the same, we can apply the identity
5667 (x OR x) == x. */
5668 else if (partial && same_bool_result_p (t, partial))
5669 return t;
5672 /* Handle the AND case, where we are redistributing:
5673 (inner1 AND inner2) OR (op2a code2 op2b)
5674 => (t AND (inner1 OR (op2a code2 op2b)))
5675 => (t AND partial) */
5676 else
5678 if (integer_zerop (t))
5679 return boolean_false_node;
5680 else if (partial)
5682 /* We already got a simplification for the other
5683 operand to the redistributed AND expression. The
5684 interesting case is when at least one is true.
5685 Or, if both are the same, we can apply the identity
5686 (x AND x) == x. */
5687 if (integer_onep (partial))
5688 return t;
5689 else if (integer_onep (t))
5690 return partial;
5691 else if (same_bool_result_p (t, partial))
5692 return t;
5697 return NULL_TREE;
5700 /* Try to simplify the OR of two comparisons defined by
5701 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5702 If this can be done without constructing an intermediate value,
5703 return the resulting tree; otherwise NULL_TREE is returned.
5704 This function is deliberately asymmetric as it recurses on SSA_DEFs
5705 in the first comparison but not the second. */
5707 static tree
5708 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5709 enum tree_code code2, tree op2a, tree op2b)
5711 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5713 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5714 if (operand_equal_p (op1a, op2a, 0)
5715 && operand_equal_p (op1b, op2b, 0))
5717 /* Result will be either NULL_TREE, or a combined comparison. */
5718 tree t = combine_comparisons (UNKNOWN_LOCATION,
5719 TRUTH_ORIF_EXPR, code1, code2,
5720 truth_type, op1a, op1b);
5721 if (t)
5722 return t;
5725 /* Likewise the swapped case of the above. */
5726 if (operand_equal_p (op1a, op2b, 0)
5727 && operand_equal_p (op1b, op2a, 0))
5729 /* Result will be either NULL_TREE, or a combined comparison. */
5730 tree t = combine_comparisons (UNKNOWN_LOCATION,
5731 TRUTH_ORIF_EXPR, code1,
5732 swap_tree_comparison (code2),
5733 truth_type, op1a, op1b);
5734 if (t)
5735 return t;
5738 /* If both comparisons are of the same value against constants, we might
5739 be able to merge them. */
5740 if (operand_equal_p (op1a, op2a, 0)
5741 && TREE_CODE (op1b) == INTEGER_CST
5742 && TREE_CODE (op2b) == INTEGER_CST)
5744 int cmp = tree_int_cst_compare (op1b, op2b);
5746 /* If we have (op1a != op1b), we should either be able to
5747 return that or TRUE, depending on whether the constant op1b
5748 also satisfies the other comparison against op2b. */
5749 if (code1 == NE_EXPR)
5751 bool done = true;
5752 bool val;
5753 switch (code2)
5755 case EQ_EXPR: val = (cmp == 0); break;
5756 case NE_EXPR: val = (cmp != 0); break;
5757 case LT_EXPR: val = (cmp < 0); break;
5758 case GT_EXPR: val = (cmp > 0); break;
5759 case LE_EXPR: val = (cmp <= 0); break;
5760 case GE_EXPR: val = (cmp >= 0); break;
5761 default: done = false;
5763 if (done)
5765 if (val)
5766 return boolean_true_node;
5767 else
5768 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5771 /* Likewise if the second comparison is a != comparison. */
5772 else if (code2 == NE_EXPR)
5774 bool done = true;
5775 bool val;
5776 switch (code1)
5778 case EQ_EXPR: val = (cmp == 0); break;
5779 case NE_EXPR: val = (cmp != 0); break;
5780 case LT_EXPR: val = (cmp > 0); break;
5781 case GT_EXPR: val = (cmp < 0); break;
5782 case LE_EXPR: val = (cmp >= 0); break;
5783 case GE_EXPR: val = (cmp <= 0); break;
5784 default: done = false;
5786 if (done)
5788 if (val)
5789 return boolean_true_node;
5790 else
5791 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5795 /* See if an equality test is redundant with the other comparison. */
5796 else if (code1 == EQ_EXPR)
5798 bool val;
5799 switch (code2)
5801 case EQ_EXPR: val = (cmp == 0); break;
5802 case NE_EXPR: val = (cmp != 0); break;
5803 case LT_EXPR: val = (cmp < 0); break;
5804 case GT_EXPR: val = (cmp > 0); break;
5805 case LE_EXPR: val = (cmp <= 0); break;
5806 case GE_EXPR: val = (cmp >= 0); break;
5807 default:
5808 val = false;
5810 if (val)
5811 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5813 else if (code2 == EQ_EXPR)
5815 bool val;
5816 switch (code1)
5818 case EQ_EXPR: val = (cmp == 0); break;
5819 case NE_EXPR: val = (cmp != 0); break;
5820 case LT_EXPR: val = (cmp > 0); break;
5821 case GT_EXPR: val = (cmp < 0); break;
5822 case LE_EXPR: val = (cmp >= 0); break;
5823 case GE_EXPR: val = (cmp <= 0); break;
5824 default:
5825 val = false;
5827 if (val)
5828 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5831 /* Chose the less restrictive of two < or <= comparisons. */
5832 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5833 && (code2 == LT_EXPR || code2 == LE_EXPR))
5835 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5836 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5837 else
5838 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5841 /* Likewise chose the less restrictive of two > or >= comparisons. */
5842 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5843 && (code2 == GT_EXPR || code2 == GE_EXPR))
5845 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5846 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5847 else
5848 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5851 /* Check for singleton ranges. */
5852 else if (cmp == 0
5853 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5854 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5855 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5857 /* Check for less/greater pairs that don't restrict the range at all. */
5858 else if (cmp >= 0
5859 && (code1 == LT_EXPR || code1 == LE_EXPR)
5860 && (code2 == GT_EXPR || code2 == GE_EXPR))
5861 return boolean_true_node;
5862 else if (cmp <= 0
5863 && (code1 == GT_EXPR || code1 == GE_EXPR)
5864 && (code2 == LT_EXPR || code2 == LE_EXPR))
5865 return boolean_true_node;
5868 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5869 NAME's definition is a truth value. See if there are any simplifications
5870 that can be done against the NAME's definition. */
5871 if (TREE_CODE (op1a) == SSA_NAME
5872 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5873 && (integer_zerop (op1b) || integer_onep (op1b)))
5875 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5876 || (code1 == NE_EXPR && integer_onep (op1b)));
5877 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5878 switch (gimple_code (stmt))
5880 case GIMPLE_ASSIGN:
5881 /* Try to simplify by copy-propagating the definition. */
5882 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5884 case GIMPLE_PHI:
5885 /* If every argument to the PHI produces the same result when
5886 ORed with the second comparison, we win.
5887 Do not do this unless the type is bool since we need a bool
5888 result here anyway. */
5889 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5891 tree result = NULL_TREE;
5892 unsigned i;
5893 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5895 tree arg = gimple_phi_arg_def (stmt, i);
5897 /* If this PHI has itself as an argument, ignore it.
5898 If all the other args produce the same result,
5899 we're still OK. */
5900 if (arg == gimple_phi_result (stmt))
5901 continue;
5902 else if (TREE_CODE (arg) == INTEGER_CST)
5904 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5906 if (!result)
5907 result = boolean_true_node;
5908 else if (!integer_onep (result))
5909 return NULL_TREE;
5911 else if (!result)
5912 result = fold_build2 (code2, boolean_type_node,
5913 op2a, op2b);
5914 else if (!same_bool_comparison_p (result,
5915 code2, op2a, op2b))
5916 return NULL_TREE;
5918 else if (TREE_CODE (arg) == SSA_NAME
5919 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5921 tree temp;
5922 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5923 /* In simple cases we can look through PHI nodes,
5924 but we have to be careful with loops.
5925 See PR49073. */
5926 if (! dom_info_available_p (CDI_DOMINATORS)
5927 || gimple_bb (def_stmt) == gimple_bb (stmt)
5928 || dominated_by_p (CDI_DOMINATORS,
5929 gimple_bb (def_stmt),
5930 gimple_bb (stmt)))
5931 return NULL_TREE;
5932 temp = or_var_with_comparison (arg, invert, code2,
5933 op2a, op2b);
5934 if (!temp)
5935 return NULL_TREE;
5936 else if (!result)
5937 result = temp;
5938 else if (!same_bool_result_p (result, temp))
5939 return NULL_TREE;
5941 else
5942 return NULL_TREE;
5944 return result;
5947 default:
5948 break;
5951 return NULL_TREE;
5954 /* Try to simplify the OR of two comparisons, specified by
5955 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5956 If this can be simplified to a single expression (without requiring
5957 introducing more SSA variables to hold intermediate values),
5958 return the resulting tree. Otherwise return NULL_TREE.
5959 If the result expression is non-null, it has boolean type. */
5961 tree
5962 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5963 enum tree_code code2, tree op2a, tree op2b)
5965 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5966 if (t)
5967 return t;
5968 else
5969 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5973 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5975 Either NULL_TREE, a simplified but non-constant or a constant
5976 is returned.
5978 ??? This should go into a gimple-fold-inline.h file to be eventually
5979 privatized with the single valueize function used in the various TUs
5980 to avoid the indirect function call overhead. */
5982 tree
5983 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5984 tree (*gvalueize) (tree))
5986 code_helper rcode;
5987 tree ops[3] = {};
5988 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5989 edges if there are intermediate VARYING defs. For this reason
5990 do not follow SSA edges here even though SCCVN can technically
5991 just deal fine with that. */
5992 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5994 tree res = NULL_TREE;
5995 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5996 res = ops[0];
5997 else if (mprts_hook)
5998 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5999 if (res)
6001 if (dump_file && dump_flags & TDF_DETAILS)
6003 fprintf (dump_file, "Match-and-simplified ");
6004 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6005 fprintf (dump_file, " to ");
6006 print_generic_expr (dump_file, res);
6007 fprintf (dump_file, "\n");
6009 return res;
6013 location_t loc = gimple_location (stmt);
6014 switch (gimple_code (stmt))
6016 case GIMPLE_ASSIGN:
6018 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6020 switch (get_gimple_rhs_class (subcode))
6022 case GIMPLE_SINGLE_RHS:
6024 tree rhs = gimple_assign_rhs1 (stmt);
6025 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6027 if (TREE_CODE (rhs) == SSA_NAME)
6029 /* If the RHS is an SSA_NAME, return its known constant value,
6030 if any. */
6031 return (*valueize) (rhs);
6033 /* Handle propagating invariant addresses into address
6034 operations. */
6035 else if (TREE_CODE (rhs) == ADDR_EXPR
6036 && !is_gimple_min_invariant (rhs))
6038 HOST_WIDE_INT offset = 0;
6039 tree base;
6040 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6041 &offset,
6042 valueize);
6043 if (base
6044 && (CONSTANT_CLASS_P (base)
6045 || decl_address_invariant_p (base)))
6046 return build_invariant_address (TREE_TYPE (rhs),
6047 base, offset);
6049 else if (TREE_CODE (rhs) == CONSTRUCTOR
6050 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6051 && (CONSTRUCTOR_NELTS (rhs)
6052 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6054 unsigned i, nelts;
6055 tree val;
6057 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6058 auto_vec<tree, 32> vec (nelts);
6059 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6061 val = (*valueize) (val);
6062 if (TREE_CODE (val) == INTEGER_CST
6063 || TREE_CODE (val) == REAL_CST
6064 || TREE_CODE (val) == FIXED_CST)
6065 vec.quick_push (val);
6066 else
6067 return NULL_TREE;
6070 return build_vector (TREE_TYPE (rhs), vec);
6072 if (subcode == OBJ_TYPE_REF)
6074 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6075 /* If callee is constant, we can fold away the wrapper. */
6076 if (is_gimple_min_invariant (val))
6077 return val;
6080 if (kind == tcc_reference)
6082 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6083 || TREE_CODE (rhs) == REALPART_EXPR
6084 || TREE_CODE (rhs) == IMAGPART_EXPR)
6085 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6087 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6088 return fold_unary_loc (EXPR_LOCATION (rhs),
6089 TREE_CODE (rhs),
6090 TREE_TYPE (rhs), val);
6092 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6093 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6095 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6096 return fold_ternary_loc (EXPR_LOCATION (rhs),
6097 TREE_CODE (rhs),
6098 TREE_TYPE (rhs), val,
6099 TREE_OPERAND (rhs, 1),
6100 TREE_OPERAND (rhs, 2));
6102 else if (TREE_CODE (rhs) == MEM_REF
6103 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6105 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6106 if (TREE_CODE (val) == ADDR_EXPR
6107 && is_gimple_min_invariant (val))
6109 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6110 unshare_expr (val),
6111 TREE_OPERAND (rhs, 1));
6112 if (tem)
6113 rhs = tem;
6116 return fold_const_aggregate_ref_1 (rhs, valueize);
6118 else if (kind == tcc_declaration)
6119 return get_symbol_constant_value (rhs);
6120 return rhs;
6123 case GIMPLE_UNARY_RHS:
6124 return NULL_TREE;
6126 case GIMPLE_BINARY_RHS:
6127 /* Translate &x + CST into an invariant form suitable for
6128 further propagation. */
6129 if (subcode == POINTER_PLUS_EXPR)
6131 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6132 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6133 if (TREE_CODE (op0) == ADDR_EXPR
6134 && TREE_CODE (op1) == INTEGER_CST)
6136 tree off = fold_convert (ptr_type_node, op1);
6137 return build_fold_addr_expr_loc
6138 (loc,
6139 fold_build2 (MEM_REF,
6140 TREE_TYPE (TREE_TYPE (op0)),
6141 unshare_expr (op0), off));
6144 /* Canonicalize bool != 0 and bool == 0 appearing after
6145 valueization. While gimple_simplify handles this
6146 it can get confused by the ~X == 1 -> X == 0 transform
6147 which we cant reduce to a SSA name or a constant
6148 (and we have no way to tell gimple_simplify to not
6149 consider those transforms in the first place). */
6150 else if (subcode == EQ_EXPR
6151 || subcode == NE_EXPR)
6153 tree lhs = gimple_assign_lhs (stmt);
6154 tree op0 = gimple_assign_rhs1 (stmt);
6155 if (useless_type_conversion_p (TREE_TYPE (lhs),
6156 TREE_TYPE (op0)))
6158 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6159 op0 = (*valueize) (op0);
6160 if (TREE_CODE (op0) == INTEGER_CST)
6161 std::swap (op0, op1);
6162 if (TREE_CODE (op1) == INTEGER_CST
6163 && ((subcode == NE_EXPR && integer_zerop (op1))
6164 || (subcode == EQ_EXPR && integer_onep (op1))))
6165 return op0;
6168 return NULL_TREE;
6170 case GIMPLE_TERNARY_RHS:
6172 /* Handle ternary operators that can appear in GIMPLE form. */
6173 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6174 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6175 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6176 return fold_ternary_loc (loc, subcode,
6177 gimple_expr_type (stmt), op0, op1, op2);
6180 default:
6181 gcc_unreachable ();
6185 case GIMPLE_CALL:
6187 tree fn;
6188 gcall *call_stmt = as_a <gcall *> (stmt);
6190 if (gimple_call_internal_p (stmt))
6192 enum tree_code subcode = ERROR_MARK;
6193 switch (gimple_call_internal_fn (stmt))
6195 case IFN_UBSAN_CHECK_ADD:
6196 subcode = PLUS_EXPR;
6197 break;
6198 case IFN_UBSAN_CHECK_SUB:
6199 subcode = MINUS_EXPR;
6200 break;
6201 case IFN_UBSAN_CHECK_MUL:
6202 subcode = MULT_EXPR;
6203 break;
6204 case IFN_BUILTIN_EXPECT:
6206 tree arg0 = gimple_call_arg (stmt, 0);
6207 tree op0 = (*valueize) (arg0);
6208 if (TREE_CODE (op0) == INTEGER_CST)
6209 return op0;
6210 return NULL_TREE;
6212 default:
6213 return NULL_TREE;
6215 tree arg0 = gimple_call_arg (stmt, 0);
6216 tree arg1 = gimple_call_arg (stmt, 1);
6217 tree op0 = (*valueize) (arg0);
6218 tree op1 = (*valueize) (arg1);
6220 if (TREE_CODE (op0) != INTEGER_CST
6221 || TREE_CODE (op1) != INTEGER_CST)
6223 switch (subcode)
6225 case MULT_EXPR:
6226 /* x * 0 = 0 * x = 0 without overflow. */
6227 if (integer_zerop (op0) || integer_zerop (op1))
6228 return build_zero_cst (TREE_TYPE (arg0));
6229 break;
6230 case MINUS_EXPR:
6231 /* y - y = 0 without overflow. */
6232 if (operand_equal_p (op0, op1, 0))
6233 return build_zero_cst (TREE_TYPE (arg0));
6234 break;
6235 default:
6236 break;
6239 tree res
6240 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6241 if (res
6242 && TREE_CODE (res) == INTEGER_CST
6243 && !TREE_OVERFLOW (res))
6244 return res;
6245 return NULL_TREE;
6248 fn = (*valueize) (gimple_call_fn (stmt));
6249 if (TREE_CODE (fn) == ADDR_EXPR
6250 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6251 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6252 && gimple_builtin_call_types_compatible_p (stmt,
6253 TREE_OPERAND (fn, 0)))
6255 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6256 tree retval;
6257 unsigned i;
6258 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6259 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6260 retval = fold_builtin_call_array (loc,
6261 gimple_call_return_type (call_stmt),
6262 fn, gimple_call_num_args (stmt), args);
6263 if (retval)
6265 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6266 STRIP_NOPS (retval);
6267 retval = fold_convert (gimple_call_return_type (call_stmt),
6268 retval);
6270 return retval;
6272 return NULL_TREE;
6275 default:
6276 return NULL_TREE;
6280 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6281 Returns NULL_TREE if folding to a constant is not possible, otherwise
6282 returns a constant according to is_gimple_min_invariant. */
6284 tree
6285 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6287 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6288 if (res && is_gimple_min_invariant (res))
6289 return res;
6290 return NULL_TREE;
6294 /* The following set of functions are supposed to fold references using
6295 their constant initializers. */
6297 /* See if we can find constructor defining value of BASE.
6298 When we know the consructor with constant offset (such as
6299 base is array[40] and we do know constructor of array), then
6300 BIT_OFFSET is adjusted accordingly.
6302 As a special case, return error_mark_node when constructor
6303 is not explicitly available, but it is known to be zero
6304 such as 'static const int a;'. */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold a constant byte offset of the MEM_REF into *BIT_OFFSET and
	 strip the address operator so we can look at the base object.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move bits; just look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Resolve the nested reference and accumulate its constant bit
	 offset; give up when the access position is not exactly known.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor comes from and is passed down for canonicalization.
   Returns the folded value, a zero constant for memory the constructor does
   not mention, or NULL_TREE when the reference cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-NULL, is the decl the constructor comes from.
   Returns the folded value, a zero constant for memory the constructor does
   not initialize, or NULL_TREE when the access spans multiple fields.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array member
	 (NULL field_size) extends to the end of the object.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor originates from.  Returns the folded constant or NULL_TREE.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Otherwise walk into the CONSTRUCTOR, dispatching on array-like
     (indexed) versus record-like (field-based) layout.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
6559 /* Return the tree representing the element referenced by T if T is an
6560 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6561 names using VALUEIZE. Return NULL_TREE otherwise. */
6563 tree
6564 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6566 tree ctor, idx, base;
6567 HOST_WIDE_INT offset, size, max_size;
6568 tree tem;
6569 bool reverse;
6571 if (TREE_THIS_VOLATILE (t))
6572 return NULL_TREE;
6574 if (DECL_P (t))
6575 return get_symbol_constant_value (t);
6577 tem = fold_read_from_constant_string (t);
6578 if (tem)
6579 return tem;
6581 switch (TREE_CODE (t))
6583 case ARRAY_REF:
6584 case ARRAY_RANGE_REF:
6585 /* Constant indexes are handled well by get_base_constructor.
6586 Only special case variable offsets.
6587 FIXME: This code can't handle nested references with variable indexes
6588 (they will be handled only by iteration of ccp). Perhaps we can bring
6589 get_ref_base_and_extent here and make it use a valueize callback. */
6590 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6591 && valueize
6592 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6593 && TREE_CODE (idx) == INTEGER_CST)
6595 tree low_bound, unit_size;
6597 /* If the resulting bit-offset is constant, track it. */
6598 if ((low_bound = array_ref_low_bound (t),
6599 TREE_CODE (low_bound) == INTEGER_CST)
6600 && (unit_size = array_ref_element_size (t),
6601 tree_fits_uhwi_p (unit_size)))
6603 offset_int woffset
6604 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6605 TYPE_PRECISION (TREE_TYPE (idx)));
6607 if (wi::fits_shwi_p (woffset))
6609 offset = woffset.to_shwi ();
6610 /* TODO: This code seems wrong, multiply then check
6611 to see if it fits. */
6612 offset *= tree_to_uhwi (unit_size);
6613 offset *= BITS_PER_UNIT;
6615 base = TREE_OPERAND (t, 0);
6616 ctor = get_base_constructor (base, &offset, valueize);
6617 /* Empty constructor. Always fold to 0. */
6618 if (ctor == error_mark_node)
6619 return build_zero_cst (TREE_TYPE (t));
6620 /* Out of bound array access. Value is undefined,
6621 but don't fold. */
6622 if (offset < 0)
6623 return NULL_TREE;
6624 /* We can not determine ctor. */
6625 if (!ctor)
6626 return NULL_TREE;
6627 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6628 tree_to_uhwi (unit_size)
6629 * BITS_PER_UNIT,
6630 base);
6634 /* Fallthru. */
6636 case COMPONENT_REF:
6637 case BIT_FIELD_REF:
6638 case TARGET_MEM_REF:
6639 case MEM_REF:
6640 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6641 ctor = get_base_constructor (base, &offset, valueize);
6643 /* Empty constructor. Always fold to 0. */
6644 if (ctor == error_mark_node)
6645 return build_zero_cst (TREE_TYPE (t));
6646 /* We do not know precise address. */
6647 if (max_size == -1 || max_size != size)
6648 return NULL_TREE;
6649 /* We can not determine ctor. */
6650 if (!ctor)
6651 return NULL_TREE;
6653 /* Out of bound array access. Value is undefined, but don't fold. */
6654 if (offset < 0)
6655 return NULL_TREE;
6657 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6658 base);
6660 case REALPART_EXPR:
6661 case IMAGPART_EXPR:
6663 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6664 if (c && TREE_CODE (c) == COMPLEX_CST)
6665 return fold_build1_loc (EXPR_LOCATION (t),
6666 TREE_CODE (t), TREE_TYPE (t), c);
6667 break;
6670 default:
6671 break;
6674 return NULL_TREE;
6677 tree
6678 fold_const_aggregate_ref (tree t)
6680 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Turn OFFSET plus the TOKENth slot into a bit offset into the table.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6792 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6793 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6794 KNOWN_BINFO carries the binfo describing the true type of
6795 OBJ_TYPE_REF_OBJECT(REF).
6796 Set CAN_REFER if non-NULL to false if method
6797 is not referable or if the virtual table is ill-formed (such as rewriten
6798 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6800 tree
6801 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6802 bool *can_refer)
6804 unsigned HOST_WIDE_INT offset;
6805 tree v;
6807 v = BINFO_VTABLE (known_binfo);
6808 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6809 if (!v)
6810 return NULL_TREE;
6812 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6814 if (can_refer)
6815 *can_refer = false;
6816 return NULL_TREE;
6818 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the extracted element lies fully inside
	     the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6942 /* Return true if CODE is an operation that when operating on signed
6943 integer types involves undefined behavior on overflow and the
6944 operation can be expressed with unsigned arithmetic. */
6946 bool
6947 arith_code_with_undefined_signed_overflow (tree_code code)
6949 switch (code)
6951 case PLUS_EXPR:
6952 case MINUS_EXPR:
6953 case MULT_EXPR:
6954 case NEGATE_EXPR:
6955 case POINTER_PLUS_EXPR:
6956 return true;
6957 default:
6958 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every operand of STMT to the unsigned type, emitting the
     conversion statements into STMTS ahead of STMT itself.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic in the unsigned type is plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Finally convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7000 /* The valueization hook we use for the gimple_build API simplification.
7001 This makes us match fold_buildN behavior by only combining with
7002 statements in the sequence(s) we are currently building. */
7004 static tree
7005 gimple_build_valueize (tree op)
7007 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7008 return op;
7009 return NULL_TREE;
7012 /* Build the expression CODE OP0 of type TYPE with location LOC,
7013 simplifying it first if possible. Returns the built
7014 expression value and appends statements possibly defining it
7015 to SEQ. */
7017 tree
7018 gimple_build (gimple_seq *seq, location_t loc,
7019 enum tree_code code, tree type, tree op0)
7021 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7022 if (!res)
7024 res = create_tmp_reg_or_ssa_name (type);
7025 gimple *stmt;
7026 if (code == REALPART_EXPR
7027 || code == IMAGPART_EXPR
7028 || code == VIEW_CONVERT_EXPR)
7029 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7030 else
7031 stmt = gimple_build_assign (res, code, op0);
7032 gimple_set_location (stmt, loc);
7033 gimple_seq_add_stmt_without_update (seq, stmt);
7035 return res;
7038 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7039 simplifying it first if possible. Returns the built
7040 expression value and appends statements possibly defining it
7041 to SEQ. */
7043 tree
7044 gimple_build (gimple_seq *seq, location_t loc,
7045 enum tree_code code, tree type, tree op0, tree op1)
7047 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7048 if (!res)
7050 res = create_tmp_reg_or_ssa_name (type);
7051 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7052 gimple_set_location (stmt, loc);
7053 gimple_seq_add_stmt_without_update (seq, stmt);
7055 return res;
7058 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7059 simplifying it first if possible. Returns the built
7060 expression value and appends statements possibly defining it
7061 to SEQ. */
7063 tree
7064 gimple_build (gimple_seq *seq, location_t loc,
7065 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7067 tree res = gimple_simplify (code, type, op0, op1, op2,
7068 seq, gimple_build_valueize);
7069 if (!res)
7071 res = create_tmp_reg_or_ssa_name (type);
7072 gimple *stmt;
7073 if (code == BIT_FIELD_REF)
7074 stmt = gimple_build_assign (res, code,
7075 build3 (code, type, op0, op1, op2));
7076 else
7077 stmt = gimple_build_assign (res, code, op0, op1, op2);
7078 gimple_set_location (stmt, loc);
7079 gimple_seq_add_stmt_without_update (seq, stmt);
7081 return res;
7084 /* Build the call FN (ARG0) with a result of type TYPE
7085 (or no result if TYPE is void) with location LOC,
7086 simplifying it first if possible. Returns the built
7087 expression value (or NULL_TREE if TYPE is void) and appends
7088 statements possibly defining it to SEQ. */
7090 tree
7091 gimple_build (gimple_seq *seq, location_t loc,
7092 enum built_in_function fn, tree type, tree arg0)
7094 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7095 if (!res)
7097 tree decl = builtin_decl_implicit (fn);
7098 gimple *stmt = gimple_build_call (decl, 1, arg0);
7099 if (!VOID_TYPE_P (type))
7101 res = create_tmp_reg_or_ssa_name (type);
7102 gimple_call_set_lhs (stmt, res);
7104 gimple_set_location (stmt, loc);
7105 gimple_seq_add_stmt_without_update (seq, stmt);
7107 return res;
7110 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7111 (or no result if TYPE is void) with location LOC,
7112 simplifying it first if possible. Returns the built
7113 expression value (or NULL_TREE if TYPE is void) and appends
7114 statements possibly defining it to SEQ. */
7116 tree
7117 gimple_build (gimple_seq *seq, location_t loc,
7118 enum built_in_function fn, tree type, tree arg0, tree arg1)
7120 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7121 if (!res)
7123 tree decl = builtin_decl_implicit (fn);
7124 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7125 if (!VOID_TYPE_P (type))
7127 res = create_tmp_reg_or_ssa_name (type);
7128 gimple_call_set_lhs (stmt, res);
7130 gimple_set_location (stmt, loc);
7131 gimple_seq_add_stmt_without_update (seq, stmt);
7133 return res;
7136 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7137 (or no result if TYPE is void) with location LOC,
7138 simplifying it first if possible. Returns the built
7139 expression value (or NULL_TREE if TYPE is void) and appends
7140 statements possibly defining it to SEQ. */
7142 tree
7143 gimple_build (gimple_seq *seq, location_t loc,
7144 enum built_in_function fn, tree type,
7145 tree arg0, tree arg1, tree arg2)
7147 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7148 seq, gimple_build_valueize);
7149 if (!res)
7151 tree decl = builtin_decl_implicit (fn);
7152 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7153 if (!VOID_TYPE_P (type))
7155 res = create_tmp_reg_or_ssa_name (type);
7156 gimple_call_set_lhs (stmt, res);
7158 gimple_set_location (stmt, loc);
7159 gimple_seq_add_stmt_without_update (seq, stmt);
7161 return res;
7164 /* Build the conversion (TYPE) OP with a result of type TYPE
7165 with location LOC if such conversion is neccesary in GIMPLE,
7166 simplifying it first.
7167 Returns the built expression value and appends
7168 statements possibly defining it to SEQ. */
7170 tree
7171 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7173 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7174 return op;
7175 return gimple_build (seq, loc, NOP_EXPR, type, op);
7178 /* Build the conversion (ptrofftype) OP with a result of a type
7179 compatible with ptrofftype with location LOC if such conversion
7180 is neccesary in GIMPLE, simplifying it first.
7181 Returns the built expression value and appends
7182 statements possibly defining it to SEQ. */
7184 tree
7185 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7187 if (ptrofftype_p (TREE_TYPE (op)))
7188 return op;
7189 return gimple_convert (seq, loc, sizetype, op);
7192 /* Build a vector of type TYPE in which each element has the value OP.
7193 Return a gimple value for the result, appending any new statements
7194 to SEQ. */
7196 tree
7197 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7198 tree op)
7200 tree res, vec = build_vector_from_val (type, op);
7201 if (is_gimple_val (vec))
7202 return vec;
7203 if (gimple_in_ssa_p (cfun))
7204 res = make_ssa_name (type);
7205 else
7206 res = create_tmp_reg (type);
7207 gimple *stmt = gimple_build_assign (res, vec);
7208 gimple_set_location (stmt, loc);
7209 gimple_seq_add_stmt_without_update (seq, stmt);
7210 return res;
7213 /* Build a vector of type TYPE in which the elements have the values
7214 given by ELTS. Return a gimple value for the result, appending any
7215 new instructions to SEQ. */
7217 tree
7218 gimple_build_vector (gimple_seq *seq, location_t loc, tree type,
7219 vec<tree> elts)
7221 unsigned int nelts = elts.length ();
7222 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
7223 for (unsigned int i = 0; i < nelts; ++i)
7224 if (!TREE_CONSTANT (elts[i]))
7226 vec<constructor_elt, va_gc> *v;
7227 vec_alloc (v, nelts);
7228 for (i = 0; i < nelts; ++i)
7229 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
7231 tree res;
7232 if (gimple_in_ssa_p (cfun))
7233 res = make_ssa_name (type);
7234 else
7235 res = create_tmp_reg (type);
7236 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7237 gimple_set_location (stmt, loc);
7238 gimple_seq_add_stmt_without_update (seq, stmt);
7239 return res;
7241 return build_vector (type, elts);
7244 /* Return true if the result of assignment STMT is known to be non-negative.
7245 If the return value is based on the assumption that signed overflow is
7246 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7247 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7249 static bool
7250 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7251 int depth)
7253 enum tree_code code = gimple_assign_rhs_code (stmt);
7254 switch (get_gimple_rhs_class (code))
7256 case GIMPLE_UNARY_RHS:
7257 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7258 gimple_expr_type (stmt),
7259 gimple_assign_rhs1 (stmt),
7260 strict_overflow_p, depth);
7261 case GIMPLE_BINARY_RHS:
7262 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7263 gimple_expr_type (stmt),
7264 gimple_assign_rhs1 (stmt),
7265 gimple_assign_rhs2 (stmt),
7266 strict_overflow_p, depth);
7267 case GIMPLE_TERNARY_RHS:
7268 return false;
7269 case GIMPLE_SINGLE_RHS:
7270 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7271 strict_overflow_p, depth);
7272 case GIMPLE_INVALID_RHS:
7273 break;
7275 gcc_unreachable ();
7278 /* Return true if return value of call STMT is known to be non-negative.
7279 If the return value is based on the assumption that signed overflow is
7280 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7281 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7283 static bool
7284 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7285 int depth)
7287 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7288 gimple_call_arg (stmt, 0) : NULL_TREE;
7289 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7290 gimple_call_arg (stmt, 1) : NULL_TREE;
7292 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7293 gimple_call_combined_fn (stmt),
7294 arg0,
7295 arg1,
7296 strict_overflow_p, depth);
7299 /* Return true if return value of call STMT is known to be non-negative.
7300 If the return value is based on the assumption that signed overflow is
7301 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7302 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7304 static bool
7305 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7306 int depth)
7308 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7310 tree arg = gimple_phi_arg_def (stmt, i);
7311 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7312 return false;
7314 return true;
7317 /* Return true if STMT is known to compute a non-negative value.
7318 If the return value is based on the assumption that signed overflow is
7319 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7320 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7322 bool
7323 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7324 int depth)
7326 switch (gimple_code (stmt))
7328 case GIMPLE_ASSIGN:
7329 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7330 depth);
7331 case GIMPLE_CALL:
7332 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7333 depth);
7334 case GIMPLE_PHI:
7335 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7336 depth);
7337 default:
7338 return false;
7342 /* Return true if the floating-point value computed by assignment STMT
7343 is known to have an integer value. We also allow +Inf, -Inf and NaN
7344 to be considered integer values. Return false for signaling NaN.
7346 DEPTH is the current nesting depth of the query. */
7348 static bool
7349 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7351 enum tree_code code = gimple_assign_rhs_code (stmt);
7352 switch (get_gimple_rhs_class (code))
7354 case GIMPLE_UNARY_RHS:
7355 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7356 gimple_assign_rhs1 (stmt), depth);
7357 case GIMPLE_BINARY_RHS:
7358 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7359 gimple_assign_rhs1 (stmt),
7360 gimple_assign_rhs2 (stmt), depth);
7361 case GIMPLE_TERNARY_RHS:
7362 return false;
7363 case GIMPLE_SINGLE_RHS:
7364 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7365 case GIMPLE_INVALID_RHS:
7366 break;
7368 gcc_unreachable ();
7371 /* Return true if the floating-point value computed by call STMT is known
7372 to have an integer value. We also allow +Inf, -Inf and NaN to be
7373 considered integer values. Return false for signaling NaN.
7375 DEPTH is the current nesting depth of the query. */
7377 static bool
7378 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7380 tree arg0 = (gimple_call_num_args (stmt) > 0
7381 ? gimple_call_arg (stmt, 0)
7382 : NULL_TREE);
7383 tree arg1 = (gimple_call_num_args (stmt) > 1
7384 ? gimple_call_arg (stmt, 1)
7385 : NULL_TREE);
7386 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7387 arg0, arg1, depth);
7390 /* Return true if the floating-point result of phi STMT is known to have
7391 an integer value. We also allow +Inf, -Inf and NaN to be considered
7392 integer values. Return false for signaling NaN.
7394 DEPTH is the current nesting depth of the query. */
7396 static bool
7397 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7399 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7401 tree arg = gimple_phi_arg_def (stmt, i);
7402 if (!integer_valued_real_single_p (arg, depth + 1))
7403 return false;
7405 return true;
7408 /* Return true if the floating-point value computed by STMT is known
7409 to have an integer value. We also allow +Inf, -Inf and NaN to be
7410 considered integer values. Return false for signaling NaN.
7412 DEPTH is the current nesting depth of the query. */
7414 bool
7415 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7417 switch (gimple_code (stmt))
7419 case GIMPLE_ASSIGN:
7420 return gimple_assign_integer_valued_real_p (stmt, depth);
7421 case GIMPLE_CALL:
7422 return gimple_call_integer_valued_real_p (stmt, depth);
7423 case GIMPLE_PHI:
7424 return gimple_phi_integer_valued_real_p (stmt, depth);
7425 default:
7426 return false;