* doc/generic.texi (ANNOTATE_EXPR): Document 3rd operand.
[official-gcc.git] / gcc / gimple-fold.c
blobea8f92eab7bb7e23812f0a6959fdcf6d4b950fc1
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-object-size.h"
44 #include "tree-ssa.h"
45 #include "tree-ssa-propagate.h"
46 #include "ipa-utils.h"
47 #include "tree-ssa-address.h"
48 #include "langhooks.h"
49 #include "gimplify-me.h"
50 #include "dbgcnt.h"
51 #include "builtins.h"
52 #include "tree-eh.h"
53 #include "gimple-match.h"
54 #include "gomp-constants.h"
55 #include "optabs-query.h"
56 #include "omp-general.h"
57 #include "ipa-chkp.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. DWARF-only) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they was not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined into its callers has no body to refer to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
167 /* Create a temporary for TYPE for a statement STMT. If the current function
168 is in SSA form, a SSA name is created. Otherwise a temporary register
169 is made. */
171 tree
172 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
174 if (gimple_in_ssa_p (cfun))
175 return make_ssa_name (type, stmt);
176 else
177 return create_tmp_reg (type);
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the referenced base
   cannot be used from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite ptr + CST into &MEM[ptr + CST] so the result is an ADDR_EXPR
     that the following code (and is_gimple_min_invariant) can handle.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the base symbol is not referencable from this unit
	 (see can_refer_decl_in_current_unit_p above).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      /* Taking the address of a variable makes it addressable.  */
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type the caller handed in.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
242 /* If SYM is a constant variable with known value, return the value.
243 NULL_TREE is returned otherwise. */
245 tree
246 get_symbol_constant_value (tree sym)
248 tree val = ctor_for_folding (sym);
249 if (val != error_mark_node)
251 if (val)
253 val = canonicalize_constructor_val (unshare_expr (val), sym);
254 if (val && is_gimple_min_invariant (val))
255 return val;
256 else
257 return NULL_TREE;
259 /* Variables declared 'const' without an initializer
260 have zero as the initializer if they may not be
261 overridden at link or run time. */
262 if (!val
263 && is_gimple_reg_type (TREE_TYPE (sym)))
264 return build_zero_cst (TREE_TYPE (sym));
267 return NULL_TREE;
272 /* Subroutine of fold_stmt. We perform several simplifications of the
273 memory reference tree EXPR and make sure to re-gimplify them properly
274 after propagation of constant addresses. IS_LHS is true if the
275 reference is supposed to be an lvalue. */
277 static tree
278 maybe_fold_reference (tree expr, bool is_lhs)
280 tree result;
282 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
283 || TREE_CODE (expr) == REALPART_EXPR
284 || TREE_CODE (expr) == IMAGPART_EXPR)
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
286 return fold_unary_loc (EXPR_LOCATION (expr),
287 TREE_CODE (expr),
288 TREE_TYPE (expr),
289 TREE_OPERAND (expr, 0));
290 else if (TREE_CODE (expr) == BIT_FIELD_REF
291 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
292 return fold_ternary_loc (EXPR_LOCATION (expr),
293 TREE_CODE (expr),
294 TREE_TYPE (expr),
295 TREE_OPERAND (expr, 0),
296 TREE_OPERAND (expr, 1),
297 TREE_OPERAND (expr, 2));
299 if (!is_lhs
300 && (result = fold_const_aggregate_ref (expr))
301 && is_gimple_min_invariant (result))
302 return result;
304 return NULL_TREE;
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value; leave them alone.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to resolve the virtual method address via the known
	       set of polymorphic call targets.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to p (with a type conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A store or a call with side-effects needs a VDEF.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first one seen walking
	     backward) reuses the original statement's VDEF so that
	     downstream uses stay valid; earlier ones get fresh names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is ignored; gimplify EXPR only for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR to a value and assign it to the call's lhs, making
	 the assignment the last statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
578 /* Replace the call at *GSI with the gimple value VAL. */
580 void
581 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
583 gimple *stmt = gsi_stmt (*gsi);
584 tree lhs = gimple_call_lhs (stmt);
585 gimple *repl;
586 if (lhs)
588 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
589 val = fold_convert (TREE_TYPE (lhs), val);
590 repl = gimple_build_assign (lhs, val);
592 else
593 repl = gimple_build_nop ();
594 tree vdef = gimple_vdef (stmt);
595 if (vdef && TREE_CODE (vdef) == SSA_NAME)
597 unlink_stmt_vdef (stmt);
598 release_ssa_name (vdef);
600 gsi_replace (gsi, repl, false);
603 /* Replace the call at *GSI with the new call REPL and fold that
604 again. */
606 static void
607 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
609 gimple *stmt = gsi_stmt (*gsi);
610 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
611 gimple_set_location (repl, gimple_location (stmt));
612 if (gimple_vdef (stmt)
613 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
615 gimple_set_vdef (repl, gimple_vdef (stmt));
616 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
618 if (gimple_vuse (stmt))
619 gimple_set_vuse (repl, gimple_vuse (stmt));
620 gsi_replace (gsi, repl, false);
621 fold_stmt (gsi);
624 /* Return true if VAR is a VAR_DECL or a component thereof. */
626 static bool
627 var_decl_component_p (tree var)
629 tree inner = var;
630 while (handled_component_p (inner))
631 inner = TREE_OPERAND (inner, 0);
632 return SSA_VAR_P (inner);
/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Range information is only available for SSA names.  */
  if (TREE_CODE (size) != SSA_NAME)
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  /* An anti-range ~[1, N] with N >= SSIZE_MAX leaves zero as the only
     value a valid object size can take.  */
  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}
665 /* Fold function call to builtin mem{{,p}cpy,move}. Return
666 false if no simplification can be made.
667 If ENDP is 0, return DEST (like memcpy).
668 If ENDP is 1, return DEST+LEN (like mempcpy).
669 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
670 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
671 (memmove). */
673 static bool
674 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
675 tree dest, tree src, int endp)
677 gimple *stmt = gsi_stmt (*gsi);
678 tree lhs = gimple_call_lhs (stmt);
679 tree len = gimple_call_arg (stmt, 2);
680 tree destvar, srcvar;
681 location_t loc = gimple_location (stmt);
683 /* If the LEN parameter is a constant zero or in range where
684 the only valid value is zero, return DEST. */
685 if (size_must_be_zero_p (len))
687 gimple *repl;
688 if (gimple_call_lhs (stmt))
689 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
690 else
691 repl = gimple_build_nop ();
692 tree vdef = gimple_vdef (stmt);
693 if (vdef && TREE_CODE (vdef) == SSA_NAME)
695 unlink_stmt_vdef (stmt);
696 release_ssa_name (vdef);
698 gsi_replace (gsi, repl, false);
699 return true;
702 /* If SRC and DEST are the same (and not volatile), return
703 DEST{,+LEN,+LEN-1}. */
704 if (operand_equal_p (src, dest, 0))
706 unlink_stmt_vdef (stmt);
707 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
708 release_ssa_name (gimple_vdef (stmt));
709 if (!lhs)
711 gsi_replace (gsi, gimple_build_nop (), false);
712 return true;
714 goto done;
716 else
718 tree srctype, desttype;
719 unsigned int src_align, dest_align;
720 tree off0;
722 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
723 pointers as wide integer) and also may result in huge function
724 size because of inlined bounds copy. Thus don't inline for
725 functions we want to instrument. */
726 if (flag_check_pointer_bounds
727 && chkp_instrumentable_p (cfun->decl)
728 /* Even if data may contain pointers we can inline if copy
729 less than a pointer size. */
730 && (!tree_fits_uhwi_p (len)
731 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
732 return false;
734 /* Build accesses at offset zero with a ref-all character type. */
735 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
736 ptr_mode, true), 0);
738 /* If we can perform the copy efficiently with first doing all loads
739 and then all stores inline it that way. Currently efficiently
740 means that we can load all the memory into a single integer
741 register which is what MOVE_MAX gives us. */
742 src_align = get_pointer_alignment (src);
743 dest_align = get_pointer_alignment (dest);
744 if (tree_fits_uhwi_p (len)
745 && compare_tree_int (len, MOVE_MAX) <= 0
746 /* ??? Don't transform copies from strings with known length this
747 confuses the tree-ssa-strlen.c. This doesn't handle
748 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
749 reason. */
750 && !c_strlen (src, 2))
752 unsigned ilen = tree_to_uhwi (len);
753 if (pow2p_hwi (ilen))
755 scalar_int_mode mode;
756 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
757 if (type
758 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
759 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
760 /* If the destination pointer is not aligned we must be able
761 to emit an unaligned store. */
762 && (dest_align >= GET_MODE_ALIGNMENT (mode)
763 || !targetm.slow_unaligned_access (mode, dest_align)
764 || (optab_handler (movmisalign_optab, mode)
765 != CODE_FOR_nothing)))
767 tree srctype = type;
768 tree desttype = type;
769 if (src_align < GET_MODE_ALIGNMENT (mode))
770 srctype = build_aligned_type (type, src_align);
771 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
772 tree tem = fold_const_aggregate_ref (srcmem);
773 if (tem)
774 srcmem = tem;
775 else if (src_align < GET_MODE_ALIGNMENT (mode)
776 && targetm.slow_unaligned_access (mode, src_align)
777 && (optab_handler (movmisalign_optab, mode)
778 == CODE_FOR_nothing))
779 srcmem = NULL_TREE;
780 if (srcmem)
782 gimple *new_stmt;
783 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
785 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
786 srcmem
787 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
788 new_stmt);
789 gimple_assign_set_lhs (new_stmt, srcmem);
790 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
791 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
793 if (dest_align < GET_MODE_ALIGNMENT (mode))
794 desttype = build_aligned_type (type, dest_align);
795 new_stmt
796 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
797 dest, off0),
798 srcmem);
799 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
800 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
801 if (gimple_vdef (new_stmt)
802 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
803 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
804 if (!lhs)
806 gsi_replace (gsi, new_stmt, false);
807 return true;
809 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
810 goto done;
816 if (endp == 3)
818 /* Both DEST and SRC must be pointer types.
819 ??? This is what old code did. Is the testing for pointer types
820 really mandatory?
822 If either SRC is readonly or length is 1, we can use memcpy. */
823 if (!dest_align || !src_align)
824 return false;
825 if (readonly_data_expr (src)
826 || (tree_fits_uhwi_p (len)
827 && (MIN (src_align, dest_align) / BITS_PER_UNIT
828 >= tree_to_uhwi (len))))
830 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
831 if (!fn)
832 return false;
833 gimple_call_set_fndecl (stmt, fn);
834 gimple_call_set_arg (stmt, 0, dest);
835 gimple_call_set_arg (stmt, 1, src);
836 fold_stmt (gsi);
837 return true;
840 /* If *src and *dest can't overlap, optimize into memcpy as well. */
841 if (TREE_CODE (src) == ADDR_EXPR
842 && TREE_CODE (dest) == ADDR_EXPR)
844 tree src_base, dest_base, fn;
845 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
846 HOST_WIDE_INT maxsize;
848 srcvar = TREE_OPERAND (src, 0);
849 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
850 if (src_base == NULL)
851 src_base = srcvar;
852 destvar = TREE_OPERAND (dest, 0);
853 dest_base = get_addr_base_and_unit_offset (destvar,
854 &dest_offset);
855 if (dest_base == NULL)
856 dest_base = destvar;
857 if (tree_fits_uhwi_p (len))
858 maxsize = tree_to_uhwi (len);
859 else
860 maxsize = -1;
861 if (SSA_VAR_P (src_base)
862 && SSA_VAR_P (dest_base))
864 if (operand_equal_p (src_base, dest_base, 0)
865 && ranges_overlap_p (src_offset, maxsize,
866 dest_offset, maxsize))
867 return false;
869 else if (TREE_CODE (src_base) == MEM_REF
870 && TREE_CODE (dest_base) == MEM_REF)
872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
873 TREE_OPERAND (dest_base, 0), 0))
874 return false;
875 offset_int off = mem_ref_offset (src_base) + src_offset;
876 if (!wi::fits_shwi_p (off))
877 return false;
878 src_offset = off.to_shwi ();
880 off = mem_ref_offset (dest_base) + dest_offset;
881 if (!wi::fits_shwi_p (off))
882 return false;
883 dest_offset = off.to_shwi ();
884 if (ranges_overlap_p (src_offset, maxsize,
885 dest_offset, maxsize))
886 return false;
888 else
889 return false;
891 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
892 if (!fn)
893 return false;
894 gimple_call_set_fndecl (stmt, fn);
895 gimple_call_set_arg (stmt, 0, dest);
896 gimple_call_set_arg (stmt, 1, src);
897 fold_stmt (gsi);
898 return true;
901 /* If the destination and source do not alias optimize into
902 memcpy as well. */
903 if ((is_gimple_min_invariant (dest)
904 || TREE_CODE (dest) == SSA_NAME)
905 && (is_gimple_min_invariant (src)
906 || TREE_CODE (src) == SSA_NAME))
908 ao_ref destr, srcr;
909 ao_ref_init_from_ptr_and_size (&destr, dest, len);
910 ao_ref_init_from_ptr_and_size (&srcr, src, len);
911 if (!refs_may_alias_p_1 (&destr, &srcr, false))
913 tree fn;
914 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
915 if (!fn)
916 return false;
917 gimple_call_set_fndecl (stmt, fn);
918 gimple_call_set_arg (stmt, 0, dest);
919 gimple_call_set_arg (stmt, 1, src);
920 fold_stmt (gsi);
921 return true;
925 return false;
928 if (!tree_fits_shwi_p (len))
929 return false;
930 /* FIXME:
931 This logic lose for arguments like (type *)malloc (sizeof (type)),
932 since we strip the casts of up to VOID return value from malloc.
933 Perhaps we ought to inherit type from non-VOID argument here? */
934 STRIP_NOPS (src);
935 STRIP_NOPS (dest);
936 if (!POINTER_TYPE_P (TREE_TYPE (src))
937 || !POINTER_TYPE_P (TREE_TYPE (dest)))
938 return false;
939 /* In the following try to find a type that is most natural to be
940 used for the memcpy source and destination and that allows
941 the most optimization when memcpy is turned into a plain assignment
942 using that type. In theory we could always use a char[len] type
943 but that only gains us that the destination and source possibly
944 no longer will have their address taken. */
945 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
946 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
948 tree tem = TREE_OPERAND (src, 0);
949 STRIP_NOPS (tem);
950 if (tem != TREE_OPERAND (src, 0))
951 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
953 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
955 tree tem = TREE_OPERAND (dest, 0);
956 STRIP_NOPS (tem);
957 if (tem != TREE_OPERAND (dest, 0))
958 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
960 srctype = TREE_TYPE (TREE_TYPE (src));
961 if (TREE_CODE (srctype) == ARRAY_TYPE
962 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
964 srctype = TREE_TYPE (srctype);
965 STRIP_NOPS (src);
966 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
968 desttype = TREE_TYPE (TREE_TYPE (dest));
969 if (TREE_CODE (desttype) == ARRAY_TYPE
970 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
972 desttype = TREE_TYPE (desttype);
973 STRIP_NOPS (dest);
974 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
976 if (TREE_ADDRESSABLE (srctype)
977 || TREE_ADDRESSABLE (desttype))
978 return false;
980 /* Make sure we are not copying using a floating-point mode or
981 a type whose size possibly does not match its precision. */
982 if (FLOAT_MODE_P (TYPE_MODE (desttype))
983 || TREE_CODE (desttype) == BOOLEAN_TYPE
984 || TREE_CODE (desttype) == ENUMERAL_TYPE)
985 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
986 if (FLOAT_MODE_P (TYPE_MODE (srctype))
987 || TREE_CODE (srctype) == BOOLEAN_TYPE
988 || TREE_CODE (srctype) == ENUMERAL_TYPE)
989 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
990 if (!srctype)
991 srctype = desttype;
992 if (!desttype)
993 desttype = srctype;
994 if (!srctype)
995 return false;
997 src_align = get_pointer_alignment (src);
998 dest_align = get_pointer_alignment (dest);
999 if (dest_align < TYPE_ALIGN (desttype)
1000 || src_align < TYPE_ALIGN (srctype))
1001 return false;
1003 destvar = dest;
1004 STRIP_NOPS (destvar);
1005 if (TREE_CODE (destvar) == ADDR_EXPR
1006 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1007 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1008 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1009 else
1010 destvar = NULL_TREE;
1012 srcvar = src;
1013 STRIP_NOPS (srcvar);
1014 if (TREE_CODE (srcvar) == ADDR_EXPR
1015 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1016 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1018 if (!destvar
1019 || src_align >= TYPE_ALIGN (desttype))
1020 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1021 srcvar, off0);
1022 else if (!STRICT_ALIGNMENT)
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1028 else
1029 srcvar = NULL_TREE;
1031 else
1032 srcvar = NULL_TREE;
1034 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1035 return false;
1037 if (srcvar == NULL_TREE)
1039 STRIP_NOPS (src);
1040 if (src_align >= TYPE_ALIGN (desttype))
1041 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1042 else
1044 if (STRICT_ALIGNMENT)
1045 return false;
1046 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1047 src_align);
1048 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1051 else if (destvar == NULL_TREE)
1053 STRIP_NOPS (dest);
1054 if (dest_align >= TYPE_ALIGN (srctype))
1055 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1056 else
1058 if (STRICT_ALIGNMENT)
1059 return false;
1060 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1061 dest_align);
1062 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1066 gimple *new_stmt;
1067 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1069 tree tem = fold_const_aggregate_ref (srcvar);
1070 if (tem)
1071 srcvar = tem;
1072 if (! is_gimple_min_invariant (srcvar))
1074 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1075 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1076 new_stmt);
1077 gimple_assign_set_lhs (new_stmt, srcvar);
1078 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1079 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1082 new_stmt = gimple_build_assign (destvar, srcvar);
1083 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1084 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1085 if (gimple_vdef (new_stmt)
1086 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1087 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1088 if (!lhs)
1090 gsi_replace (gsi, new_stmt, false);
1091 return true;
1093 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1096 done:
1097 gimple_seq stmts = NULL;
1098 if (endp == 0 || endp == 3)
1099 len = NULL_TREE;
1100 else if (endp == 2)
1101 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1102 ssize_int (1));
1103 if (endp == 2 || endp == 1)
1105 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1106 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1107 TREE_TYPE (dest), dest, len);
1110 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1111 gimple *repl = gimple_build_assign (lhs, dest);
1112 gsi_replace (gsi, repl, false);
1113 return true;
1116 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1117 to built-in memcmp (a, b, len). */
1119 static bool
1120 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1122 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1124 if (!fn)
1125 return false;
1127 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1129 gimple *stmt = gsi_stmt (*gsi);
1130 tree a = gimple_call_arg (stmt, 0);
1131 tree b = gimple_call_arg (stmt, 1);
1132 tree len = gimple_call_arg (stmt, 2);
1134 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1135 replace_call_with_call_and_fold (gsi, repl);
1137 return true;
1140 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1141 to built-in memmove (dest, src, len). */
1143 static bool
1144 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1146 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1148 if (!fn)
1149 return false;
1151 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1152 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1153 len) into memmove (dest, src, len). */
1155 gimple *stmt = gsi_stmt (*gsi);
1156 tree src = gimple_call_arg (stmt, 0);
1157 tree dest = gimple_call_arg (stmt, 1);
1158 tree len = gimple_call_arg (stmt, 2);
1160 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1161 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1162 replace_call_with_call_and_fold (gsi, repl);
1164 return true;
1167 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1168 to built-in memset (dest, 0, len). */
1170 static bool
1171 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1173 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1175 if (!fn)
1176 return false;
1178 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1180 gimple *stmt = gsi_stmt (*gsi);
1181 tree dest = gimple_call_arg (stmt, 0);
1182 tree len = gimple_call_arg (stmt, 1);
1184 gimple_seq seq = NULL;
1185 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1186 gimple_seq_add_stmt_without_update (&seq, repl);
1187 gsi_replace_with_seq_vops (gsi, seq);
1188 fold_stmt (gsi);
1190 return true;
1193 /* Fold function call to builtin memset or bzero at *GSI setting the
1194 memory of size LEN to VAL. Return whether a simplification was made. */
1196 static bool
1197 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1199 gimple *stmt = gsi_stmt (*gsi);
1200 tree etype;
1201 unsigned HOST_WIDE_INT length, cval;
1203 /* If the LEN parameter is zero, return DEST. */
1204 if (integer_zerop (len))
1206 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1207 return true;
1210 if (! tree_fits_uhwi_p (len))
1211 return false;
1213 if (TREE_CODE (c) != INTEGER_CST)
1214 return false;
1216 tree dest = gimple_call_arg (stmt, 0);
1217 tree var = dest;
1218 if (TREE_CODE (var) != ADDR_EXPR)
1219 return false;
1221 var = TREE_OPERAND (var, 0);
1222 if (TREE_THIS_VOLATILE (var))
1223 return false;
1225 etype = TREE_TYPE (var);
1226 if (TREE_CODE (etype) == ARRAY_TYPE)
1227 etype = TREE_TYPE (etype);
1229 if (!INTEGRAL_TYPE_P (etype)
1230 && !POINTER_TYPE_P (etype))
1231 return NULL_TREE;
1233 if (! var_decl_component_p (var))
1234 return NULL_TREE;
1236 length = tree_to_uhwi (len);
1237 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1238 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1239 return NULL_TREE;
1241 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1242 return NULL_TREE;
1244 if (integer_zerop (c))
1245 cval = 0;
1246 else
1248 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1249 return NULL_TREE;
1251 cval = TREE_INT_CST_LOW (c);
1252 cval &= 0xff;
1253 cval |= cval << 8;
1254 cval |= cval << 16;
1255 cval |= (cval << 31) << 1;
1258 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1259 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1260 gimple_set_vuse (store, gimple_vuse (stmt));
1261 tree vdef = gimple_vdef (stmt);
1262 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1264 gimple_set_vdef (store, gimple_vdef (stmt));
1265 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1267 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1268 if (gimple_call_lhs (stmt))
1270 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1271 gsi_replace (gsi, asgn, false);
1273 else
1275 gimple_stmt_iterator gsi2 = *gsi;
1276 gsi_prev (gsi);
1277 gsi_remove (&gsi2, true);
1280 return true;
1284 /* Obtain the minimum and maximum string length or minimum and maximum
1285 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1286 If ARG is an SSA name variable, follow its use-def chains. When
1287 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1288 if we are unable to determine the length or value, return False.
1289 VISITED is a bitmap of visited variables.
1290 TYPE is 0 if string length should be obtained, 1 for maximum string
1291 length and 2 for maximum value ARG can have.
1292 When FUZZY is set and the length of a string cannot be determined,
1293 the function instead considers as the maximum possible length the
1294 size of a character array it may refer to.
1295 Set *FLEXP to true if the range of the string lengths has been
1296 obtained from the upper bound of an array at the end of a struct.
1297 Such an array may hold a string that's longer than its upper bound
1298 due to it being used as a poor-man's flexible array member. */
1300 static bool
1301 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1302 bool fuzzy, bool *flexp)
1304 tree var, val;
1305 gimple *def_stmt;
1307 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1308 but MINLEN may be cleared during the execution of the function. */
1309 tree *minlen = length;
1310 tree *const maxlen = length + 1;
1312 if (TREE_CODE (arg) != SSA_NAME)
1314 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1315 if (TREE_CODE (arg) == ADDR_EXPR
1316 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1317 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1319 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1320 if (TREE_CODE (aop0) == INDIRECT_REF
1321 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1322 return get_range_strlen (TREE_OPERAND (aop0, 0),
1323 length, visited, type, fuzzy, flexp);
1326 if (type == 2)
1328 val = arg;
1329 if (TREE_CODE (val) != INTEGER_CST
1330 || tree_int_cst_sgn (val) < 0)
1331 return false;
1333 else
1334 val = c_strlen (arg, 1);
1336 if (!val && fuzzy)
1338 if (TREE_CODE (arg) == ADDR_EXPR)
1339 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1340 visited, type, fuzzy, flexp);
1342 if (TREE_CODE (arg) == COMPONENT_REF
1343 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1345 /* Use the type of the member array to determine the upper
1346 bound on the length of the array. This may be overly
1347 optimistic if the array itself isn't NUL-terminated and
1348 the caller relies on the subsequent member to contain
1349 the NUL.
1350 Set *FLEXP to true if the array whose bound is being
1351 used is at the end of a struct. */
1352 if (array_at_struct_end_p (arg))
1353 *flexp = true;
1355 arg = TREE_OPERAND (arg, 1);
1356 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1357 if (!val || integer_zerop (val))
1358 return false;
1359 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1360 integer_one_node);
1361 /* Set the minimum size to zero since the string in
1362 the array could have zero length. */
1363 *minlen = ssize_int (0);
1367 if (!val)
1368 return false;
1370 if (minlen
1371 && (!*minlen
1372 || (type > 0
1373 && TREE_CODE (*minlen) == INTEGER_CST
1374 && TREE_CODE (val) == INTEGER_CST
1375 && tree_int_cst_lt (val, *minlen))))
1376 *minlen = val;
1378 if (*maxlen)
1380 if (type > 0)
1382 if (TREE_CODE (*maxlen) != INTEGER_CST
1383 || TREE_CODE (val) != INTEGER_CST)
1384 return false;
1386 if (tree_int_cst_lt (*maxlen, val))
1387 *maxlen = val;
1388 return true;
1390 else if (simple_cst_equal (val, *maxlen) != 1)
1391 return false;
1394 *maxlen = val;
1395 return true;
1398 /* If ARG is registered for SSA update we cannot look at its defining
1399 statement. */
1400 if (name_registered_for_update_p (arg))
1401 return false;
1403 /* If we were already here, break the infinite cycle. */
1404 if (!*visited)
1405 *visited = BITMAP_ALLOC (NULL);
1406 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1407 return true;
1409 var = arg;
1410 def_stmt = SSA_NAME_DEF_STMT (var);
1412 switch (gimple_code (def_stmt))
1414 case GIMPLE_ASSIGN:
1415 /* The RHS of the statement defining VAR must either have a
1416 constant length or come from another SSA_NAME with a constant
1417 length. */
1418 if (gimple_assign_single_p (def_stmt)
1419 || gimple_assign_unary_nop_p (def_stmt))
1421 tree rhs = gimple_assign_rhs1 (def_stmt);
1422 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1424 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1426 tree op2 = gimple_assign_rhs2 (def_stmt);
1427 tree op3 = gimple_assign_rhs3 (def_stmt);
1428 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1429 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1431 return false;
1433 case GIMPLE_PHI:
1435 /* All the arguments of the PHI node must have the same constant
1436 length. */
1437 unsigned i;
1439 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1441 tree arg = gimple_phi_arg (def_stmt, i)->def;
1443 /* If this PHI has itself as an argument, we cannot
1444 determine the string length of this argument. However,
1445 if we can find a constant string length for the other
1446 PHI args then we can still be sure that this is a
1447 constant string length. So be optimistic and just
1448 continue with the next argument. */
1449 if (arg == gimple_phi_result (def_stmt))
1450 continue;
1452 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1454 if (fuzzy)
1455 *maxlen = build_all_ones_cst (size_type_node);
1456 else
1457 return false;
1461 return true;
1463 default:
1464 return false;
1468 /* Determine the minimum and maximum value or string length that ARG
1469 refers to and store each in the first two elements of MINMAXLEN.
1470 For expressions that point to strings of unknown lengths that are
1471 character arrays, use the upper bound of the array as the maximum
1472 length. For example, given an expression like 'x ? array : "xyz"'
1473 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1474 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1475 stored in array.
1476 Return true if the range of the string lengths has been obtained
1477 from the upper bound of an array at the end of a struct. Such
1478 an array may hold a string that's longer than its upper bound
1479 due to it being used as a poor-man's flexible array member. */
1481 bool
1482 get_range_strlen (tree arg, tree minmaxlen[2])
1484 bitmap visited = NULL;
1486 minmaxlen[0] = NULL_TREE;
1487 minmaxlen[1] = NULL_TREE;
1489 bool flexarray = false;
1490 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1492 if (visited)
1493 BITMAP_FREE (visited);
1495 return flexarray;
1498 tree
1499 get_maxval_strlen (tree arg, int type)
1501 bitmap visited = NULL;
1502 tree len[2] = { NULL_TREE, NULL_TREE };
1504 bool dummy;
1505 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1506 len[1] = NULL_TREE;
1507 if (visited)
1508 BITMAP_FREE (visited);
1510 return len[1];
1514 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1515 If LEN is not NULL, it represents the length of the string to be
1516 copied. Return NULL_TREE if no simplification can be made. */
1518 static bool
1519 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1520 tree dest, tree src)
1522 location_t loc = gimple_location (gsi_stmt (*gsi));
1523 tree fn;
1525 /* If SRC and DEST are the same (and not volatile), return DEST. */
1526 if (operand_equal_p (src, dest, 0))
1528 replace_call_with_value (gsi, dest);
1529 return true;
1532 if (optimize_function_for_size_p (cfun))
1533 return false;
1535 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1536 if (!fn)
1537 return false;
1539 tree len = get_maxval_strlen (src, 0);
1540 if (!len)
1541 return false;
1543 len = fold_convert_loc (loc, size_type_node, len);
1544 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1545 len = force_gimple_operand_gsi (gsi, len, true,
1546 NULL_TREE, true, GSI_SAME_STMT);
1547 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1548 replace_call_with_call_and_fold (gsi, repl);
1549 return true;
1552 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1553 If SLEN is not NULL, it represents the length of the source string.
1554 Return NULL_TREE if no simplification can be made. */
1556 static bool
1557 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1558 tree dest, tree src, tree len)
1560 gimple *stmt = gsi_stmt (*gsi);
1561 location_t loc = gimple_location (stmt);
1562 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1564 /* If the LEN parameter is zero, return DEST. */
1565 if (integer_zerop (len))
1567 /* Avoid warning if the destination refers to a an array/pointer
1568 decorate with attribute nonstring. */
1569 if (!nonstring)
1571 tree fndecl = gimple_call_fndecl (stmt);
1572 gcall *call = as_a <gcall *> (stmt);
1574 /* Warn about the lack of nul termination: the result is not
1575 a (nul-terminated) string. */
1576 tree slen = get_maxval_strlen (src, 0);
1577 if (slen && !integer_zerop (slen))
1578 warning_at (loc, OPT_Wstringop_truncation,
1579 "%G%qD destination unchanged after copying no bytes "
1580 "from a string of length %E",
1581 call, fndecl, slen);
1582 else
1583 warning_at (loc, OPT_Wstringop_truncation,
1584 "%G%qD destination unchanged after copying no bytes",
1585 call, fndecl);
1588 replace_call_with_value (gsi, dest);
1589 return true;
1592 /* We can't compare slen with len as constants below if len is not a
1593 constant. */
1594 if (TREE_CODE (len) != INTEGER_CST)
1595 return false;
1597 /* Now, we must be passed a constant src ptr parameter. */
1598 tree slen = get_maxval_strlen (src, 0);
1599 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1600 return false;
1602 /* The size of the source string including the terminating nul. */
1603 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1605 /* We do not support simplification of this case, though we do
1606 support it when expanding trees into RTL. */
1607 /* FIXME: generate a call to __builtin_memset. */
1608 if (tree_int_cst_lt (ssize, len))
1609 return false;
1611 if (!nonstring)
1613 if (tree_int_cst_lt (len, slen))
1615 tree fndecl = gimple_call_fndecl (stmt);
1616 gcall *call = as_a <gcall *> (stmt);
1618 warning_at (loc, OPT_Wstringop_truncation,
1619 (tree_int_cst_equal (size_one_node, len)
1620 ? G_("%G%qD output truncated copying %E byte "
1621 "from a string of length %E")
1622 : G_("%G%qD output truncated copying %E bytes "
1623 "from a string of length %E")),
1624 call, fndecl, len, slen);
1626 else if (tree_int_cst_equal (len, slen))
1628 tree fndecl = gimple_call_fndecl (stmt);
1629 gcall *call = as_a <gcall *> (stmt);
1631 warning_at (loc, OPT_Wstringop_truncation,
1632 (tree_int_cst_equal (size_one_node, len)
1633 ? G_("%G%qD output truncated before terminating nul "
1634 "copying %E byte from a string of the same "
1635 "length")
1636 : G_("%G%qD output truncated before terminating nul "
1637 "copying %E bytes from a string of the same "
1638 "length")),
1639 call, fndecl, len);
1643 /* OK transform into builtin memcpy. */
1644 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1645 if (!fn)
1646 return false;
1648 len = fold_convert_loc (loc, size_type_node, len);
1649 len = force_gimple_operand_gsi (gsi, len, true,
1650 NULL_TREE, true, GSI_SAME_STMT);
1651 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1652 replace_call_with_call_and_fold (gsi, repl);
1654 return true;
1657 /* Fold function call to builtin strchr or strrchr.
1658 If both arguments are constant, evaluate and fold the result,
1659 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1660 In general strlen is significantly faster than strchr
1661 due to being a simpler operation. */
1662 static bool
1663 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1665 gimple *stmt = gsi_stmt (*gsi);
1666 tree str = gimple_call_arg (stmt, 0);
1667 tree c = gimple_call_arg (stmt, 1);
1668 location_t loc = gimple_location (stmt);
1669 const char *p;
1670 char ch;
1672 if (!gimple_call_lhs (stmt))
1673 return false;
1675 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1677 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1679 if (p1 == NULL)
1681 replace_call_with_value (gsi, integer_zero_node);
1682 return true;
1685 tree len = build_int_cst (size_type_node, p1 - p);
1686 gimple_seq stmts = NULL;
1687 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1688 POINTER_PLUS_EXPR, str, len);
1689 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1690 gsi_replace_with_seq_vops (gsi, stmts);
1691 return true;
1694 if (!integer_zerop (c))
1695 return false;
1697 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1698 if (is_strrchr && optimize_function_for_size_p (cfun))
1700 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1702 if (strchr_fn)
1704 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1705 replace_call_with_call_and_fold (gsi, repl);
1706 return true;
1709 return false;
1712 tree len;
1713 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1715 if (!strlen_fn)
1716 return false;
1718 /* Create newstr = strlen (str). */
1719 gimple_seq stmts = NULL;
1720 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1721 gimple_set_location (new_stmt, loc);
1722 len = create_tmp_reg_or_ssa_name (size_type_node);
1723 gimple_call_set_lhs (new_stmt, len);
1724 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1726 /* Create (str p+ strlen (str)). */
1727 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1728 POINTER_PLUS_EXPR, str, len);
1729 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1730 gsi_replace_with_seq_vops (gsi, stmts);
1731 /* gsi now points at the assignment to the lhs, get a
1732 stmt iterator to the strlen.
1733 ??? We can't use gsi_for_stmt as that doesn't work when the
1734 CFG isn't built yet. */
1735 gimple_stmt_iterator gsi2 = *gsi;
1736 gsi_prev (&gsi2);
1737 fold_stmt (&gsi2);
1738 return true;
1741 /* Fold function call to builtin strstr.
1742 If both arguments are constant, evaluate and fold the result,
1743 additionally fold strstr (x, "") into x and strstr (x, "c")
1744 into strchr (x, 'c'). */
1745 static bool
1746 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1748 gimple *stmt = gsi_stmt (*gsi);
1749 tree haystack = gimple_call_arg (stmt, 0);
1750 tree needle = gimple_call_arg (stmt, 1);
1751 const char *p, *q;
1753 if (!gimple_call_lhs (stmt))
1754 return false;
1756 q = c_getstr (needle);
1757 if (q == NULL)
1758 return false;
1760 if ((p = c_getstr (haystack)))
1762 const char *r = strstr (p, q);
1764 if (r == NULL)
1766 replace_call_with_value (gsi, integer_zero_node);
1767 return true;
1770 tree len = build_int_cst (size_type_node, r - p);
1771 gimple_seq stmts = NULL;
1772 gimple *new_stmt
1773 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1774 haystack, len);
1775 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1776 gsi_replace_with_seq_vops (gsi, stmts);
1777 return true;
1780 /* For strstr (x, "") return x. */
1781 if (q[0] == '\0')
1783 replace_call_with_value (gsi, haystack);
1784 return true;
1787 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1788 if (q[1] == '\0')
1790 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1791 if (strchr_fn)
1793 tree c = build_int_cst (integer_type_node, q[0]);
1794 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1795 replace_call_with_call_and_fold (gsi, repl);
1796 return true;
1800 return false;
1803 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1804 to the call.
1806 Return NULL_TREE if no simplification was possible, otherwise return the
1807 simplified form of the call as a tree.
1809 The simplified form may be a constant or other expression which
1810 computes the same value, but in a more efficient manner (including
1811 calls to other builtin functions).
1813 The call may contain arguments which need to be evaluated, but
1814 which are not useful to determine the result of the call. In
1815 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1816 COMPOUND_EXPR will be an argument which must be evaluated.
1817 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1818 COMPOUND_EXPR in the chain will contain the tree for the simplified
1819 form of the builtin function call. */
1821 static bool
1822 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1824 gimple *stmt = gsi_stmt (*gsi);
1825 location_t loc = gimple_location (stmt);
1827 const char *p = c_getstr (src);
1829 /* If the string length is zero, return the dst parameter. */
1830 if (p && *p == '\0')
1832 replace_call_with_value (gsi, dst);
1833 return true;
1836 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1837 return false;
1839 /* See if we can store by pieces into (dst + strlen(dst)). */
1840 tree newdst;
1841 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1842 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1844 if (!strlen_fn || !memcpy_fn)
1845 return false;
1847 /* If the length of the source string isn't computable don't
1848 split strcat into strlen and memcpy. */
1849 tree len = get_maxval_strlen (src, 0);
1850 if (! len)
1851 return false;
1853 /* Create strlen (dst). */
1854 gimple_seq stmts = NULL, stmts2;
1855 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1856 gimple_set_location (repl, loc);
1857 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1858 gimple_call_set_lhs (repl, newdst);
1859 gimple_seq_add_stmt_without_update (&stmts, repl);
1861 /* Create (dst p+ strlen (dst)). */
1862 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1863 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1864 gimple_seq_add_seq_without_update (&stmts, stmts2);
1866 len = fold_convert_loc (loc, size_type_node, len);
1867 len = size_binop_loc (loc, PLUS_EXPR, len,
1868 build_int_cst (size_type_node, 1));
1869 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1870 gimple_seq_add_seq_without_update (&stmts, stmts2);
1872 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1873 gimple_seq_add_stmt_without_update (&stmts, repl);
1874 if (gimple_call_lhs (stmt))
1876 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1877 gimple_seq_add_stmt_without_update (&stmts, repl);
1878 gsi_replace_with_seq_vops (gsi, stmts);
1879 /* gsi now points at the assignment to the lhs, get a
1880 stmt iterator to the memcpy call.
1881 ??? We can't use gsi_for_stmt as that doesn't work when the
1882 CFG isn't built yet. */
1883 gimple_stmt_iterator gsi2 = *gsi;
1884 gsi_prev (&gsi2);
1885 fold_stmt (&gsi2);
1887 else
1889 gsi_replace_with_seq_vops (gsi, stmts);
1890 fold_stmt (gsi);
1892 return true;
1895 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1896 are the arguments to the call. */
1898 static bool
1899 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1901 gimple *stmt = gsi_stmt (*gsi);
1902 tree dest = gimple_call_arg (stmt, 0);
1903 tree src = gimple_call_arg (stmt, 1);
1904 tree size = gimple_call_arg (stmt, 2);
1905 tree fn;
1906 const char *p;
1909 p = c_getstr (src);
1910 /* If the SRC parameter is "", return DEST. */
1911 if (p && *p == '\0')
1913 replace_call_with_value (gsi, dest);
1914 return true;
1917 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1918 return false;
1920 /* If __builtin_strcat_chk is used, assume strcat is available. */
1921 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1922 if (!fn)
1923 return false;
1925 gimple *repl = gimple_build_call (fn, 2, dest, src);
1926 replace_call_with_call_and_fold (gsi, repl);
1927 return true;
1930 /* Simplify a call to the strncat builtin. */
1932 static bool
1933 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1935 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1936 tree dst = gimple_call_arg (stmt, 0);
1937 tree src = gimple_call_arg (stmt, 1);
1938 tree len = gimple_call_arg (stmt, 2);
1940 const char *p = c_getstr (src);
1942 /* If the requested length is zero, or the src parameter string
1943 length is zero, return the dst parameter. */
1944 if (integer_zerop (len) || (p && *p == '\0'))
1946 replace_call_with_value (gsi, dst);
1947 return true;
1950 if (TREE_CODE (len) != INTEGER_CST || !p)
1951 return false;
1953 unsigned srclen = strlen (p);
1955 int cmpsrc = compare_tree_int (len, srclen);
1957 /* Return early if the requested len is less than the string length.
1958 Warnings will be issued elsewhere later. */
1959 if (cmpsrc < 0)
1960 return false;
1962 unsigned HOST_WIDE_INT dstsize;
1964 bool nowarn = gimple_no_warning_p (stmt);
1966 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
1968 int cmpdst = compare_tree_int (len, dstsize);
1970 if (cmpdst >= 0)
1972 tree fndecl = gimple_call_fndecl (stmt);
1974 /* Strncat copies (at most) LEN bytes and always appends
1975 the terminating NUL so the specified bound should never
1976 be equal to (or greater than) the size of the destination.
1977 If it is, the copy could overflow. */
1978 location_t loc = gimple_location (stmt);
1979 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
1980 cmpdst == 0
1981 ? G_("%G%qD specified bound %E equals "
1982 "destination size")
1983 : G_("%G%qD specified bound %E exceeds "
1984 "destination size %wu"),
1985 stmt, fndecl, len, dstsize);
1986 if (nowarn)
1987 gimple_set_no_warning (stmt, true);
1991 if (!nowarn && cmpsrc == 0)
1993 tree fndecl = gimple_call_fndecl (stmt);
1995 /* To avoid certain truncation the specified bound should also
1996 not be equal to (or less than) the length of the source. */
1997 location_t loc = gimple_location (stmt);
1998 if (warning_at (loc, OPT_Wstringop_overflow_,
1999 "%G%qD specified bound %E equals source length",
2000 stmt, fndecl, len))
2001 gimple_set_no_warning (stmt, true);
2004 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2006 /* If the replacement _DECL isn't initialized, don't do the
2007 transformation. */
2008 if (!fn)
2009 return false;
2011 /* Otherwise, emit a call to strcat. */
2012 gcall *repl = gimple_build_call (fn, 2, dst, src);
2013 replace_call_with_call_and_fold (gsi, repl);
2014 return true;
2017 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2018 LEN, and SIZE. */
2020 static bool
2021 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2023 gimple *stmt = gsi_stmt (*gsi);
2024 tree dest = gimple_call_arg (stmt, 0);
2025 tree src = gimple_call_arg (stmt, 1);
2026 tree len = gimple_call_arg (stmt, 2);
2027 tree size = gimple_call_arg (stmt, 3);
2028 tree fn;
2029 const char *p;
2031 p = c_getstr (src);
2032 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2033 if ((p && *p == '\0')
2034 || integer_zerop (len))
2036 replace_call_with_value (gsi, dest);
2037 return true;
2040 if (! tree_fits_uhwi_p (size))
2041 return false;
2043 if (! integer_all_onesp (size))
2045 tree src_len = c_strlen (src, 1);
2046 if (src_len
2047 && tree_fits_uhwi_p (src_len)
2048 && tree_fits_uhwi_p (len)
2049 && ! tree_int_cst_lt (len, src_len))
2051 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2052 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2053 if (!fn)
2054 return false;
2056 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2057 replace_call_with_call_and_fold (gsi, repl);
2058 return true;
2060 return false;
2063 /* If __builtin_strncat_chk is used, assume strncat is available. */
2064 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2065 if (!fn)
2066 return false;
2068 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2069 replace_call_with_call_and_fold (gsi, repl);
2070 return true;
2073 /* Build and append gimple statements to STMTS that would load a first
2074 character of a memory location identified by STR. LOC is location
2075 of the statement. */
2077 static tree
2078 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2080 tree var;
2082 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2083 tree cst_uchar_ptr_node
2084 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2085 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2087 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2088 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2089 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2091 gimple_assign_set_lhs (stmt, var);
2092 gimple_seq_add_stmt_without_update (stmts, stmt);
2094 return var;
2097 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2098 FCODE is the name of the builtin. */
2100 static bool
2101 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2103 gimple *stmt = gsi_stmt (*gsi);
2104 tree callee = gimple_call_fndecl (stmt);
2105 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2107 tree type = integer_type_node;
2108 tree str1 = gimple_call_arg (stmt, 0);
2109 tree str2 = gimple_call_arg (stmt, 1);
2110 tree lhs = gimple_call_lhs (stmt);
2111 HOST_WIDE_INT length = -1;
2113 /* Handle strncmp and strncasecmp functions. */
2114 if (gimple_call_num_args (stmt) == 3)
2116 tree len = gimple_call_arg (stmt, 2);
2117 if (tree_fits_uhwi_p (len))
2118 length = tree_to_uhwi (len);
2121 /* If the LEN parameter is zero, return zero. */
2122 if (length == 0)
2124 replace_call_with_value (gsi, integer_zero_node);
2125 return true;
2128 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2129 if (operand_equal_p (str1, str2, 0))
2131 replace_call_with_value (gsi, integer_zero_node);
2132 return true;
2135 const char *p1 = c_getstr (str1);
2136 const char *p2 = c_getstr (str2);
2138 /* For known strings, return an immediate value. */
2139 if (p1 && p2)
2141 int r = 0;
2142 bool known_result = false;
2144 switch (fcode)
2146 case BUILT_IN_STRCMP:
2148 r = strcmp (p1, p2);
2149 known_result = true;
2150 break;
2152 case BUILT_IN_STRNCMP:
2154 if (length == -1)
2155 break;
2156 r = strncmp (p1, p2, length);
2157 known_result = true;
2158 break;
2160 /* Only handleable situation is where the string are equal (result 0),
2161 which is already handled by operand_equal_p case. */
2162 case BUILT_IN_STRCASECMP:
2163 break;
2164 case BUILT_IN_STRNCASECMP:
2166 if (length == -1)
2167 break;
2168 r = strncmp (p1, p2, length);
2169 if (r == 0)
2170 known_result = true;
2171 break;;
2173 default:
2174 gcc_unreachable ();
2177 if (known_result)
2179 replace_call_with_value (gsi, build_cmp_result (type, r));
2180 return true;
2184 bool nonzero_length = length >= 1
2185 || fcode == BUILT_IN_STRCMP
2186 || fcode == BUILT_IN_STRCASECMP;
2188 location_t loc = gimple_location (stmt);
2190 /* If the second arg is "", return *(const unsigned char*)arg1. */
2191 if (p2 && *p2 == '\0' && nonzero_length)
2193 gimple_seq stmts = NULL;
2194 tree var = gimple_load_first_char (loc, str1, &stmts);
2195 if (lhs)
2197 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2198 gimple_seq_add_stmt_without_update (&stmts, stmt);
2201 gsi_replace_with_seq_vops (gsi, stmts);
2202 return true;
2205 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2206 if (p1 && *p1 == '\0' && nonzero_length)
2208 gimple_seq stmts = NULL;
2209 tree var = gimple_load_first_char (loc, str2, &stmts);
2211 if (lhs)
2213 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2214 stmt = gimple_build_assign (c, NOP_EXPR, var);
2215 gimple_seq_add_stmt_without_update (&stmts, stmt);
2217 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2218 gimple_seq_add_stmt_without_update (&stmts, stmt);
2221 gsi_replace_with_seq_vops (gsi, stmts);
2222 return true;
2225 /* If len parameter is one, return an expression corresponding to
2226 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2227 if (fcode == BUILT_IN_STRNCMP && length == 1)
2229 gimple_seq stmts = NULL;
2230 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2231 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2233 if (lhs)
2235 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2236 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2237 gimple_seq_add_stmt_without_update (&stmts, convert1);
2239 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2240 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2241 gimple_seq_add_stmt_without_update (&stmts, convert2);
2243 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2244 gimple_seq_add_stmt_without_update (&stmts, stmt);
2247 gsi_replace_with_seq_vops (gsi, stmts);
2248 return true;
2251 /* If length is larger than the length of one constant string,
2252 replace strncmp with corresponding strcmp */
2253 if (fcode == BUILT_IN_STRNCMP
2254 && length > 0
2255 && ((p2 && (size_t) length > strlen (p2))
2256 || (p1 && (size_t) length > strlen (p1))))
2258 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2259 if (!fn)
2260 return false;
2261 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2262 replace_call_with_call_and_fold (gsi, repl);
2263 return true;
2266 return false;
2269 /* Fold a call to the memchr pointed by GSI iterator. */
2271 static bool
2272 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2274 gimple *stmt = gsi_stmt (*gsi);
2275 tree lhs = gimple_call_lhs (stmt);
2276 tree arg1 = gimple_call_arg (stmt, 0);
2277 tree arg2 = gimple_call_arg (stmt, 1);
2278 tree len = gimple_call_arg (stmt, 2);
2280 /* If the LEN parameter is zero, return zero. */
2281 if (integer_zerop (len))
2283 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2284 return true;
2287 char c;
/* Punt unless both the searched-for character and the length are
   compile-time constants representable on the host.  */
2288 if (TREE_CODE (arg2) != INTEGER_CST
2289 || !tree_fits_uhwi_p (len)
2290 || !target_char_cst_p (arg2, &c))
2291 return false;
2293 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2294 unsigned HOST_WIDE_INT string_length;
/* P1 is non-NULL only when ARG1 is a constant string; STRING_LENGTH
   then gives how many bytes of it are known.  */
2295 const char *p1 = c_getstr (arg1, &string_length);
2297 if (p1)
2299 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2300 if (r == NULL)
/* Not found in the constant part.  That proves a NULL result only if
   the entire searched range was constant (LENGTH <= STRING_LENGTH);
   otherwise the trailing bytes are unknown, so punt.  */
2302 if (length <= string_length)
2304 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2305 return true;
2308 else
2310 unsigned HOST_WIDE_INT offset = r - p1;
2311 gimple_seq stmts = NULL;
2312 if (lhs != NULL_TREE)
/* Found at a known offset: replace with ARG1 + OFFSET.  */
2314 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2315 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2316 arg1, offset_cst);
2317 gimple_seq_add_stmt_without_update (&stmts, stmt);
2319 else
/* No LHS: still drop the call; a NOP carries the virtual operands.  */
2320 gimple_seq_add_stmt_without_update (&stmts,
2321 gimple_build_nop ());
2323 gsi_replace_with_seq_vops (gsi, stmts);
2324 return true;
2328 return false;
2331 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2332 to the call. IGNORE is true if the value returned
2333 by the builtin will be ignored. UNLOCKED is true is true if this
2334 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2335 the known length of the string. Return NULL_TREE if no simplification
2336 was possible. */
/* NOTE(review): the comment above is stale — there are no IGNORE/LEN
   parameters in the signature, and the function returns bool, not
   tree.  ARG0 is the string, ARG1 the stream.  */
2338 static bool
2339 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2340 tree arg0, tree arg1,
2341 bool unlocked)
2343 gimple *stmt = gsi_stmt (*gsi);
2345 /* If we're using an unlocked function, assume the other unlocked
2346 functions exist explicitly. */
2347 tree const fn_fputc = (unlocked
2348 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2349 : builtin_decl_implicit (BUILT_IN_FPUTC));
2350 tree const fn_fwrite = (unlocked
2351 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2352 : builtin_decl_implicit (BUILT_IN_FWRITE));
2354 /* If the return value is used, don't do the transformation. */
2355 if (gimple_call_lhs (stmt))
2356 return false;
2358 /* Get the length of the string passed to fputs. If the length
2359 can't be determined, punt. */
2360 tree len = get_maxval_strlen (arg0, 0);
2361 if (!len
2362 || TREE_CODE (len) != INTEGER_CST)
2363 return false;
/* compare_tree_int returns -1/0/1 for LEN < 1, == 1, > 1.  */
2365 switch (compare_tree_int (len, 1))
2367 case -1: /* length is 0, delete the call entirely . */
2368 replace_call_with_value (gsi, integer_zero_node);
2369 return true;
2371 case 0: /* length is 1, call fputc. */
2373 const char *p = c_getstr (arg0);
2374 if (p != NULL)
2376 if (!fn_fputc)
2377 return false;
2379 gimple *repl = gimple_build_call (fn_fputc, 2,
2380 build_int_cst
2381 (integer_type_node, p[0]), arg1);
2382 replace_call_with_call_and_fold (gsi, repl);
2383 return true;
/* Length 1 but the character itself is unknown: fall through and
   treat it like the general case.  */
2386 /* FALLTHROUGH */
2387 case 1: /* length is greater than 1, call fwrite. */
2389 /* If optimizing for size keep fputs. */
2390 if (optimize_function_for_size_p (cfun))
2391 return false;
2392 /* New argument list transforming fputs(string, stream) to
2393 fwrite(string, 1, len, stream). */
2394 if (!fn_fwrite)
2395 return false;
2397 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2398 size_one_node, len, arg1);
2399 replace_call_with_call_and_fold (gsi, repl);
2400 return true;
2402 default:
2403 gcc_unreachable ();
2405 return false;
2408 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2409 DEST, SRC, LEN, and SIZE are the arguments to the call.
2410 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2411 code of the builtin. If MAXLEN is not NULL, it is maximum length
2412 passed as third argument. */
/* NOTE(review): IGNORE/MAXLEN in the comment above are computed
   locally, not passed as parameters — the comment is stale.  */
2414 static bool
2415 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2416 tree dest, tree src, tree len, tree size,
2417 enum built_in_function fcode)
2419 gimple *stmt = gsi_stmt (*gsi);
2420 location_t loc = gimple_location (stmt);
2421 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2422 tree fn;
2424 /* If SRC and DEST are the same (and not volatile), return DEST
2425 (resp. DEST+LEN for __mempcpy_chk). */
2426 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2428 if (fcode != BUILT_IN_MEMPCPY_CHK)
2430 replace_call_with_value (gsi, dest);
2431 return true;
2433 else
2435 gimple_seq stmts = NULL;
2436 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2437 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2438 TREE_TYPE (dest), dest, len);
2439 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2440 replace_call_with_value (gsi, temp);
2441 return true;
2445 if (! tree_fits_uhwi_p (size))
2446 return false;
2448 tree maxlen = get_maxval_strlen (len, 2);
/* SIZE of all-ones means the object size was unknown to
   __builtin_object_size; then the check can never fail and the
   _chk call may be replaced unconditionally.  */
2449 if (! integer_all_onesp (size))
2451 if (! tree_fits_uhwi_p (len))
2453 /* If LEN is not constant, try MAXLEN too.
2454 For MAXLEN only allow optimizing into non-_ocs function
2455 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2456 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2458 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2460 /* (void) __mempcpy_chk () can be optimized into
2461 (void) __memcpy_chk (). */
2462 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2463 if (!fn)
2464 return false;
2466 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2467 replace_call_with_call_and_fold (gsi, repl);
2468 return true;
2470 return false;
2473 else
2474 maxlen = len;
/* Only drop the check if SIZE >= the (maximum) length copied.  */
2476 if (tree_int_cst_lt (size, maxlen))
2477 return false;
2480 fn = NULL_TREE;
2481 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2482 mem{cpy,pcpy,move,set} is available. */
2483 switch (fcode)
2485 case BUILT_IN_MEMCPY_CHK:
2486 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2487 break;
2488 case BUILT_IN_MEMPCPY_CHK:
2489 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2490 break;
2491 case BUILT_IN_MEMMOVE_CHK:
2492 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2493 break;
2494 case BUILT_IN_MEMSET_CHK:
2495 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2496 break;
2497 default:
2498 break;
2501 if (!fn)
2502 return false;
2504 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2505 replace_call_with_call_and_fold (gsi, repl);
2506 return true;
2509 /* Fold a call to the __st[rp]cpy_chk builtin.
2510 DEST, SRC, and SIZE are the arguments to the call.
2511 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2512 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2513 strings passed as second argument. */
/* NOTE(review): IGNORE/MAXLEN in the comment above are computed
   locally, not parameters — the comment is stale.  */
2515 static bool
2516 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2517 tree dest,
2518 tree src, tree size,
2519 enum built_in_function fcode)
2521 gimple *stmt = gsi_stmt (*gsi);
2522 location_t loc = gimple_location (stmt);
2523 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2524 tree len, fn;
2526 /* If SRC and DEST are the same (and not volatile), return DEST. */
2527 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2529 replace_call_with_value (gsi, dest);
2530 return true;
2533 if (! tree_fits_uhwi_p (size))
2534 return false;
2536 tree maxlen = get_maxval_strlen (src, 1);
/* All-ones SIZE means the destination object size is unknown, so the
   check can never fail; skip straight to the unchecked replacement.  */
2537 if (! integer_all_onesp (size))
2539 len = c_strlen (src, 1);
2540 if (! len || ! tree_fits_uhwi_p (len))
2542 /* If LEN is not constant, try MAXLEN too.
2543 For MAXLEN only allow optimizing into non-_ocs function
2544 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2545 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2547 if (fcode == BUILT_IN_STPCPY_CHK)
2549 if (! ignore)
2550 return false;
2552 /* If return value of __stpcpy_chk is ignored,
2553 optimize into __strcpy_chk. */
2554 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK)
2555 if (!fn)
2556 return false;
2558 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2559 replace_call_with_call_and_fold (gsi, repl);
2560 return true;
2563 if (! len || TREE_SIDE_EFFECTS (len))
2564 return false;
2566 /* If c_strlen returned something, but not a constant,
2567 transform __strcpy_chk into __memcpy_chk. */
2568 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2569 if (!fn)
2570 return false;
/* Copy LEN + 1 bytes to include the terminating NUL.  */
2572 gimple_seq stmts = NULL;
2573 len = gimple_convert (&stmts, loc, size_type_node, len);
2574 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2575 build_int_cst (size_type_node, 1));
2576 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2577 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2578 replace_call_with_call_and_fold (gsi, repl);
2579 return true;
2582 else
2583 maxlen = len;
/* Require SIZE > MAXLEN (strict) so the NUL terminator also fits.  */
2585 if (! tree_int_cst_lt (maxlen, size))
2586 return false;
2589 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2590 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2591 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2592 if (!fn)
2593 return false;
2595 gimple *repl = gimple_build_call (fn, 2, dest, src);
2596 replace_call_with_call_and_fold (gsi, repl);
2597 return true;
2600 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2601 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2602 length passed as third argument. IGNORE is true if return value can be
2603 ignored. FCODE is the BUILT_IN_* code of the builtin. */
/* NOTE(review): MAXLEN/IGNORE in the comment above are computed
   locally, not parameters — the comment is stale.  */
2605 static bool
2606 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2607 tree dest, tree src,
2608 tree len, tree size,
2609 enum built_in_function fcode)
2611 gimple *stmt = gsi_stmt (*gsi);
2612 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2613 tree fn;
2615 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2617 /* If return value of __stpncpy_chk is ignored,
2618 optimize into __strncpy_chk. */
2619 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2620 if (fn)
2622 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2623 replace_call_with_call_and_fold (gsi, repl);
2624 return true;
2628 if (! tree_fits_uhwi_p (size))
2629 return false;
2631 tree maxlen = get_maxval_strlen (len, 2);
/* All-ones SIZE means the object size is unknown; the check can never
   fail, so the unchecked variant is always safe.  */
2632 if (! integer_all_onesp (size))
2634 if (! tree_fits_uhwi_p (len))
2636 /* If LEN is not constant, try MAXLEN too.
2637 For MAXLEN only allow optimizing into non-_ocs function
2638 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2639 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2640 return false;
2642 else
2643 maxlen = len;
2645 if (tree_int_cst_lt (size, maxlen))
2646 return false;
2649 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2650 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2651 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2652 if (!fn)
2653 return false;
2655 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2656 replace_call_with_call_and_fold (gsi, repl);
2657 return true;
2660 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2661 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): the function actually returns bool (true when the
   call was simplified), not NULL_TREE — the comment is stale.  */
2663 static bool
2664 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2666 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2667 location_t loc = gimple_location (stmt);
2668 tree dest = gimple_call_arg (stmt, 0);
2669 tree src = gimple_call_arg (stmt, 1);
2670 tree fn, len, lenp1;
2672 /* If the result is unused, replace stpcpy with strcpy. */
2673 if (gimple_call_lhs (stmt) == NULL_TREE)
2675 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2676 if (!fn)
2677 return false;
2678 gimple_call_set_fndecl (stmt, fn);
2679 fold_stmt (gsi);
2680 return true;
2683 len = c_strlen (src, 1);
2684 if (!len
2685 || TREE_CODE (len) != INTEGER_CST)
2686 return false;
2688 if (optimize_function_for_size_p (cfun)
2689 /* If length is zero it's small enough. */
2690 && !integer_zerop (len))
2691 return false;
2693 /* If the source has a known length replace stpcpy with memcpy. */
2694 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2695 if (!fn)
2696 return false;
/* LENP1 = LEN + 1, so the terminating NUL is copied too.  */
2698 gimple_seq stmts = NULL;
2699 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2700 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2701 tem, build_int_cst (size_type_node, 1));
2702 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2703 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* The memcpy takes over the stpcpy call's memory effects: move the
   virtual use/def to the replacement and repoint the vdef's
   defining statement.  Order matters here.  */
2704 gimple_set_vuse (repl, gimple_vuse (stmt));
2705 gimple_set_vdef (repl, gimple_vdef (stmt));
2706 if (gimple_vdef (repl)
2707 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2708 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2709 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2710 /* Replace the result with dest + len. */
2711 stmts = NULL;
2712 tem = gimple_convert (&stmts, loc, sizetype, len);
2713 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2714 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2715 POINTER_PLUS_EXPR, dest, tem);
2716 gsi_replace (gsi, ret, false);
2717 /* Finally fold the memcpy call. */
2718 gimple_stmt_iterator gsi2 = *gsi;
2719 gsi_prev (&gsi2);
2720 fold_stmt (&gsi2);
2721 return true;
2724 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2725 NULL_TREE if a normal call should be emitted rather than expanding
2726 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2727 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2728 passed as second argument. */
/* NOTE(review): comment is stale — the function takes GSI/FCODE and
   returns bool; EXP/NARGS/ARGS/MAXLEN are not parameters.  */
2730 static bool
2731 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2732 enum built_in_function fcode)
2734 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2735 tree dest, size, len, fn, fmt, flag;
2736 const char *fmt_str;
2738 /* Verify the required arguments in the original call. */
2739 if (gimple_call_num_args (stmt) < 5)
2740 return false;
/* __snprintf_chk (dest, len, flag, size, fmt, ...).  */
2742 dest = gimple_call_arg (stmt, 0);
2743 len = gimple_call_arg (stmt, 1);
2744 flag = gimple_call_arg (stmt, 2);
2745 size = gimple_call_arg (stmt, 3);
2746 fmt = gimple_call_arg (stmt, 4);
2748 if (! tree_fits_uhwi_p (size))
2749 return false;
2751 if (! integer_all_onesp (size))
2753 tree maxlen = get_maxval_strlen (len, 2);
2754 if (! tree_fits_uhwi_p (len))
2756 /* If LEN is not constant, try MAXLEN too.
2757 For MAXLEN only allow optimizing into non-_ocs function
2758 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2759 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2760 return false;
2762 else
2763 maxlen = len;
2765 if (tree_int_cst_lt (size, maxlen))
2766 return false;
2769 if (!init_target_chars ())
2770 return false;
2772 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2773 or if format doesn't contain % chars or is "%s". */
2774 if (! integer_zerop (flag))
2776 fmt_str = c_getstr (fmt);
2777 if (fmt_str == NULL)
2778 return false;
2779 if (strchr (fmt_str, target_percent) != NULL
2780 && strcmp (fmt_str, target_percent_s))
2781 return false;
2784 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2785 available. */
2786 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2787 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2788 if (!fn)
2789 return false;
2791 /* Replace the called function and the first 5 argument by 3 retaining
2792 trailing varargs. */
/* Rewrite in place: drop FLAG and SIZE, shift the varargs down by
   two slots, and shrink the statement's operand count.  */
2793 gimple_call_set_fndecl (stmt, fn);
2794 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2795 gimple_call_set_arg (stmt, 0, dest);
2796 gimple_call_set_arg (stmt, 1, len);
2797 gimple_call_set_arg (stmt, 2, fmt);
2798 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2799 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2800 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2801 fold_stmt (gsi);
2802 return true;
2805 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2806 Return NULL_TREE if a normal call should be emitted rather than
2807 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2808 or BUILT_IN_VSPRINTF_CHK. */
/* NOTE(review): comment is stale — the function takes GSI/FCODE and
   returns bool; EXP/NARGS/ARGS are not parameters.  */
2810 static bool
2811 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2812 enum built_in_function fcode)
2814 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2815 tree dest, size, len, fn, fmt, flag;
2816 const char *fmt_str;
2817 unsigned nargs = gimple_call_num_args (stmt);
2819 /* Verify the required arguments in the original call. */
2820 if (nargs < 4)
2821 return false;
/* __sprintf_chk (dest, flag, size, fmt, ...).  */
2822 dest = gimple_call_arg (stmt, 0);
2823 flag = gimple_call_arg (stmt, 1);
2824 size = gimple_call_arg (stmt, 2);
2825 fmt = gimple_call_arg (stmt, 3);
2827 if (! tree_fits_uhwi_p (size))
2828 return false;
2830 len = NULL_TREE;
2832 if (!init_target_chars ())
2833 return false;
2835 /* Check whether the format is a literal string constant. */
2836 fmt_str = c_getstr (fmt);
2837 if (fmt_str != NULL)
2839 /* If the format doesn't contain % args or %%, we know the size. */
2840 if (strchr (fmt_str, target_percent) == 0)
2842 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2843 len = build_int_cstu (size_type_node, strlen (fmt_str));
2845 /* If the format is "%s" and first ... argument is a string literal,
2846 we know the size too. */
2847 else if (fcode == BUILT_IN_SPRINTF_CHK
2848 && strcmp (fmt_str, target_percent_s) == 0)
2850 tree arg;
2852 if (nargs == 5)
2854 arg = gimple_call_arg (stmt, 4);
2855 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2857 len = c_strlen (arg, 1);
2858 if (! len || ! tree_fits_uhwi_p (len))
2859 len = NULL_TREE;
/* Unless the object size is unknown (all-ones), require a known
   output length strictly below SIZE before dropping the check.  */
2865 if (! integer_all_onesp (size))
2867 if (! len || ! tree_int_cst_lt (len, size))
2868 return false;
2871 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2872 or if format doesn't contain % chars or is "%s". */
2873 if (! integer_zerop (flag))
2875 if (fmt_str == NULL)
2876 return false;
2877 if (strchr (fmt_str, target_percent) != NULL
2878 && strcmp (fmt_str, target_percent_s))
2879 return false;
2882 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2883 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2884 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2885 if (!fn)
2886 return false;
2888 /* Replace the called function and the first 4 argument by 2 retaining
2889 trailing varargs. */
/* Rewrite in place: drop FLAG and SIZE, shift the varargs down by
   two slots, and shrink the statement's operand count.  */
2890 gimple_call_set_fndecl (stmt, fn);
2891 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2892 gimple_call_set_arg (stmt, 0, dest);
2893 gimple_call_set_arg (stmt, 1, fmt);
2894 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2895 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2896 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2897 fold_stmt (gsi);
2898 return true;
2901 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2902 ORIG may be null if this is a 2-argument call. We don't attempt to
2903 simplify calls with more than 3 arguments.
2905 Return true if simplification was possible, otherwise false. */
2907 bool
2908 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2910 gimple *stmt = gsi_stmt (*gsi);
2911 tree dest = gimple_call_arg (stmt, 0);
2912 tree fmt = gimple_call_arg (stmt, 1);
2913 tree orig = NULL_TREE;
2914 const char *fmt_str = NULL;
2916 /* Verify the required arguments in the original call. We deal with two
2917 types of sprintf() calls: 'sprintf (str, fmt)' and
2918 'sprintf (dest, "%s", orig)'. */
2919 if (gimple_call_num_args (stmt) > 3)
2920 return false;
2922 if (gimple_call_num_args (stmt) == 3)
2923 orig = gimple_call_arg (stmt, 2);
2925 /* Check whether the format is a literal string constant. */
2926 fmt_str = c_getstr (fmt);
2927 if (fmt_str == NULL)
2928 return false;
2930 if (!init_target_chars ())
2931 return false;
2933 /* If the format doesn't contain % args or %%, use strcpy. */
2934 if (strchr (fmt_str, target_percent) == NULL)
2936 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2938 if (!fn)
2939 return false;
2941 /* Don't optimize sprintf (buf, "abc", ptr++). */
2942 if (orig)
2943 return false;
2945 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2946 'format' is known to contain no % formats. */
2947 gimple_seq stmts = NULL;
2948 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2949 gimple_seq_add_stmt_without_update (&stmts, repl);
2950 if (gimple_call_lhs (stmt))
/* sprintf returns the number of characters written, i.e. the
   format's length when it contains no conversions.  */
2952 repl = gimple_build_assign (gimple_call_lhs (stmt),
2953 build_int_cst (integer_type_node,
2954 strlen (fmt_str)));
2955 gimple_seq_add_stmt_without_update (&stmts, repl);
2956 gsi_replace_with_seq_vops (gsi, stmts);
2957 /* gsi now points at the assignment to the lhs, get a
2958 stmt iterator to the memcpy call.
2959 ??? We can't use gsi_for_stmt as that doesn't work when the
2960 CFG isn't built yet. */
2961 gimple_stmt_iterator gsi2 = *gsi;
2962 gsi_prev (&gsi2);
2963 fold_stmt (&gsi2);
2965 else
2967 gsi_replace_with_seq_vops (gsi, stmts);
2968 fold_stmt (gsi);
2970 return true;
2973 /* If the format is "%s", use strcpy if the result isn't used. */
2974 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2976 tree fn;
2977 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2979 if (!fn)
2980 return false;
2982 /* Don't crash on sprintf (str1, "%s"). */
2983 if (!orig)
2984 return false;
/* ORIG_LEN is only needed when the return value is used (it becomes
   the value assigned to the LHS).  */
2986 tree orig_len = NULL_TREE;
2987 if (gimple_call_lhs (stmt))
2989 orig_len = get_maxval_strlen (orig, 0);
2990 if (!orig_len)
2991 return false;
2994 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2995 gimple_seq stmts = NULL;
2996 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2997 gimple_seq_add_stmt_without_update (&stmts, repl);
2998 if (gimple_call_lhs (stmt))
3000 if (!useless_type_conversion_p (integer_type_node,
3001 TREE_TYPE (orig_len)))
3002 orig_len = fold_convert (integer_type_node, orig_len);
3003 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3004 gimple_seq_add_stmt_without_update (&stmts, repl);
3005 gsi_replace_with_seq_vops (gsi, stmts);
3006 /* gsi now points at the assignment to the lhs, get a
3007 stmt iterator to the memcpy call.
3008 ??? We can't use gsi_for_stmt as that doesn't work when the
3009 CFG isn't built yet. */
3010 gimple_stmt_iterator gsi2 = *gsi;
3011 gsi_prev (&gsi2);
3012 fold_stmt (&gsi2);
3014 else
3016 gsi_replace_with_seq_vops (gsi, stmts);
3017 fold_stmt (gsi);
3019 return true;
3021 return false;
3024 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3025 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3026 attempt to simplify calls with more than 4 arguments.
3028 Return true if simplification was possible, otherwise false. */
3030 bool
3031 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3033 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3034 tree dest = gimple_call_arg (stmt, 0);
3035 tree destsize = gimple_call_arg (stmt, 1);
3036 tree fmt = gimple_call_arg (stmt, 2);
3037 tree orig = NULL_TREE;
3038 const char *fmt_str = NULL;
3040 if (gimple_call_num_args (stmt) > 4)
3041 return false;
3043 if (gimple_call_num_args (stmt) == 4)
3044 orig = gimple_call_arg (stmt, 3);
/* The destination size must be a known constant so we can prove no
   truncation occurs.  */
3046 if (!tree_fits_uhwi_p (destsize))
3047 return false;
3048 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3050 /* Check whether the format is a literal string constant. */
3051 fmt_str = c_getstr (fmt);
3052 if (fmt_str == NULL)
3053 return false;
3055 if (!init_target_chars ())
3056 return false;
3058 /* If the format doesn't contain % args or %%, use strcpy. */
3059 if (strchr (fmt_str, target_percent) == NULL)
3061 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3062 if (!fn)
3063 return false;
3065 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3066 if (orig)
3067 return false;
3069 /* We could expand this as
3070 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3071 or to
3072 memcpy (str, fmt_with_nul_at_cstm1, cst);
3073 but in the former case that might increase code size
3074 and in the latter case grow .rodata section too much.
3075 So punt for now. */
3076 size_t len = strlen (fmt_str);
3077 if (len >= destlen)
3078 return false;
3080 gimple_seq stmts = NULL;
3081 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3082 gimple_seq_add_stmt_without_update (&stmts, repl);
3083 if (gimple_call_lhs (stmt))
/* snprintf returns the length it would have written; here that is
   exactly the format's length, which we proved fits.  */
3085 repl = gimple_build_assign (gimple_call_lhs (stmt),
3086 build_int_cst (integer_type_node, len));
3087 gimple_seq_add_stmt_without_update (&stmts, repl);
3088 gsi_replace_with_seq_vops (gsi, stmts);
3089 /* gsi now points at the assignment to the lhs, get a
3090 stmt iterator to the memcpy call.
3091 ??? We can't use gsi_for_stmt as that doesn't work when the
3092 CFG isn't built yet. */
3093 gimple_stmt_iterator gsi2 = *gsi;
3094 gsi_prev (&gsi2);
3095 fold_stmt (&gsi2);
3097 else
3099 gsi_replace_with_seq_vops (gsi, stmts);
3100 fold_stmt (gsi);
3102 return true;
3105 /* If the format is "%s", use strcpy if the result isn't used. */
3106 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3108 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3109 if (!fn)
3110 return false;
3112 /* Don't crash on snprintf (str1, cst, "%s"). */
3113 if (!orig)
3114 return false;
3116 tree orig_len = get_maxval_strlen (orig, 0);
3117 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3118 return false;
3120 /* We could expand this as
3121 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3122 or to
3123 memcpy (str1, str2_with_nul_at_cstm1, cst);
3124 but in the former case that might increase code size
3125 and in the latter case grow .rodata section too much.
3126 So punt for now. */
3127 if (compare_tree_int (orig_len, destlen) >= 0)
3128 return false;
3130 /* Convert snprintf (str1, cst, "%s", str2) into
3131 strcpy (str1, str2) if strlen (str2) < cst. */
3132 gimple_seq stmts = NULL;
3133 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3134 gimple_seq_add_stmt_without_update (&stmts, repl);
3135 if (gimple_call_lhs (stmt))
3137 if (!useless_type_conversion_p (integer_type_node,
3138 TREE_TYPE (orig_len)))
3139 orig_len = fold_convert (integer_type_node, orig_len);
3140 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3141 gimple_seq_add_stmt_without_update (&stmts, repl);
3142 gsi_replace_with_seq_vops (gsi, stmts);
3143 /* gsi now points at the assignment to the lhs, get a
3144 stmt iterator to the memcpy call.
3145 ??? We can't use gsi_for_stmt as that doesn't work when the
3146 CFG isn't built yet. */
3147 gimple_stmt_iterator gsi2 = *gsi;
3148 gsi_prev (&gsi2);
3149 fold_stmt (&gsi2);
3151 else
3153 gsi_replace_with_seq_vops (gsi, stmts);
3154 fold_stmt (gsi);
3156 return true;
3158 return false;
3161 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3162 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3163 more than 3 arguments, and ARG may be null in the 2-argument case.
3165 Return NULL_TREE if no simplification was possible, otherwise return the
3166 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3167 code of the function to be simplified. */
/* NOTE(review): the function returns bool (true if simplified), not a
   tree — the comment above is stale.  */
3169 static bool
3170 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3171 tree fp, tree fmt, tree arg,
3172 enum built_in_function fcode)
3174 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3175 tree fn_fputc, fn_fputs;
3176 const char *fmt_str = NULL;
3178 /* If the return value is used, don't do the transformation. */
3179 if (gimple_call_lhs (stmt) != NULL_TREE)
3180 return false;
3182 /* Check whether the format is a literal string constant. */
3183 fmt_str = c_getstr (fmt);
3184 if (fmt_str == NULL)
3185 return false;
3187 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3189 /* If we're using an unlocked function, assume the other
3190 unlocked functions exist explicitly. */
3191 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3192 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3194 else
3196 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3197 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3200 if (!init_target_chars ())
3201 return false;
/* NOTE(review): the comment below is inaccurate — this branch uses
   fputs, not strcpy.  */
3203 /* If the format doesn't contain % args or %%, use strcpy. */
3204 if (strchr (fmt_str, target_percent) == NULL)
3206 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3207 && arg)
3208 return false;
3210 /* If the format specifier was "", fprintf does nothing. */
3211 if (fmt_str[0] == '\0')
3213 replace_call_with_value (gsi, NULL_TREE);
3214 return true;
3217 /* When "string" doesn't contain %, replace all cases of
3218 fprintf (fp, string) with fputs (string, fp). The fputs
3219 builtin will take care of special cases like length == 1. */
3220 if (fn_fputs)
3222 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3223 replace_call_with_call_and_fold (gsi, repl);
3224 return true;
3228 /* The other optimizations can be done only on the non-va_list variants. */
3229 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3230 return false;
3232 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3233 else if (strcmp (fmt_str, target_percent_s) == 0)
3235 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3236 return false;
3237 if (fn_fputs)
3239 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3240 replace_call_with_call_and_fold (gsi, repl);
3241 return true;
3245 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3246 else if (strcmp (fmt_str, target_percent_c) == 0)
3248 if (!arg
3249 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3250 return false;
3251 if (fn_fputc)
3253 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3254 replace_call_with_call_and_fold (gsi, repl);
3255 return true;
3259 return false;
3262 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3263 FMT and ARG are the arguments to the call; we don't fold cases with
3264 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3266 Return NULL_TREE if no simplification was possible, otherwise return the
3267 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3268 code of the function to be simplified. */
3270 static bool
3271 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3272 tree arg, enum built_in_function fcode)
3274 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3275 tree fn_putchar, fn_puts, newarg;
3276 const char *fmt_str = NULL;
3278 /* If the return value is used, don't do the transformation. */
3279 if (gimple_call_lhs (stmt) != NULL_TREE)
3280 return false;
3282 /* Check whether the format is a literal string constant. */
3283 fmt_str = c_getstr (fmt);
3284 if (fmt_str == NULL)
3285 return false;
3287 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3289 /* If we're using an unlocked function, assume the other
3290 unlocked functions exist explicitly. */
3291 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3292 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3294 else
3296 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3297 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3300 if (!init_target_chars ())
3301 return false;
3303 if (strcmp (fmt_str, target_percent_s) == 0
3304 || strchr (fmt_str, target_percent) == NULL)
3306 const char *str;
3308 if (strcmp (fmt_str, target_percent_s) == 0)
3310 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3311 return false;
3313 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3314 return false;
3316 str = c_getstr (arg);
3317 if (str == NULL)
3318 return false;
3320 else
3322 /* The format specifier doesn't contain any '%' characters. */
3323 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3324 && arg)
3325 return false;
3326 str = fmt_str;
3329 /* If the string was "", printf does nothing. */
3330 if (str[0] == '\0')
3332 replace_call_with_value (gsi, NULL_TREE);
3333 return true;
3336 /* If the string has length of 1, call putchar. */
3337 if (str[1] == '\0')
3339 /* Given printf("c"), (where c is any one character,)
3340 convert "c"[0] to an int and pass that to the replacement
3341 function. */
3342 newarg = build_int_cst (integer_type_node, str[0]);
3343 if (fn_putchar)
3345 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3346 replace_call_with_call_and_fold (gsi, repl);
3347 return true;
3350 else
3352 /* If the string was "string\n", call puts("string"). */
3353 size_t len = strlen (str);
3354 if ((unsigned char)str[len - 1] == target_newline
3355 && (size_t) (int) len == len
3356 && (int) len > 0)
3358 char *newstr;
3359 tree offset_node, string_cst;
3361 /* Create a NUL-terminated string that's one char shorter
3362 than the original, stripping off the trailing '\n'. */
3363 newarg = build_string_literal (len, str);
3364 string_cst = string_constant (newarg, &offset_node);
3365 gcc_checking_assert (string_cst
3366 && (TREE_STRING_LENGTH (string_cst)
3367 == (int) len)
3368 && integer_zerop (offset_node)
3369 && (unsigned char)
3370 TREE_STRING_POINTER (string_cst)[len - 1]
3371 == target_newline);
3372 /* build_string_literal creates a new STRING_CST,
3373 modify it in place to avoid double copying. */
3374 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3375 newstr[len - 1] = '\0';
3376 if (fn_puts)
3378 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3379 replace_call_with_call_and_fold (gsi, repl);
3380 return true;
3383 else
3384 /* We'd like to arrange to call fputs(string,stdout) here,
3385 but we need stdout and don't have a way to get it yet. */
3386 return false;
3390 /* The other optimizations can be done only on the non-va_list variants. */
3391 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3392 return false;
3394 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3395 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3397 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3398 return false;
3399 if (fn_puts)
3401 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3402 replace_call_with_call_and_fold (gsi, repl);
3403 return true;
3407 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3408 else if (strcmp (fmt_str, target_percent_c) == 0)
3410 if (!arg || ! useless_type_conversion_p (integer_type_node,
3411 TREE_TYPE (arg)))
3412 return false;
3413 if (fn_putchar)
3415 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3416 replace_call_with_call_and_fold (gsi, repl);
3417 return true;
3421 return false;
3426 /* Fold a call to __builtin_strlen with known length LEN. */
3428 static bool
3429 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3431 gimple *stmt = gsi_stmt (*gsi);
3432 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3433 if (!len)
3434 return false;
3435 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3436 replace_call_with_value (gsi, len);
3437 return true;
3440 /* Fold a call to __builtin_acc_on_device. */
3442 static bool
3443 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3445 /* Defer folding until we know which compiler we're in. */
3446 if (symtab->state != EXPANSION)
3447 return false;
3449 unsigned val_host = GOMP_DEVICE_HOST;
3450 unsigned val_dev = GOMP_DEVICE_NONE;
3452 #ifdef ACCEL_COMPILER
3453 val_host = GOMP_DEVICE_NOT_HOST;
3454 val_dev = ACCEL_COMPILER_acc_device;
3455 #endif
3457 location_t loc = gimple_location (gsi_stmt (*gsi));
3459 tree host_eq = make_ssa_name (boolean_type_node);
3460 gimple *host_ass = gimple_build_assign
3461 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3462 gimple_set_location (host_ass, loc);
3463 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3465 tree dev_eq = make_ssa_name (boolean_type_node);
3466 gimple *dev_ass = gimple_build_assign
3467 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3468 gimple_set_location (dev_ass, loc);
3469 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3471 tree result = make_ssa_name (boolean_type_node);
3472 gimple *result_ass = gimple_build_assign
3473 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3474 gimple_set_location (result_ass, loc);
3475 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3477 replace_call_with_value (gsi, result);
3479 return true;
3482 /* Fold realloc (0, n) -> malloc (n). */
3484 static bool
3485 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3487 gimple *stmt = gsi_stmt (*gsi);
3488 tree arg = gimple_call_arg (stmt, 0);
3489 tree size = gimple_call_arg (stmt, 1);
3491 if (operand_equal_p (arg, null_pointer_node, 0))
3493 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3494 if (fn_malloc)
3496 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3497 replace_call_with_call_and_fold (gsi, repl);
3498 return true;
3501 return false;
3504 /* Fold the non-target builtin at *GSI and return whether any simplification
3505 was made. */
3507 static bool
3508 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3510 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3511 tree callee = gimple_call_fndecl (stmt);
3513 /* Give up for always_inline inline builtins until they are
3514 inlined. */
3515 if (avoid_folding_inline_builtin (callee))
3516 return false;
3518 unsigned n = gimple_call_num_args (stmt);
3519 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3520 switch (fcode)
3522 case BUILT_IN_BCMP:
3523 return gimple_fold_builtin_bcmp (gsi);
3524 case BUILT_IN_BCOPY:
3525 return gimple_fold_builtin_bcopy (gsi);
3526 case BUILT_IN_BZERO:
3527 return gimple_fold_builtin_bzero (gsi);
3529 case BUILT_IN_MEMSET:
3530 return gimple_fold_builtin_memset (gsi,
3531 gimple_call_arg (stmt, 1),
3532 gimple_call_arg (stmt, 2));
3533 case BUILT_IN_MEMCPY:
3534 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3535 gimple_call_arg (stmt, 1), 0);
3536 case BUILT_IN_MEMPCPY:
3537 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3538 gimple_call_arg (stmt, 1), 1);
3539 case BUILT_IN_MEMMOVE:
3540 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3541 gimple_call_arg (stmt, 1), 3);
3542 case BUILT_IN_SPRINTF_CHK:
3543 case BUILT_IN_VSPRINTF_CHK:
3544 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3545 case BUILT_IN_STRCAT_CHK:
3546 return gimple_fold_builtin_strcat_chk (gsi);
3547 case BUILT_IN_STRNCAT_CHK:
3548 return gimple_fold_builtin_strncat_chk (gsi);
3549 case BUILT_IN_STRLEN:
3550 return gimple_fold_builtin_strlen (gsi);
3551 case BUILT_IN_STRCPY:
3552 return gimple_fold_builtin_strcpy (gsi,
3553 gimple_call_arg (stmt, 0),
3554 gimple_call_arg (stmt, 1));
3555 case BUILT_IN_STRNCPY:
3556 return gimple_fold_builtin_strncpy (gsi,
3557 gimple_call_arg (stmt, 0),
3558 gimple_call_arg (stmt, 1),
3559 gimple_call_arg (stmt, 2));
3560 case BUILT_IN_STRCAT:
3561 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3562 gimple_call_arg (stmt, 1));
3563 case BUILT_IN_STRNCAT:
3564 return gimple_fold_builtin_strncat (gsi);
3565 case BUILT_IN_INDEX:
3566 case BUILT_IN_STRCHR:
3567 return gimple_fold_builtin_strchr (gsi, false);
3568 case BUILT_IN_RINDEX:
3569 case BUILT_IN_STRRCHR:
3570 return gimple_fold_builtin_strchr (gsi, true);
3571 case BUILT_IN_STRSTR:
3572 return gimple_fold_builtin_strstr (gsi);
3573 case BUILT_IN_STRCMP:
3574 case BUILT_IN_STRCASECMP:
3575 case BUILT_IN_STRNCMP:
3576 case BUILT_IN_STRNCASECMP:
3577 return gimple_fold_builtin_string_compare (gsi);
3578 case BUILT_IN_MEMCHR:
3579 return gimple_fold_builtin_memchr (gsi);
3580 case BUILT_IN_FPUTS:
3581 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3582 gimple_call_arg (stmt, 1), false);
3583 case BUILT_IN_FPUTS_UNLOCKED:
3584 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3585 gimple_call_arg (stmt, 1), true);
3586 case BUILT_IN_MEMCPY_CHK:
3587 case BUILT_IN_MEMPCPY_CHK:
3588 case BUILT_IN_MEMMOVE_CHK:
3589 case BUILT_IN_MEMSET_CHK:
3590 return gimple_fold_builtin_memory_chk (gsi,
3591 gimple_call_arg (stmt, 0),
3592 gimple_call_arg (stmt, 1),
3593 gimple_call_arg (stmt, 2),
3594 gimple_call_arg (stmt, 3),
3595 fcode);
3596 case BUILT_IN_STPCPY:
3597 return gimple_fold_builtin_stpcpy (gsi);
3598 case BUILT_IN_STRCPY_CHK:
3599 case BUILT_IN_STPCPY_CHK:
3600 return gimple_fold_builtin_stxcpy_chk (gsi,
3601 gimple_call_arg (stmt, 0),
3602 gimple_call_arg (stmt, 1),
3603 gimple_call_arg (stmt, 2),
3604 fcode);
3605 case BUILT_IN_STRNCPY_CHK:
3606 case BUILT_IN_STPNCPY_CHK:
3607 return gimple_fold_builtin_stxncpy_chk (gsi,
3608 gimple_call_arg (stmt, 0),
3609 gimple_call_arg (stmt, 1),
3610 gimple_call_arg (stmt, 2),
3611 gimple_call_arg (stmt, 3),
3612 fcode);
3613 case BUILT_IN_SNPRINTF_CHK:
3614 case BUILT_IN_VSNPRINTF_CHK:
3615 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3617 case BUILT_IN_FPRINTF:
3618 case BUILT_IN_FPRINTF_UNLOCKED:
3619 case BUILT_IN_VFPRINTF:
3620 if (n == 2 || n == 3)
3621 return gimple_fold_builtin_fprintf (gsi,
3622 gimple_call_arg (stmt, 0),
3623 gimple_call_arg (stmt, 1),
3624 n == 3
3625 ? gimple_call_arg (stmt, 2)
3626 : NULL_TREE,
3627 fcode);
3628 break;
3629 case BUILT_IN_FPRINTF_CHK:
3630 case BUILT_IN_VFPRINTF_CHK:
3631 if (n == 3 || n == 4)
3632 return gimple_fold_builtin_fprintf (gsi,
3633 gimple_call_arg (stmt, 0),
3634 gimple_call_arg (stmt, 2),
3635 n == 4
3636 ? gimple_call_arg (stmt, 3)
3637 : NULL_TREE,
3638 fcode);
3639 break;
3640 case BUILT_IN_PRINTF:
3641 case BUILT_IN_PRINTF_UNLOCKED:
3642 case BUILT_IN_VPRINTF:
3643 if (n == 1 || n == 2)
3644 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3645 n == 2
3646 ? gimple_call_arg (stmt, 1)
3647 : NULL_TREE, fcode);
3648 break;
3649 case BUILT_IN_PRINTF_CHK:
3650 case BUILT_IN_VPRINTF_CHK:
3651 if (n == 2 || n == 3)
3652 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3653 n == 3
3654 ? gimple_call_arg (stmt, 2)
3655 : NULL_TREE, fcode);
3656 break;
3657 case BUILT_IN_ACC_ON_DEVICE:
3658 return gimple_fold_builtin_acc_on_device (gsi,
3659 gimple_call_arg (stmt, 0));
3660 case BUILT_IN_REALLOC:
3661 return gimple_fold_builtin_realloc (gsi);
3663 default:;
3666 /* Try the generic builtin folder. */
3667 bool ignore = (gimple_call_lhs (stmt) == NULL);
3668 tree result = fold_call_stmt (stmt, ignore);
3669 if (result)
3671 if (ignore)
3672 STRIP_NOPS (result);
3673 else
3674 result = fold_convert (gimple_call_return_type (stmt), result);
3675 if (!update_call_from_tree (gsi, result))
3676 gimplify_and_update_call_from_tree (gsi, result);
3677 return true;
3680 return false;
3683 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3684 function calls to constants, where possible. */
3686 static tree
3687 fold_internal_goacc_dim (const gimple *call)
3689 int axis = oacc_get_ifn_dim_arg (call);
3690 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3691 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3692 tree result = NULL_TREE;
3694 /* If the size is 1, or we only want the size and it is not dynamic,
3695 we know the answer. */
3696 if (size == 1 || (!is_pos && size))
3698 tree type = TREE_TYPE (gimple_call_lhs (call));
3699 result = build_int_cst (type, size - is_pos);
3702 return result;
3705 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3706 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3707 &var where var is only addressable because of such calls. */
3709 bool
3710 optimize_atomic_compare_exchange_p (gimple *stmt)
3712 if (gimple_call_num_args (stmt) != 6
3713 || !flag_inline_atomics
3714 || !optimize
3715 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3716 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3717 || !gimple_vdef (stmt)
3718 || !gimple_vuse (stmt))
3719 return false;
3721 tree fndecl = gimple_call_fndecl (stmt);
3722 switch (DECL_FUNCTION_CODE (fndecl))
3724 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3725 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3726 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3728 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3729 break;
3730 default:
3731 return false;
3734 tree expected = gimple_call_arg (stmt, 1);
3735 if (TREE_CODE (expected) != ADDR_EXPR
3736 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3737 return false;
3739 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3740 if (!is_gimple_reg_type (etype)
3741 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3742 || TREE_THIS_VOLATILE (etype)
3743 || VECTOR_TYPE_P (etype)
3744 || TREE_CODE (etype) == COMPLEX_TYPE
3745 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3746 might not preserve all the bits. See PR71716. */
3747 || SCALAR_FLOAT_TYPE_P (etype)
3748 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3749 return false;
3751 tree weak = gimple_call_arg (stmt, 3);
3752 if (!integer_zerop (weak) && !integer_onep (weak))
3753 return false;
3755 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3756 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3757 machine_mode mode = TYPE_MODE (itype);
3759 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3760 == CODE_FOR_nothing
3761 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3762 return false;
3764 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3765 return false;
3767 return true;
3770 /* Fold
3771 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3772 into
3773 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3774 i = IMAGPART_EXPR <t>;
3775 r = (_Bool) i;
3776 e = REALPART_EXPR <t>; */
3778 void
3779 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3781 gimple *stmt = gsi_stmt (*gsi);
3782 tree fndecl = gimple_call_fndecl (stmt);
3783 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3784 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3785 tree ctype = build_complex_type (itype);
3786 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3787 bool throws = false;
3788 edge e = NULL;
3789 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3790 expected);
3791 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3792 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3793 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3795 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3796 build1 (VIEW_CONVERT_EXPR, itype,
3797 gimple_assign_lhs (g)));
3798 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3800 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3801 + int_size_in_bytes (itype);
3802 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3803 gimple_call_arg (stmt, 0),
3804 gimple_assign_lhs (g),
3805 gimple_call_arg (stmt, 2),
3806 build_int_cst (integer_type_node, flag),
3807 gimple_call_arg (stmt, 4),
3808 gimple_call_arg (stmt, 5));
3809 tree lhs = make_ssa_name (ctype);
3810 gimple_call_set_lhs (g, lhs);
3811 gimple_set_vdef (g, gimple_vdef (stmt));
3812 gimple_set_vuse (g, gimple_vuse (stmt));
3813 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3814 tree oldlhs = gimple_call_lhs (stmt);
3815 if (stmt_can_throw_internal (stmt))
3817 throws = true;
3818 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3820 gimple_call_set_nothrow (as_a <gcall *> (g),
3821 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3822 gimple_call_set_lhs (stmt, NULL_TREE);
3823 gsi_replace (gsi, g, true);
3824 if (oldlhs)
3826 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3827 build1 (IMAGPART_EXPR, itype, lhs));
3828 if (throws)
3830 gsi_insert_on_edge_immediate (e, g);
3831 *gsi = gsi_for_stmt (g);
3833 else
3834 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3835 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3836 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3838 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3839 build1 (REALPART_EXPR, itype, lhs));
3840 if (throws && oldlhs == NULL_TREE)
3842 gsi_insert_on_edge_immediate (e, g);
3843 *gsi = gsi_for_stmt (g);
3845 else
3846 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3847 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3849 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3850 VIEW_CONVERT_EXPR,
3851 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3852 gimple_assign_lhs (g)));
3853 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3855 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3856 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3857 *gsi = gsiret;
3860 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3861 doesn't fit into TYPE. The test for overflow should be regardless of
3862 -fwrapv, and even for unsigned types. */
3864 bool
3865 arith_overflowed_p (enum tree_code code, const_tree type,
3866 const_tree arg0, const_tree arg1)
3868 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3869 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3870 widest2_int_cst;
3871 widest2_int warg0 = widest2_int_cst (arg0);
3872 widest2_int warg1 = widest2_int_cst (arg1);
3873 widest2_int wres;
3874 switch (code)
3876 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3877 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3878 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3879 default: gcc_unreachable ();
3881 signop sign = TYPE_SIGN (type);
3882 if (sign == UNSIGNED && wi::neg_p (wres))
3883 return true;
3884 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3887 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3888 The statement may be replaced by another statement, e.g., if the call
3889 simplifies to a constant value. Return true if any changes were made.
3890 It is assumed that the operands have been previously folded. */
3892 static bool
3893 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3895 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3896 tree callee;
3897 bool changed = false;
3898 unsigned i;
3900 /* Fold *& in call arguments. */
3901 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3902 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3904 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3905 if (tmp)
3907 gimple_call_set_arg (stmt, i, tmp);
3908 changed = true;
3912 /* Check for virtual calls that became direct calls. */
3913 callee = gimple_call_fn (stmt);
3914 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3916 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3918 if (dump_file && virtual_method_call_p (callee)
3919 && !possible_polymorphic_call_target_p
3920 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3921 (OBJ_TYPE_REF_EXPR (callee)))))
3923 fprintf (dump_file,
3924 "Type inheritance inconsistent devirtualization of ");
3925 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3926 fprintf (dump_file, " to ");
3927 print_generic_expr (dump_file, callee, TDF_SLIM);
3928 fprintf (dump_file, "\n");
3931 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3932 changed = true;
3934 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3936 bool final;
3937 vec <cgraph_node *>targets
3938 = possible_polymorphic_call_targets (callee, stmt, &final);
3939 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3941 tree lhs = gimple_call_lhs (stmt);
3942 if (dump_enabled_p ())
3944 location_t loc = gimple_location_safe (stmt);
3945 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3946 "folding virtual function call to %s\n",
3947 targets.length () == 1
3948 ? targets[0]->name ()
3949 : "__builtin_unreachable");
3951 if (targets.length () == 1)
3953 tree fndecl = targets[0]->decl;
3954 gimple_call_set_fndecl (stmt, fndecl);
3955 changed = true;
3956 /* If changing the call to __cxa_pure_virtual
3957 or similar noreturn function, adjust gimple_call_fntype
3958 too. */
3959 if (gimple_call_noreturn_p (stmt)
3960 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3961 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3962 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3963 == void_type_node))
3964 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3965 /* If the call becomes noreturn, remove the lhs. */
3966 if (lhs
3967 && gimple_call_noreturn_p (stmt)
3968 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3969 || should_remove_lhs_p (lhs)))
3971 if (TREE_CODE (lhs) == SSA_NAME)
3973 tree var = create_tmp_var (TREE_TYPE (lhs));
3974 tree def = get_or_create_ssa_default_def (cfun, var);
3975 gimple *new_stmt = gimple_build_assign (lhs, def);
3976 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3978 gimple_call_set_lhs (stmt, NULL_TREE);
3980 maybe_remove_unused_call_args (cfun, stmt);
3982 else
3984 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3985 gimple *new_stmt = gimple_build_call (fndecl, 0);
3986 gimple_set_location (new_stmt, gimple_location (stmt));
3987 /* If the call had a SSA name as lhs morph that into
3988 an uninitialized value. */
3989 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3991 tree var = create_tmp_var (TREE_TYPE (lhs));
3992 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
3993 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
3994 set_ssa_default_def (cfun, var, lhs);
3996 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3997 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3998 gsi_replace (gsi, new_stmt, false);
3999 return true;
4005 /* Check for indirect calls that became direct calls, and then
4006 no longer require a static chain. */
4007 if (gimple_call_chain (stmt))
4009 tree fn = gimple_call_fndecl (stmt);
4010 if (fn && !DECL_STATIC_CHAIN (fn))
4012 gimple_call_set_chain (stmt, NULL);
4013 changed = true;
4015 else
4017 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4018 if (tmp)
4020 gimple_call_set_chain (stmt, tmp);
4021 changed = true;
4026 if (inplace)
4027 return changed;
4029 /* Check for builtins that CCP can handle using information not
4030 available in the generic fold routines. */
4031 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4033 if (gimple_fold_builtin (gsi))
4034 changed = true;
4036 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4038 changed |= targetm.gimple_fold_builtin (gsi);
4040 else if (gimple_call_internal_p (stmt))
4042 enum tree_code subcode = ERROR_MARK;
4043 tree result = NULL_TREE;
4044 bool cplx_result = false;
4045 tree overflow = NULL_TREE;
4046 switch (gimple_call_internal_fn (stmt))
4048 case IFN_BUILTIN_EXPECT:
4049 result = fold_builtin_expect (gimple_location (stmt),
4050 gimple_call_arg (stmt, 0),
4051 gimple_call_arg (stmt, 1),
4052 gimple_call_arg (stmt, 2));
4053 break;
4054 case IFN_UBSAN_OBJECT_SIZE:
4056 tree offset = gimple_call_arg (stmt, 1);
4057 tree objsize = gimple_call_arg (stmt, 2);
4058 if (integer_all_onesp (objsize)
4059 || (TREE_CODE (offset) == INTEGER_CST
4060 && TREE_CODE (objsize) == INTEGER_CST
4061 && tree_int_cst_le (offset, objsize)))
4063 replace_call_with_value (gsi, NULL_TREE);
4064 return true;
4067 break;
4068 case IFN_UBSAN_PTR:
4069 if (integer_zerop (gimple_call_arg (stmt, 1)))
4071 replace_call_with_value (gsi, NULL_TREE);
4072 return true;
4074 break;
4075 case IFN_UBSAN_BOUNDS:
4077 tree index = gimple_call_arg (stmt, 1);
4078 tree bound = gimple_call_arg (stmt, 2);
4079 if (TREE_CODE (index) == INTEGER_CST
4080 && TREE_CODE (bound) == INTEGER_CST)
4082 index = fold_convert (TREE_TYPE (bound), index);
4083 if (TREE_CODE (index) == INTEGER_CST
4084 && tree_int_cst_le (index, bound))
4086 replace_call_with_value (gsi, NULL_TREE);
4087 return true;
4091 break;
4092 case IFN_GOACC_DIM_SIZE:
4093 case IFN_GOACC_DIM_POS:
4094 result = fold_internal_goacc_dim (stmt);
4095 break;
4096 case IFN_UBSAN_CHECK_ADD:
4097 subcode = PLUS_EXPR;
4098 break;
4099 case IFN_UBSAN_CHECK_SUB:
4100 subcode = MINUS_EXPR;
4101 break;
4102 case IFN_UBSAN_CHECK_MUL:
4103 subcode = MULT_EXPR;
4104 break;
4105 case IFN_ADD_OVERFLOW:
4106 subcode = PLUS_EXPR;
4107 cplx_result = true;
4108 break;
4109 case IFN_SUB_OVERFLOW:
4110 subcode = MINUS_EXPR;
4111 cplx_result = true;
4112 break;
4113 case IFN_MUL_OVERFLOW:
4114 subcode = MULT_EXPR;
4115 cplx_result = true;
4116 break;
4117 default:
4118 break;
4120 if (subcode != ERROR_MARK)
4122 tree arg0 = gimple_call_arg (stmt, 0);
4123 tree arg1 = gimple_call_arg (stmt, 1);
4124 tree type = TREE_TYPE (arg0);
4125 if (cplx_result)
4127 tree lhs = gimple_call_lhs (stmt);
4128 if (lhs == NULL_TREE)
4129 type = NULL_TREE;
4130 else
4131 type = TREE_TYPE (TREE_TYPE (lhs));
4133 if (type == NULL_TREE)
4135 /* x = y + 0; x = y - 0; x = y * 0; */
4136 else if (integer_zerop (arg1))
4137 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4138 /* x = 0 + y; x = 0 * y; */
4139 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4140 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4141 /* x = y - y; */
4142 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4143 result = integer_zero_node;
4144 /* x = y * 1; x = 1 * y; */
4145 else if (subcode == MULT_EXPR && integer_onep (arg1))
4146 result = arg0;
4147 else if (subcode == MULT_EXPR && integer_onep (arg0))
4148 result = arg1;
4149 else if (TREE_CODE (arg0) == INTEGER_CST
4150 && TREE_CODE (arg1) == INTEGER_CST)
4152 if (cplx_result)
4153 result = int_const_binop (subcode, fold_convert (type, arg0),
4154 fold_convert (type, arg1));
4155 else
4156 result = int_const_binop (subcode, arg0, arg1);
4157 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4159 if (cplx_result)
4160 overflow = build_one_cst (type);
4161 else
4162 result = NULL_TREE;
4165 if (result)
4167 if (result == integer_zero_node)
4168 result = build_zero_cst (type);
4169 else if (cplx_result && TREE_TYPE (result) != type)
4171 if (TREE_CODE (result) == INTEGER_CST)
4173 if (arith_overflowed_p (PLUS_EXPR, type, result,
4174 integer_zero_node))
4175 overflow = build_one_cst (type);
4177 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4178 && TYPE_UNSIGNED (type))
4179 || (TYPE_PRECISION (type)
4180 < (TYPE_PRECISION (TREE_TYPE (result))
4181 + (TYPE_UNSIGNED (TREE_TYPE (result))
4182 && !TYPE_UNSIGNED (type)))))
4183 result = NULL_TREE;
4184 if (result)
4185 result = fold_convert (type, result);
4190 if (result)
4192 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4193 result = drop_tree_overflow (result);
4194 if (cplx_result)
4196 if (overflow == NULL_TREE)
4197 overflow = build_zero_cst (TREE_TYPE (result));
4198 tree ctype = build_complex_type (TREE_TYPE (result));
4199 if (TREE_CODE (result) == INTEGER_CST
4200 && TREE_CODE (overflow) == INTEGER_CST)
4201 result = build_complex (ctype, result, overflow);
4202 else
4203 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4204 ctype, result, overflow);
4206 if (!update_call_from_tree (gsi, result))
4207 gimplify_and_update_call_from_tree (gsi, result);
4208 changed = true;
4212 return changed;
4216 /* Return true whether NAME has a use on STMT. */
4218 static bool
4219 has_use_on_stmt (tree name, gimple *stmt)
4221 imm_use_iterator iter;
4222 use_operand_p use_p;
4223 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4224 if (USE_STMT (use_p) == stmt)
4225 return true;
4226 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.

   RCODE is either a tree code or a combined function code; OPS has up
   to three operands with unused trailing entries being NULL_TREE.  On
   failure *GSI is left untouched and the caller is responsible for
   discarding *SEQ.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      /* For comparisons in ops[0] also check both comparison operands.  */
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* GIMPLE_COND results are always tree codes, never function codes.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: the condition is statically known.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified expression into SEQ and compare
	     the resulting SSA name against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In-place replacement is only valid when the new RHS does not
	 need more operand slots than the existing statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same combined function as before: only the arguments changed,
	 so update them on the existing call statement.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Push the simplification to SEQ, forcing its result into the
	     statement's LHS, and replace the statement with SEQ while
	     preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; the canonicalization happens
   in place.  Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references to get at the base address.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component reference into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE the statement may not be
   replaced or grown; VALUEIZE is used to look through SSA edges during
   pattern-based simplification.  Returns true if any change was made.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember the no-warning flag so deferred overflow warnings can be
     suppressed for this statement below.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the LHS of the call.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* When folding in place pass a NULL seq so no new statements
	 are generated.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The simplification may have replaced the statement; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	/* Otherwise fall back to generic RHS folding.  In-place folding
	   may only shrink, never grow, the operand count.  */
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs folded to memory references are only valid if the
	       constraint allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*p style references under the ADDR_EXPR.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate the valueized return value if that is a valid copy.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
/* Valueization callback that ends up not following SSA edges.
   Always returns NULL_TREE, which tells gimple_simplify not to look
   through the definition of any SSA name.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
4834 /* Valueization callback that ends up following single-use SSA edges only. */
4836 tree
4837 follow_single_use_edges (tree val)
4839 if (TREE_CODE (val) == SSA_NAME
4840 && !has_single_use (val))
4841 return NULL_TREE;
4842 return val;
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without looking through SSA use-def chains.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
/* Overload of the above that valueizes operands via the VALUEIZE
   callback while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
4864 /* Perform the minimal folding on statement *GSI. Only operations like
4865 *&x created by constant propagation are handled. The statement cannot
4866 be replaced with a new one. Return true if the statement was
4867 changed, false otherwise.
4868 The statement *GSI should be in valid gimple form but may
4869 be in unfolded state as resulting from for example constant propagation
4870 which can produce *&x = 0. */
4872 bool
4873 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4875 gimple *stmt = gsi_stmt (*gsi);
4876 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4877 gcc_assert (gsi_stmt (*gsi) == stmt);
4878 return changed;
4881 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4882 if EXPR is null or we don't know how.
4883 If non-null, the result always has boolean type. */
4885 static tree
4886 canonicalize_bool (tree expr, bool invert)
4888 if (!expr)
4889 return NULL_TREE;
4890 else if (invert)
4892 if (integer_nonzerop (expr))
4893 return boolean_false_node;
4894 else if (integer_zerop (expr))
4895 return boolean_true_node;
4896 else if (TREE_CODE (expr) == SSA_NAME)
4897 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4898 build_int_cst (TREE_TYPE (expr), 0));
4899 else if (COMPARISON_CLASS_P (expr))
4900 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4901 boolean_type_node,
4902 TREE_OPERAND (expr, 0),
4903 TREE_OPERAND (expr, 1));
4904 else
4905 return NULL_TREE;
4907 else
4909 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4910 return expr;
4911 if (integer_nonzerop (expr))
4912 return boolean_true_node;
4913 else if (integer_zerop (expr))
4914 return boolean_false_node;
4915 else if (TREE_CODE (expr) == SSA_NAME)
4916 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4917 build_int_cst (TREE_TYPE (expr), 0));
4918 else if (COMPARISON_CLASS_P (expr))
4919 return fold_build2 (TREE_CODE (expr),
4920 boolean_type_node,
4921 TREE_OPERAND (expr, 0),
4922 TREE_OPERAND (expr, 1));
4923 else
4924 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* Boolean name compared against a constant: name == true and
	 name != false are both just name.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at the defining assignment of EXPR.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == true): compare against the
	     definition of name directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != true): compare against the inverted
	     definition of name.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
4986 /* Check to see if two boolean expressions OP1 and OP2 are logically
4987 equivalent. */
4989 static bool
4990 same_bool_result_p (const_tree op1, const_tree op2)
4992 /* Simple cases first. */
4993 if (operand_equal_p (op1, op2, 0))
4994 return true;
4996 /* Check the cases where at least one of the operands is a comparison.
4997 These are a bit smarter than operand_equal_p in that they apply some
4998 identifies on SSA_NAMEs. */
4999 if (COMPARISON_CLASS_P (op2)
5000 && same_bool_comparison_p (op1, TREE_CODE (op2),
5001 TREE_OPERAND (op2, 0),
5002 TREE_OPERAND (op2, 1)))
5003 return true;
5004 if (COMPARISON_CLASS_P (op1)
5005 && same_bool_comparison_p (op2, TREE_CODE (op1),
5006 TREE_OPERAND (op1, 0),
5007 TREE_OPERAND (op1, 1)))
5008 return true;
5010 /* Default case. */
5011 return false;
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other (via DeMorgan's
   law when an operand is inverted), hence the up-front prototypes.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5035 /* Helper function for and_comparisons_1: try to simplify the AND of the
5036 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5037 If INVERT is true, invert the value of the VAR before doing the AND.
5038 Return NULL_EXPR if we can't simplify this to a single expression. */
5040 static tree
5041 and_var_with_comparison (tree var, bool invert,
5042 enum tree_code code2, tree op2a, tree op2b)
5044 tree t;
5045 gimple *stmt = SSA_NAME_DEF_STMT (var);
5047 /* We can only deal with variables whose definitions are assignments. */
5048 if (!is_gimple_assign (stmt))
5049 return NULL_TREE;
5051 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5052 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5053 Then we only have to consider the simpler non-inverted cases. */
5054 if (invert)
5055 t = or_var_with_comparison_1 (stmt,
5056 invert_tree_comparison (code2, false),
5057 op2a, op2b);
5058 else
5059 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5060 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (op2a code2 op2b) is equivalent to var or !var these record it.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a being true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a being false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5226 /* Try to simplify the AND of two comparisons defined by
5227 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5228 If this can be done without constructing an intermediate value,
5229 return the resulting tree; otherwise NULL_TREE is returned.
5230 This function is deliberately asymmetric as it recurses on SSA_DEFs
5231 in the first comparison but not the second. */
5233 static tree
5234 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5235 enum tree_code code2, tree op2a, tree op2b)
5237 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5239 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5240 if (operand_equal_p (op1a, op2a, 0)
5241 && operand_equal_p (op1b, op2b, 0))
5243 /* Result will be either NULL_TREE, or a combined comparison. */
5244 tree t = combine_comparisons (UNKNOWN_LOCATION,
5245 TRUTH_ANDIF_EXPR, code1, code2,
5246 truth_type, op1a, op1b);
5247 if (t)
5248 return t;
5251 /* Likewise the swapped case of the above. */
5252 if (operand_equal_p (op1a, op2b, 0)
5253 && operand_equal_p (op1b, op2a, 0))
5255 /* Result will be either NULL_TREE, or a combined comparison. */
5256 tree t = combine_comparisons (UNKNOWN_LOCATION,
5257 TRUTH_ANDIF_EXPR, code1,
5258 swap_tree_comparison (code2),
5259 truth_type, op1a, op1b);
5260 if (t)
5261 return t;
5264 /* If both comparisons are of the same value against constants, we might
5265 be able to merge them. */
5266 if (operand_equal_p (op1a, op2a, 0)
5267 && TREE_CODE (op1b) == INTEGER_CST
5268 && TREE_CODE (op2b) == INTEGER_CST)
5270 int cmp = tree_int_cst_compare (op1b, op2b);
5272 /* If we have (op1a == op1b), we should either be able to
5273 return that or FALSE, depending on whether the constant op1b
5274 also satisfies the other comparison against op2b. */
5275 if (code1 == EQ_EXPR)
5277 bool done = true;
5278 bool val;
5279 switch (code2)
5281 case EQ_EXPR: val = (cmp == 0); break;
5282 case NE_EXPR: val = (cmp != 0); break;
5283 case LT_EXPR: val = (cmp < 0); break;
5284 case GT_EXPR: val = (cmp > 0); break;
5285 case LE_EXPR: val = (cmp <= 0); break;
5286 case GE_EXPR: val = (cmp >= 0); break;
5287 default: done = false;
5289 if (done)
5291 if (val)
5292 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5293 else
5294 return boolean_false_node;
5297 /* Likewise if the second comparison is an == comparison. */
5298 else if (code2 == EQ_EXPR)
5300 bool done = true;
5301 bool val;
5302 switch (code1)
5304 case EQ_EXPR: val = (cmp == 0); break;
5305 case NE_EXPR: val = (cmp != 0); break;
5306 case LT_EXPR: val = (cmp > 0); break;
5307 case GT_EXPR: val = (cmp < 0); break;
5308 case LE_EXPR: val = (cmp >= 0); break;
5309 case GE_EXPR: val = (cmp <= 0); break;
5310 default: done = false;
5312 if (done)
5314 if (val)
5315 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5316 else
5317 return boolean_false_node;
5321 /* Same business with inequality tests. */
5322 else if (code1 == NE_EXPR)
5324 bool val;
5325 switch (code2)
5327 case EQ_EXPR: val = (cmp != 0); break;
5328 case NE_EXPR: val = (cmp == 0); break;
5329 case LT_EXPR: val = (cmp >= 0); break;
5330 case GT_EXPR: val = (cmp <= 0); break;
5331 case LE_EXPR: val = (cmp > 0); break;
5332 case GE_EXPR: val = (cmp < 0); break;
5333 default:
5334 val = false;
5336 if (val)
5337 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5339 else if (code2 == NE_EXPR)
5341 bool val;
5342 switch (code1)
5344 case EQ_EXPR: val = (cmp == 0); break;
5345 case NE_EXPR: val = (cmp != 0); break;
5346 case LT_EXPR: val = (cmp <= 0); break;
5347 case GT_EXPR: val = (cmp >= 0); break;
5348 case LE_EXPR: val = (cmp < 0); break;
5349 case GE_EXPR: val = (cmp > 0); break;
5350 default:
5351 val = false;
5353 if (val)
5354 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5357 /* Chose the more restrictive of two < or <= comparisons. */
5358 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5359 && (code2 == LT_EXPR || code2 == LE_EXPR))
5361 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5362 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5363 else
5364 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5367 /* Likewise chose the more restrictive of two > or >= comparisons. */
5368 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5369 && (code2 == GT_EXPR || code2 == GE_EXPR))
5371 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5372 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5373 else
5374 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5377 /* Check for singleton ranges. */
5378 else if (cmp == 0
5379 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5380 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5381 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5383 /* Check for disjoint ranges. */
5384 else if (cmp <= 0
5385 && (code1 == LT_EXPR || code1 == LE_EXPR)
5386 && (code2 == GT_EXPR || code2 == GE_EXPR))
5387 return boolean_false_node;
5388 else if (cmp >= 0
5389 && (code1 == GT_EXPR || code1 == GE_EXPR)
5390 && (code2 == LT_EXPR || code2 == LE_EXPR))
5391 return boolean_false_node;
5394 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5395 NAME's definition is a truth value. See if there are any simplifications
5396 that can be done against the NAME's definition. */
5397 if (TREE_CODE (op1a) == SSA_NAME
5398 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5399 && (integer_zerop (op1b) || integer_onep (op1b)))
5401 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5402 || (code1 == NE_EXPR && integer_onep (op1b)));
5403 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5404 switch (gimple_code (stmt))
5406 case GIMPLE_ASSIGN:
5407 /* Try to simplify by copy-propagating the definition. */
5408 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5410 case GIMPLE_PHI:
5411 /* If every argument to the PHI produces the same result when
5412 ANDed with the second comparison, we win.
5413 Do not do this unless the type is bool since we need a bool
5414 result here anyway. */
5415 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5417 tree result = NULL_TREE;
5418 unsigned i;
5419 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5421 tree arg = gimple_phi_arg_def (stmt, i);
5423 /* If this PHI has itself as an argument, ignore it.
5424 If all the other args produce the same result,
5425 we're still OK. */
5426 if (arg == gimple_phi_result (stmt))
5427 continue;
5428 else if (TREE_CODE (arg) == INTEGER_CST)
5430 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5432 if (!result)
5433 result = boolean_false_node;
5434 else if (!integer_zerop (result))
5435 return NULL_TREE;
5437 else if (!result)
5438 result = fold_build2 (code2, boolean_type_node,
5439 op2a, op2b);
5440 else if (!same_bool_comparison_p (result,
5441 code2, op2a, op2b))
5442 return NULL_TREE;
5444 else if (TREE_CODE (arg) == SSA_NAME
5445 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5447 tree temp;
5448 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5449 /* In simple cases we can look through PHI nodes,
5450 but we have to be careful with loops.
5451 See PR49073. */
5452 if (! dom_info_available_p (CDI_DOMINATORS)
5453 || gimple_bb (def_stmt) == gimple_bb (stmt)
5454 || dominated_by_p (CDI_DOMINATORS,
5455 gimple_bb (def_stmt),
5456 gimple_bb (stmt)))
5457 return NULL_TREE;
5458 temp = and_var_with_comparison (arg, invert, code2,
5459 op2a, op2b);
5460 if (!temp)
5461 return NULL_TREE;
5462 else if (!result)
5463 result = temp;
5464 else if (!same_bool_result_p (result, temp))
5465 return NULL_TREE;
5467 else
5468 return NULL_TREE;
5470 return result;
5473 default:
5474 break;
5477 return NULL_TREE;
5480 /* Try to simplify the AND of two comparisons, specified by
5481 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5482 If this can be simplified to a single expression (without requiring
5483 introducing more SSA variables to hold intermediate values),
5484 return the resulting tree. Otherwise return NULL_TREE.
5485 If the result expression is non-null, it has boolean type. */
5487 tree
5488 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5489 enum tree_code code2, tree op2a, tree op2b)
5491 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5492 if (t)
5493 return t;
5494 else
5495 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5498 /* Helper function for or_comparisons_1: try to simplify the OR of the
5499 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5500 If INVERT is true, invert the value of VAR before doing the OR.
5501 Return NULL_EXPR if we can't simplify this to a single expression. */
5503 static tree
5504 or_var_with_comparison (tree var, bool invert,
5505 enum tree_code code2, tree op2a, tree op2b)
5507 tree t;
5508 gimple *stmt = SSA_NAME_DEF_STMT (var);
5510 /* We can only deal with variables whose definitions are assignments. */
5511 if (!is_gimple_assign (stmt))
5512 return NULL_TREE;
5514 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5515 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5516 Then we only have to consider the simpler non-inverted cases. */
5517 if (invert)
5518 t = and_var_with_comparison_1 (stmt,
5519 invert_tree_comparison (code2, false),
5520 op2a, op2b);
5521 else
5522 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5523 return canonicalize_bool (t, invert);
5526 /* Try to simplify the OR of the ssa variable defined by the assignment
5527    STMT with the comparison specified by (OP2A CODE2 OP2B).
5528    Return NULL_EXPR if we can't simplify this to a single expression.  */
5530 static tree
5531 or_var_with_comparison_1 (gimple *stmt,
5532 			  enum tree_code code2, tree op2a, tree op2b)
5534   tree var = gimple_assign_lhs (stmt);
5535   tree true_test_var = NULL_TREE;
5536   tree false_test_var = NULL_TREE;
  /* TRUE_TEST_VAR / FALSE_TEST_VAR will record OP2A when the second
     comparison reduces to "op2a is true" / "op2a is false"; the identity
     checks below key off them.  */
5537   enum tree_code innercode = gimple_assign_rhs_code (stmt);
5539   /* Check for identities like (var OR (var != 0)) => true .  */
5540   if (TREE_CODE (op2a) == SSA_NAME
5541       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5543       if ((code2 == NE_EXPR && integer_zerop (op2b))
5544 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5546 	  true_test_var = op2a;
5547 	  if (var == true_test_var)
5548 	    return var;
5550       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5551 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5553 	  false_test_var = op2a;
5554 	  if (var == false_test_var)
5555 	    return boolean_true_node;
5559   /* If the definition is a comparison, recurse on it.  */
5560   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5562       tree t = or_comparisons_1 (innercode,
5563 				 gimple_assign_rhs1 (stmt),
5564 				 gimple_assign_rhs2 (stmt),
5565 				 code2,
5566 				 op2a,
5567 				 op2b);
5568       if (t)
5569 	return t;
5572   /* If the definition is an AND or OR expression, we may be able to
5573      simplify by reassociating.  */
5574   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5575       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5577       tree inner1 = gimple_assign_rhs1 (stmt);
5578       tree inner2 = gimple_assign_rhs2 (stmt);
5579       gimple *s;
5580       tree t;
      /* PARTIAL caches the fold of the first inner test with the second
	 comparison so it can be combined with the second inner fold.  */
5581       tree partial = NULL_TREE;
5582       bool is_or = (innercode == BIT_IOR_EXPR);
5584       /* Check for boolean identities that don't require recursive examination
5585 	 of inner1/inner2:
5586 	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5587 	 inner1 OR (inner1 AND inner2) => inner1
5588 	 !inner1 OR (inner1 OR inner2) => true
5589 	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5591       if (inner1 == true_test_var)
5592 	return (is_or ? var : inner1);
5593       else if (inner2 == true_test_var)
5594 	return (is_or ? var : inner2);
5595       else if (inner1 == false_test_var)
5596 	return (is_or
5597 		? boolean_true_node
5598 		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
5599       else if (inner2 == false_test_var)
5600 	return (is_or
5601 		? boolean_true_node
5602 		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
5604       /* Next, redistribute/reassociate the OR across the inner tests.
5605 	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
5606       if (TREE_CODE (inner1) == SSA_NAME
5607 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5608 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5609 	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5610 					     gimple_assign_rhs1 (s),
5611 					     gimple_assign_rhs2 (s),
5612 					     code2, op2a, op2b)))
5614 	  /* Handle the OR case, where we are reassociating:
5615 	     (inner1 OR inner2) OR (op2a code2 op2b)
5616 	     => (t OR inner2)
5617 	     If the partial result t is a constant, we win.  Otherwise
5618 	     continue on to try reassociating with the other inner test.  */
5619 	  if (is_or)
5621 	      if (integer_onep (t))
5622 		return boolean_true_node;
5623 	      else if (integer_zerop (t))
5624 		return inner2;
5627 	  /* Handle the AND case, where we are redistributing:
5628 	     (inner1 AND inner2) OR (op2a code2 op2b)
5629 	     => (t AND (inner2 OR (op2a code op2b)))  */
5630 	  else if (integer_zerop (t))
5631 	    return boolean_false_node;
5633 	  /* Save partial result for later.  */
5634 	  partial = t;
5637       /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
5638       if (TREE_CODE (inner2) == SSA_NAME
5639 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5640 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5641 	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5642 					     gimple_assign_rhs1 (s),
5643 					     gimple_assign_rhs2 (s),
5644 					     code2, op2a, op2b)))
5646 	  /* Handle the OR case, where we are reassociating:
5647 	     (inner1 OR inner2) OR (op2a code2 op2b)
5648 	     => (inner1 OR t)
5649 	     => (t OR partial)  */
5650 	  if (is_or)
5652 	      if (integer_zerop (t))
5653 		return inner1;
5654 	      else if (integer_onep (t))
5655 		return boolean_true_node;
5656 	      /* If both are the same, we can apply the identity
5657 		 (x OR x) == x.  */
5658 	      else if (partial && same_bool_result_p (t, partial))
5659 		return t;
5662 	  /* Handle the AND case, where we are redistributing:
5663 	     (inner1 AND inner2) OR (op2a code2 op2b)
5664 	     => (t AND (inner1 OR (op2a code2 op2b)))
5665 	     => (t AND partial)  */
5666 	  else
5668 	      if (integer_zerop (t))
5669 		return boolean_false_node;
5670 	      else if (partial)
5672 		  /* We already got a simplification for the other
5673 		     operand to the redistributed AND expression.  The
5674 		     interesting case is when at least one is true.
5675 		     Or, if both are the same, we can apply the identity
5676 		     (x AND x) == x.  */
5677 		  if (integer_onep (partial))
5678 		    return t;
5679 		  else if (integer_onep (t))
5680 		    return partial;
5681 		  else if (same_bool_result_p (t, partial))
5682 		    return t;
  /* No single-expression simplification was found.  */
5687   return NULL_TREE;
5690 /* Try to simplify the OR of two comparisons defined by
5691    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5692    If this can be done without constructing an intermediate value,
5693    return the resulting tree; otherwise NULL_TREE is returned.
5694    This function is deliberately asymmetric as it recurses on SSA_DEFs
5695    in the first comparison but not the second.  */
5697 static tree
5698 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5699 		  enum tree_code code2, tree op2a, tree op2b)
5701   tree truth_type = truth_type_for (TREE_TYPE (op1a));
5703   /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
5704   if (operand_equal_p (op1a, op2a, 0)
5705       && operand_equal_p (op1b, op2b, 0))
5707       /* Result will be either NULL_TREE, or a combined comparison.  */
5708       tree t = combine_comparisons (UNKNOWN_LOCATION,
5709 				    TRUTH_ORIF_EXPR, code1, code2,
5710 				    truth_type, op1a, op1b);
5711       if (t)
5712 	return t;
5715   /* Likewise the swapped case of the above.  */
5716   if (operand_equal_p (op1a, op2b, 0)
5717       && operand_equal_p (op1b, op2a, 0))
5719       /* Result will be either NULL_TREE, or a combined comparison.  */
5720       tree t = combine_comparisons (UNKNOWN_LOCATION,
5721 				    TRUTH_ORIF_EXPR, code1,
5722 				    swap_tree_comparison (code2),
5723 				    truth_type, op1a, op1b);
5724       if (t)
5725 	return t;
5728   /* If both comparisons are of the same value against constants, we might
5729      be able to merge them.  */
5730   if (operand_equal_p (op1a, op2a, 0)
5731       && TREE_CODE (op1b) == INTEGER_CST
5732       && TREE_CODE (op2b) == INTEGER_CST)
5734       int cmp = tree_int_cst_compare (op1b, op2b);
      /* CMP is negative when op1b < op2b, zero when they are equal and
	 positive when op1b > op2b; the case analysis below keys off it.  */
5736       /* If we have (op1a != op1b), we should either be able to
5737 	 return that or TRUE, depending on whether the constant op1b
5738 	 also satisfies the other comparison against op2b.  */
5739       if (code1 == NE_EXPR)
5741 	  bool done = true;
5742 	  bool val;
5743 	  switch (code2)
5745 	    case EQ_EXPR: val = (cmp == 0); break;
5746 	    case NE_EXPR: val = (cmp != 0); break;
5747 	    case LT_EXPR: val = (cmp < 0); break;
5748 	    case GT_EXPR: val = (cmp > 0); break;
5749 	    case LE_EXPR: val = (cmp <= 0); break;
5750 	    case GE_EXPR: val = (cmp >= 0); break;
5751 	    default: done = false;
5753 	  if (done)
5755 	      if (val)
5756 		return boolean_true_node;
5757 	      else
5758 		return fold_build2 (code1, boolean_type_node, op1a, op1b);
5761       /* Likewise if the second comparison is a != comparison.  */
5762       else if (code2 == NE_EXPR)
5764 	  bool done = true;
5765 	  bool val;
5766 	  switch (code1)
5768 	    case EQ_EXPR: val = (cmp == 0); break;
5769 	    case NE_EXPR: val = (cmp != 0); break;
5770 	    case LT_EXPR: val = (cmp > 0); break;
5771 	    case GT_EXPR: val = (cmp < 0); break;
5772 	    case LE_EXPR: val = (cmp >= 0); break;
5773 	    case GE_EXPR: val = (cmp <= 0); break;
5774 	    default: done = false;
5776 	  if (done)
5778 	      if (val)
5779 		return boolean_true_node;
5780 	      else
5781 		return fold_build2 (code2, boolean_type_node, op2a, op2b);
5785       /* See if an equality test is redundant with the other comparison.  */
5786       else if (code1 == EQ_EXPR)
5788 	  bool val;
5789 	  switch (code2)
5791 	    case EQ_EXPR: val = (cmp == 0); break;
5792 	    case NE_EXPR: val = (cmp != 0); break;
5793 	    case LT_EXPR: val = (cmp < 0); break;
5794 	    case GT_EXPR: val = (cmp > 0); break;
5795 	    case LE_EXPR: val = (cmp <= 0); break;
5796 	    case GE_EXPR: val = (cmp >= 0); break;
5797 	    default:
5798 	      val = false;
5800 	  if (val)
5801 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
5803       else if (code2 == EQ_EXPR)
5805 	  bool val;
5806 	  switch (code1)
5808 	    case EQ_EXPR: val = (cmp == 0); break;
5809 	    case NE_EXPR: val = (cmp != 0); break;
5810 	    case LT_EXPR: val = (cmp > 0); break;
5811 	    case GT_EXPR: val = (cmp < 0); break;
5812 	    case LE_EXPR: val = (cmp >= 0); break;
5813 	    case GE_EXPR: val = (cmp <= 0); break;
5814 	    default:
5815 	      val = false;
5817 	  if (val)
5818 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
5821       /* Chose the less restrictive of two < or <= comparisons.  */
5822       else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5823 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
5825 	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5826 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
5827 	  else
5828 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
5831       /* Likewise chose the less restrictive of two > or >= comparisons.  */
5832       else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5833 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
5835 	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5836 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
5837 	  else
5838 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
5841       /* Check for singleton ranges.  */
5842       else if (cmp == 0
5843 	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
5844 		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
5845 	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5847       /* Check for less/greater pairs that don't restrict the range at all.  */
5848       else if (cmp >= 0
5849 	       && (code1 == LT_EXPR || code1 == LE_EXPR)
5850 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
5851 	return boolean_true_node;
5852       else if (cmp <= 0
5853 	       && (code1 == GT_EXPR || code1 == GE_EXPR)
5854 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
5855 	return boolean_true_node;
5858   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5859      NAME's definition is a truth value.  See if there are any simplifications
5860      that can be done against the NAME's definition.  */
5861   if (TREE_CODE (op1a) == SSA_NAME
5862       && (code1 == NE_EXPR || code1 == EQ_EXPR)
5863       && (integer_zerop (op1b) || integer_onep (op1b)))
5865       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5866 		     || (code1 == NE_EXPR && integer_onep (op1b)));
5867       gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5868       switch (gimple_code (stmt))
5870 	case GIMPLE_ASSIGN:
5871 	  /* Try to simplify by copy-propagating the definition.  */
5872 	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5874 	case GIMPLE_PHI:
5875 	  /* If every argument to the PHI produces the same result when
5876 	     ORed with the second comparison, we win.
5877 	     Do not do this unless the type is bool since we need a bool
5878 	     result here anyway.  */
5879 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5881 	      tree result = NULL_TREE;
5882 	      unsigned i;
5883 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
5885 		  tree arg = gimple_phi_arg_def (stmt, i);
5887 		  /* If this PHI has itself as an argument, ignore it.
5888 		     If all the other args produce the same result,
5889 		     we're still OK.  */
5890 		  if (arg == gimple_phi_result (stmt))
5891 		    continue;
5892 		  else if (TREE_CODE (arg) == INTEGER_CST)
5894 		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5896 			  if (!result)
5897 			    result = boolean_true_node;
5898 			  else if (!integer_onep (result))
5899 			    return NULL_TREE;
5901 		      else if (!result)
5902 			result = fold_build2 (code2, boolean_type_node,
5903 					      op2a, op2b);
5904 		      else if (!same_bool_comparison_p (result,
5905 							code2, op2a, op2b))
5906 			return NULL_TREE;
5908 		  else if (TREE_CODE (arg) == SSA_NAME
5909 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
5911 		      tree temp;
5912 		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5913 		      /* In simple cases we can look through PHI nodes,
5914 			 but we have to be careful with loops.
5915 			 See PR49073.  */
5916 		      if (! dom_info_available_p (CDI_DOMINATORS)
5917 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
5918 			  || dominated_by_p (CDI_DOMINATORS,
5919 					     gimple_bb (def_stmt),
5920 					     gimple_bb (stmt)))
5921 			return NULL_TREE;
5922 		      temp = or_var_with_comparison (arg, invert, code2,
5923 						     op2a, op2b);
5924 		      if (!temp)
5925 			return NULL_TREE;
5926 		      else if (!result)
5927 			result = temp;
5928 		      else if (!same_bool_result_p (result, temp))
5929 			return NULL_TREE;
5931 		  else
5932 		    return NULL_TREE;
5934 	      return result;
5937 	default:
5938 	  break;
  /* Nothing matched; give up.  */
5941   return NULL_TREE;
5944 /* Try to simplify the OR of two comparisons, specified by
5945 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5946 If this can be simplified to a single expression (without requiring
5947 introducing more SSA variables to hold intermediate values),
5948 return the resulting tree. Otherwise return NULL_TREE.
5949 If the result expression is non-null, it has boolean type. */
5951 tree
5952 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5953 enum tree_code code2, tree op2a, tree op2b)
5955 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5956 if (t)
5957 return t;
5958 else
5959 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5963 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5965    Either NULL_TREE, a simplified but non-constant or a constant
5966    is returned.
5968    ??? This should go into a gimple-fold-inline.h file to be eventually
5969    privatized with the single valueize function used in the various TUs
5970    to avoid the indirect function call overhead.  */
5972 tree
5973 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5974 				tree (*gvalueize) (tree))
5976   code_helper rcode;
5977   tree ops[3] = {};
5978   /* ??? The SSA propagators do not correctly deal with following SSA use-def
5979      edges if there are intermediate VARYING defs.  For this reason
5980      do not follow SSA edges here even though SCCVN can technically
5981      just deal fine with that.  */
5982   if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5984       tree res = NULL_TREE;
5985       if (gimple_simplified_result_is_gimple_val (rcode, ops))
5986 	res = ops[0];
5987       else if (mprts_hook)
5988 	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5989       if (res)
5991 	  if (dump_file && dump_flags & TDF_DETAILS)
5993 	      fprintf (dump_file, "Match-and-simplified ");
5994 	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5995 	      fprintf (dump_file, " to ");
5996 	      print_generic_expr (dump_file, res);
5997 	      fprintf (dump_file, "\n");
5999 	  return res;
  /* gimple_simplify did not produce a usable value; fall back to
     code-specific constant folding below.  */
6003   location_t loc = gimple_location (stmt);
6004   switch (gimple_code (stmt))
6006     case GIMPLE_ASSIGN:
6008 	enum tree_code subcode = gimple_assign_rhs_code (stmt);
6010 	switch (get_gimple_rhs_class (subcode))
6012 	  case GIMPLE_SINGLE_RHS:
6014 	      tree rhs = gimple_assign_rhs1 (stmt);
6015 	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6017 	      if (TREE_CODE (rhs) == SSA_NAME)
6019 		  /* If the RHS is an SSA_NAME, return its known constant value,
6020 		     if any.  */
6021 		  return (*valueize) (rhs);
6023 	      /* Handle propagating invariant addresses into address
6024 		 operations.  */
6025 	      else if (TREE_CODE (rhs) == ADDR_EXPR
6026 		       && !is_gimple_min_invariant (rhs))
6028 		  HOST_WIDE_INT offset = 0;
6029 		  tree base;
6030 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6031 							  &offset,
6032 							  valueize);
6033 		  if (base
6034 		      && (CONSTANT_CLASS_P (base)
6035 			  || decl_address_invariant_p (base)))
6036 		    return build_invariant_address (TREE_TYPE (rhs),
6037 						    base, offset);
6039 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
6040 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6041 		       && (CONSTRUCTOR_NELTS (rhs)
6042 			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6044 		  unsigned i, nelts;
6045 		  tree val;
6047 		  nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6048 		  auto_vec<tree, 32> vec (nelts);
6049 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6051 		      val = (*valueize) (val);
6052 		      if (TREE_CODE (val) == INTEGER_CST
6053 			  || TREE_CODE (val) == REAL_CST
6054 			  || TREE_CODE (val) == FIXED_CST)
6055 			vec.quick_push (val);
6056 		      else
6057 			return NULL_TREE;
6060 		  return build_vector (TREE_TYPE (rhs), vec);
6062 	      if (subcode == OBJ_TYPE_REF)
6064 		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6065 		  /* If callee is constant, we can fold away the wrapper.  */
6066 		  if (is_gimple_min_invariant (val))
6067 		    return val;
6070 	      if (kind == tcc_reference)
6072 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6073 		       || TREE_CODE (rhs) == REALPART_EXPR
6074 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
6075 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6077 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6078 		      return fold_unary_loc (EXPR_LOCATION (rhs),
6079 					     TREE_CODE (rhs),
6080 					     TREE_TYPE (rhs), val);
6082 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
6083 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6085 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6086 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
6087 					       TREE_CODE (rhs),
6088 					       TREE_TYPE (rhs), val,
6089 					       TREE_OPERAND (rhs, 1),
6090 					       TREE_OPERAND (rhs, 2));
6092 		  else if (TREE_CODE (rhs) == MEM_REF
6093 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6095 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6096 		      if (TREE_CODE (val) == ADDR_EXPR
6097 			  && is_gimple_min_invariant (val))
6099 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6100 						  unshare_expr (val),
6101 						  TREE_OPERAND (rhs, 1));
6102 			  if (tem)
6103 			    rhs = tem;
6106 		  return fold_const_aggregate_ref_1 (rhs, valueize);
6108 	      else if (kind == tcc_declaration)
6109 		return get_symbol_constant_value (rhs);
6110 	      return rhs;
6113 	  case GIMPLE_UNARY_RHS:
6114 	    return NULL_TREE;
6116 	  case GIMPLE_BINARY_RHS:
6117 	    /* Translate &x + CST into an invariant form suitable for
6118 	       further propagation.  */
6119 	    if (subcode == POINTER_PLUS_EXPR)
6121 		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6122 		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6123 		if (TREE_CODE (op0) == ADDR_EXPR
6124 		    && TREE_CODE (op1) == INTEGER_CST)
6126 		    tree off = fold_convert (ptr_type_node, op1);
6127 		    return build_fold_addr_expr_loc
6128 			(loc,
6129 			 fold_build2 (MEM_REF,
6130 				      TREE_TYPE (TREE_TYPE (op0)),
6131 				      unshare_expr (op0), off));
6134 	    /* Canonicalize bool != 0 and bool == 0 appearing after
6135 	       valueization.  While gimple_simplify handles this
6136 	       it can get confused by the ~X == 1 -> X == 0 transform
6137 	       which we cant reduce to a SSA name or a constant
6138 	       (and we have no way to tell gimple_simplify to not
6139 	       consider those transforms in the first place).  */
6140 	    else if (subcode == EQ_EXPR
6141 		     || subcode == NE_EXPR)
6143 		tree lhs = gimple_assign_lhs (stmt);
6144 		tree op0 = gimple_assign_rhs1 (stmt);
6145 		if (useless_type_conversion_p (TREE_TYPE (lhs),
6146 					       TREE_TYPE (op0)))
6148 		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6149 		    op0 = (*valueize) (op0);
6150 		    if (TREE_CODE (op0) == INTEGER_CST)
6151 		      std::swap (op0, op1);
6152 		    if (TREE_CODE (op1) == INTEGER_CST
6153 			&& ((subcode == NE_EXPR && integer_zerop (op1))
6154 			    || (subcode == EQ_EXPR && integer_onep (op1))))
6155 		      return op0;
6158 	    return NULL_TREE;
6160 	  case GIMPLE_TERNARY_RHS:
6162 	      /* Handle ternary operators that can appear in GIMPLE form.  */
6163 	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6164 	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6165 	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6166 	      return fold_ternary_loc (loc, subcode,
6167 				       gimple_expr_type (stmt), op0, op1, op2);
6170 	  default:
6171 	    gcc_unreachable ();
6175     case GIMPLE_CALL:
6177 	tree fn;
6178 	gcall *call_stmt = as_a <gcall *> (stmt);
6180 	if (gimple_call_internal_p (stmt))
6182 	    enum tree_code subcode = ERROR_MARK;
6183 	    switch (gimple_call_internal_fn (stmt))
6185 	      case IFN_UBSAN_CHECK_ADD:
6186 		subcode = PLUS_EXPR;
6187 		break;
6188 	      case IFN_UBSAN_CHECK_SUB:
6189 		subcode = MINUS_EXPR;
6190 		break;
6191 	      case IFN_UBSAN_CHECK_MUL:
6192 		subcode = MULT_EXPR;
6193 		break;
6194 	      case IFN_BUILTIN_EXPECT:
6196 		  tree arg0 = gimple_call_arg (stmt, 0);
6197 		  tree op0 = (*valueize) (arg0);
6198 		  if (TREE_CODE (op0) == INTEGER_CST)
6199 		    return op0;
6200 		  return NULL_TREE;
6202 	      default:
6203 		return NULL_TREE;
	    /* Here SUBCODE is one of the UBSAN_CHECK_* arithmetic codes;
	       fold the operation when the result provably does not
	       overflow.  */
6205 	    tree arg0 = gimple_call_arg (stmt, 0);
6206 	    tree arg1 = gimple_call_arg (stmt, 1);
6207 	    tree op0 = (*valueize) (arg0);
6208 	    tree op1 = (*valueize) (arg1);
6210 	    if (TREE_CODE (op0) != INTEGER_CST
6211 		|| TREE_CODE (op1) != INTEGER_CST)
6213 		switch (subcode)
6215 		  case MULT_EXPR:
6216 		    /* x * 0 = 0 * x = 0 without overflow.  */
6217 		    if (integer_zerop (op0) || integer_zerop (op1))
6218 		      return build_zero_cst (TREE_TYPE (arg0));
6219 		    break;
6220 		  case MINUS_EXPR:
6221 		    /* y - y = 0 without overflow.  */
6222 		    if (operand_equal_p (op0, op1, 0))
6223 		      return build_zero_cst (TREE_TYPE (arg0));
6224 		    break;
6225 		  default:
6226 		    break;
6229 	    tree res
6230 	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6231 	    if (res
6232 		&& TREE_CODE (res) == INTEGER_CST
6233 		&& !TREE_OVERFLOW (res))
6234 	      return res;
6235 	    return NULL_TREE;
      /* Not an internal call: try folding a builtin whose arguments
	 all valueize to constants.  */
6238 	fn = (*valueize) (gimple_call_fn (stmt));
6239 	if (TREE_CODE (fn) == ADDR_EXPR
6240 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6241 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6242 	    && gimple_builtin_call_types_compatible_p (stmt,
6243 						       TREE_OPERAND (fn, 0)))
6245 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6246 	    tree retval;
6247 	    unsigned i;
6248 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
6249 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
6250 	    retval = fold_builtin_call_array (loc,
6251 					      gimple_call_return_type (call_stmt),
6252 					      fn, gimple_call_num_args (stmt), args);
6253 	    if (retval)
6255 		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
6256 		STRIP_NOPS (retval);
6257 		retval = fold_convert (gimple_call_return_type (call_stmt),
6258 				       retval);
6260 	    return retval;
6262 	return NULL_TREE;
6265     default:
6266       return NULL_TREE;
6270 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6271 Returns NULL_TREE if folding to a constant is not possible, otherwise
6272 returns a constant according to is_gimple_min_invariant. */
6274 tree
6275 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6277 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6278 if (res && is_gimple_min_invariant (res))
6279 return res;
6280 return NULL_TREE;
6284 /* The following set of functions are supposed to fold references using
6285    their constant initializers.  */
6287 /* See if we can find constructor defining value of BASE.
6288    When we know the consructor with constant offset (such as
6289    base is array[40] and we do know constructor of array), then
6290    BIT_OFFSET is adjusted accordingly.
6292    As a special case, return error_mark_node when constructor
6293    is not explicitly available, but it is known to be zero
6294    such as 'static const int a;'.  */
6295 static tree
6296 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6297 		      tree (*valueize)(tree))
6299   HOST_WIDE_INT bit_offset2, size, max_size;
6300   bool reverse;
  /* Look through a MEM_REF, accumulating its constant offset (converted
     to bits) into *BIT_OFFSET and stripping down to the address operand.  */
6302   if (TREE_CODE (base) == MEM_REF)
6304       if (!integer_zerop (TREE_OPERAND (base, 1)))
6306 	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6307 	    return NULL_TREE;
6308 	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
6309 			  * BITS_PER_UNIT);
6312       if (valueize
6313 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6314 	base = valueize (TREE_OPERAND (base, 0));
6315       if (!base || TREE_CODE (base) != ADDR_EXPR)
6316 	return NULL_TREE;
6317       base = TREE_OPERAND (base, 0);
6319   else if (valueize
6320 	   && TREE_CODE (base) == SSA_NAME)
6321     base = valueize (base);
6323   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
6324      DECL_INITIAL.  If BASE is a nested reference into another
6325      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6326      the inner reference.  */
6327   switch (TREE_CODE (base))
6329     case VAR_DECL:
6330     case CONST_DECL:
6332 	tree init = ctor_for_folding (base);
6334 	/* Our semantic is exact opposite of ctor_for_folding;
6335 	   NULL means unknown, while error_mark_node is 0.  */
6336 	if (init == error_mark_node)
6337 	  return NULL_TREE;
6338 	if (!init)
6339 	  return error_mark_node;
6340 	return init;
6343     case VIEW_CONVERT_EXPR:
6344       return get_base_constructor (TREE_OPERAND (base, 0),
6345 				   bit_offset, valueize);
6347     case ARRAY_REF:
6348     case COMPONENT_REF:
6349       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6350 				      &reverse);
6351       if (max_size == -1 || size != max_size)
6352 	return NULL_TREE;
6353       *bit_offset +=  bit_offset2;
6354       return get_base_constructor (base, bit_offset, valueize);
6356     case CONSTRUCTOR:
6357       return base;
6359     default:
6360       if (CONSTANT_CLASS_P (base))
6361 	return base;
6363       return NULL_TREE;
6367 /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
6368    SIZE to the memory at bit OFFSET.  */
/* FROM_DECL is passed through unchanged to fold_ctor_reference when
   recursing into the element found at the computed index.  */
6370 static tree
6371 fold_array_ctor_reference (tree type, tree ctor,
6372 			   unsigned HOST_WIDE_INT offset,
6373 			   unsigned HOST_WIDE_INT size,
6374 			   tree from_decl)
6376   offset_int low_bound;
6377   offset_int elt_size;
6378   offset_int access_index;
6379   tree domain_type = NULL_TREE;
6380   HOST_WIDE_INT inner_offset;
6382   /* Compute low bound and elt size.  */
6383   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6384     domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6385   if (domain_type && TYPE_MIN_VALUE (domain_type))
6387       /* Static constructors for variably sized objects makes no sense.  */
6388       if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6389 	return NULL_TREE;
6390       low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6392   else
6393     low_bound = 0;
6394   /* Static constructors for variably sized objects makes no sense.  */
6395   if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6396     return NULL_TREE;
6397   elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6399   /* We can handle only constantly sized accesses that are known to not
6400      be larger than size of array element.  */
6401   if (!TYPE_SIZE_UNIT (type)
6402       || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6403       || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6404       || elt_size == 0)
6405     return NULL_TREE;
6407   /* Compute the array index we look for.  */
6408   access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6409 				 elt_size);
6410   access_index += low_bound;
6412   /* And offset within the access.  */
6413   inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6415   /* See if the array field is large enough to span whole access.  We do not
6416      care to fold accesses spanning multiple array indexes.  */
6417   if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6418     return NULL_TREE;
6419   if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6420     return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6422   /* When memory is not explicitely mentioned in constructor,
6423      it is 0 (or out of range).  */
6424   return build_zero_cst (type);
6427 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6428 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6430 static tree
6431 fold_nonarray_ctor_reference (tree type, tree ctor,
6432 unsigned HOST_WIDE_INT offset,
6433 unsigned HOST_WIDE_INT size,
6434 tree from_decl)
6436 unsigned HOST_WIDE_INT cnt;
6437 tree cfield, cval;
6439 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6440 cval)
6442 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6443 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6444 tree field_size = DECL_SIZE (cfield);
6445 offset_int bitoffset;
6446 offset_int bitoffset_end, access_end;
6448 /* Variable sized objects in static constructors makes no sense,
6449 but field_size can be NULL for flexible array members. */
6450 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6451 && TREE_CODE (byte_offset) == INTEGER_CST
6452 && (field_size != NULL_TREE
6453 ? TREE_CODE (field_size) == INTEGER_CST
6454 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6456 /* Compute bit offset of the field. */
6457 bitoffset = (wi::to_offset (field_offset)
6458 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6459 /* Compute bit offset where the field ends. */
6460 if (field_size != NULL_TREE)
6461 bitoffset_end = bitoffset + wi::to_offset (field_size);
6462 else
6463 bitoffset_end = 0;
6465 access_end = offset_int (offset) + size;
6467 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6468 [BITOFFSET, BITOFFSET_END)? */
6469 if (wi::cmps (access_end, bitoffset) > 0
6470 && (field_size == NULL_TREE
6471 || wi::lts_p (offset, bitoffset_end)))
6473 offset_int inner_offset = offset_int (offset) - bitoffset;
6474 /* We do have overlap. Now see if field is large enough to
6475 cover the access. Give up for accesses spanning multiple
6476 fields. */
6477 if (wi::cmps (access_end, bitoffset_end) > 0)
6478 return NULL_TREE;
6479 if (offset < bitoffset)
6480 return NULL_TREE;
6481 return fold_ctor_reference (type, cval,
6482 inner_offset.to_uhwi (), size,
6483 from_decl);
6486 /* When memory is not explicitely mentioned in constructor, it is 0. */
6487 return build_zero_cst (type);
6490 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6491 to the memory at bit OFFSET. */
6493 tree
6494 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6495 unsigned HOST_WIDE_INT size, tree from_decl)
6497 tree ret;
6499 /* We found the field with exact match. */
6500 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6501 && !offset)
6502 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6504 /* We are at the end of walk, see if we can view convert the
6505 result. */
6506 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6507 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6508 && !compare_tree_int (TYPE_SIZE (type), size)
6509 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6511 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6512 if (ret)
6514 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6515 if (ret)
6516 STRIP_USELESS_TYPE_CONVERSION (ret);
6518 return ret;
6520 /* For constants and byte-aligned/sized reads try to go through
6521 native_encode/interpret. */
6522 if (CONSTANT_CLASS_P (ctor)
6523 && BITS_PER_UNIT == 8
6524 && offset % BITS_PER_UNIT == 0
6525 && size % BITS_PER_UNIT == 0
6526 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6528 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6529 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6530 offset / BITS_PER_UNIT);
6531 if (len > 0)
6532 return native_interpret_expr (type, buf, len);
6534 if (TREE_CODE (ctor) == CONSTRUCTOR)
6537 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6538 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6539 return fold_array_ctor_reference (type, ctor, offset, size,
6540 from_decl);
6541 else
6542 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6543 from_decl);
6546 return NULL_TREE;
6549 /* Return the tree representing the element referenced by T if T is an
6550 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6551 names using VALUEIZE. Return NULL_TREE otherwise. */
6553 tree
6554 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6556 tree ctor, idx, base;
6557 HOST_WIDE_INT offset, size, max_size;
6558 tree tem;
6559 bool reverse;
6561 if (TREE_THIS_VOLATILE (t))
6562 return NULL_TREE;
6564 if (DECL_P (t))
6565 return get_symbol_constant_value (t);
6567 tem = fold_read_from_constant_string (t);
6568 if (tem)
6569 return tem;
6571 switch (TREE_CODE (t))
6573 case ARRAY_REF:
6574 case ARRAY_RANGE_REF:
6575 /* Constant indexes are handled well by get_base_constructor.
6576 Only special case variable offsets.
6577 FIXME: This code can't handle nested references with variable indexes
6578 (they will be handled only by iteration of ccp). Perhaps we can bring
6579 get_ref_base_and_extent here and make it use a valueize callback. */
6580 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6581 && valueize
6582 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6583 && TREE_CODE (idx) == INTEGER_CST)
6585 tree low_bound, unit_size;
6587 /* If the resulting bit-offset is constant, track it. */
6588 if ((low_bound = array_ref_low_bound (t),
6589 TREE_CODE (low_bound) == INTEGER_CST)
6590 && (unit_size = array_ref_element_size (t),
6591 tree_fits_uhwi_p (unit_size)))
6593 offset_int woffset
6594 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6595 TYPE_PRECISION (TREE_TYPE (idx)));
6597 if (wi::fits_shwi_p (woffset))
6599 offset = woffset.to_shwi ();
6600 /* TODO: This code seems wrong, multiply then check
6601 to see if it fits. */
6602 offset *= tree_to_uhwi (unit_size);
6603 offset *= BITS_PER_UNIT;
6605 base = TREE_OPERAND (t, 0);
6606 ctor = get_base_constructor (base, &offset, valueize);
6607 /* Empty constructor. Always fold to 0. */
6608 if (ctor == error_mark_node)
6609 return build_zero_cst (TREE_TYPE (t));
6610 /* Out of bound array access. Value is undefined,
6611 but don't fold. */
6612 if (offset < 0)
6613 return NULL_TREE;
6614 /* We can not determine ctor. */
6615 if (!ctor)
6616 return NULL_TREE;
6617 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6618 tree_to_uhwi (unit_size)
6619 * BITS_PER_UNIT,
6620 base);
6624 /* Fallthru. */
6626 case COMPONENT_REF:
6627 case BIT_FIELD_REF:
6628 case TARGET_MEM_REF:
6629 case MEM_REF:
6630 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6631 ctor = get_base_constructor (base, &offset, valueize);
6633 /* Empty constructor. Always fold to 0. */
6634 if (ctor == error_mark_node)
6635 return build_zero_cst (TREE_TYPE (t));
6636 /* We do not know precise address. */
6637 if (max_size == -1 || max_size != size)
6638 return NULL_TREE;
6639 /* We can not determine ctor. */
6640 if (!ctor)
6641 return NULL_TREE;
6643 /* Out of bound array access. Value is undefined, but don't fold. */
6644 if (offset < 0)
6645 return NULL_TREE;
6647 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6648 base);
6650 case REALPART_EXPR:
6651 case IMAGPART_EXPR:
6653 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6654 if (c && TREE_CODE (c) == COMPLEX_CST)
6655 return fold_build1_loc (EXPR_LOCATION (t),
6656 TREE_CODE (t), TREE_TYPE (t), c);
6657 break;
6660 default:
6661 break;
6664 return NULL_TREE;
6667 tree
6668 fold_const_aggregate_ref (tree t)
6670 return fold_const_aggregate_ref_1 (t, NULL);
6673 /* Lookup virtual method with index TOKEN in a virtual table V
6674 at OFFSET.
6675 Set CAN_REFER if non-NULL to false if method
6676 is not referable or if the virtual table is ill-formed (such as rewriten
6677 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6679 tree
6680 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6681 tree v,
6682 unsigned HOST_WIDE_INT offset,
6683 bool *can_refer)
6685 tree vtable = v, init, fn;
6686 unsigned HOST_WIDE_INT size;
6687 unsigned HOST_WIDE_INT elt_size, access_index;
6688 tree domain_type;
6690 if (can_refer)
6691 *can_refer = true;
6693 /* First of all double check we have virtual table. */
6694 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6696 /* Pass down that we lost track of the target. */
6697 if (can_refer)
6698 *can_refer = false;
6699 return NULL_TREE;
6702 init = ctor_for_folding (v);
6704 /* The virtual tables should always be born with constructors
6705 and we always should assume that they are avaialble for
6706 folding. At the moment we do not stream them in all cases,
6707 but it should never happen that ctor seem unreachable. */
6708 gcc_assert (init);
6709 if (init == error_mark_node)
6711 /* Pass down that we lost track of the target. */
6712 if (can_refer)
6713 *can_refer = false;
6714 return NULL_TREE;
6716 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6717 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6718 offset *= BITS_PER_UNIT;
6719 offset += token * size;
6721 /* Lookup the value in the constructor that is assumed to be array.
6722 This is equivalent to
6723 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6724 offset, size, NULL);
6725 but in a constant time. We expect that frontend produced a simple
6726 array without indexed initializers. */
6728 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6729 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6730 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6731 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6733 access_index = offset / BITS_PER_UNIT / elt_size;
6734 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6736 /* This code makes an assumption that there are no
6737 indexed fileds produced by C++ FE, so we can directly index the array. */
6738 if (access_index < CONSTRUCTOR_NELTS (init))
6740 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6741 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6742 STRIP_NOPS (fn);
6744 else
6745 fn = NULL;
6747 /* For type inconsistent program we may end up looking up virtual method
6748 in virtual table that does not contain TOKEN entries. We may overrun
6749 the virtual table and pick up a constant or RTTI info pointer.
6750 In any case the call is undefined. */
6751 if (!fn
6752 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6753 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6754 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6755 else
6757 fn = TREE_OPERAND (fn, 0);
6759 /* When cgraph node is missing and function is not public, we cannot
6760 devirtualize. This can happen in WHOPR when the actual method
6761 ends up in other partition, because we found devirtualization
6762 possibility too late. */
6763 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6765 if (can_refer)
6767 *can_refer = false;
6768 return fn;
6770 return NULL_TREE;
6774 /* Make sure we create a cgraph node for functions we'll reference.
6775 They can be non-existent if the reference comes from an entry
6776 of an external vtable for example. */
6777 cgraph_node::get_create (fn);
6779 return fn;
6782 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6783 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6784 KNOWN_BINFO carries the binfo describing the true type of
6785 OBJ_TYPE_REF_OBJECT(REF).
6786 Set CAN_REFER if non-NULL to false if method
6787 is not referable or if the virtual table is ill-formed (such as rewriten
6788 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6790 tree
6791 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6792 bool *can_refer)
6794 unsigned HOST_WIDE_INT offset;
6795 tree v;
6797 v = BINFO_VTABLE (known_binfo);
6798 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6799 if (!v)
6800 return NULL_TREE;
6802 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6804 if (can_refer)
6805 *can_refer = false;
6806 return NULL_TREE;
6808 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6811 /* Given a pointer value T, return a simplified version of an
6812 indirection through T, or NULL_TREE if no simplification is
6813 possible. Note that the resulting type may be different from
6814 the type pointed to in the sense that it is still compatible
6815 from the langhooks point of view. */
6817 tree
6818 gimple_fold_indirect_ref (tree t)
6820 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6821 tree sub = t;
6822 tree subtype;
6824 STRIP_NOPS (sub);
6825 subtype = TREE_TYPE (sub);
6826 if (!POINTER_TYPE_P (subtype)
6827 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6828 return NULL_TREE;
6830 if (TREE_CODE (sub) == ADDR_EXPR)
6832 tree op = TREE_OPERAND (sub, 0);
6833 tree optype = TREE_TYPE (op);
6834 /* *&p => p */
6835 if (useless_type_conversion_p (type, optype))
6836 return op;
6838 /* *(foo *)&fooarray => fooarray[0] */
6839 if (TREE_CODE (optype) == ARRAY_TYPE
6840 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6841 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6843 tree type_domain = TYPE_DOMAIN (optype);
6844 tree min_val = size_zero_node;
6845 if (type_domain && TYPE_MIN_VALUE (type_domain))
6846 min_val = TYPE_MIN_VALUE (type_domain);
6847 if (TREE_CODE (min_val) == INTEGER_CST)
6848 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6850 /* *(foo *)&complexfoo => __real__ complexfoo */
6851 else if (TREE_CODE (optype) == COMPLEX_TYPE
6852 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6853 return fold_build1 (REALPART_EXPR, type, op);
6854 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6855 else if (TREE_CODE (optype) == VECTOR_TYPE
6856 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6858 tree part_width = TYPE_SIZE (type);
6859 tree index = bitsize_int (0);
6860 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6864 /* *(p + CST) -> ... */
6865 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6866 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6868 tree addr = TREE_OPERAND (sub, 0);
6869 tree off = TREE_OPERAND (sub, 1);
6870 tree addrtype;
6872 STRIP_NOPS (addr);
6873 addrtype = TREE_TYPE (addr);
6875 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6876 if (TREE_CODE (addr) == ADDR_EXPR
6877 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6878 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6879 && tree_fits_uhwi_p (off))
6881 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6882 tree part_width = TYPE_SIZE (type);
6883 unsigned HOST_WIDE_INT part_widthi
6884 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6885 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6886 tree index = bitsize_int (indexi);
6887 if (offset / part_widthi
6888 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6889 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6890 part_width, index);
6893 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6894 if (TREE_CODE (addr) == ADDR_EXPR
6895 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6896 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6898 tree size = TYPE_SIZE_UNIT (type);
6899 if (tree_int_cst_equal (size, off))
6900 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6903 /* *(p + CST) -> MEM_REF <p, CST>. */
6904 if (TREE_CODE (addr) != ADDR_EXPR
6905 || DECL_P (TREE_OPERAND (addr, 0)))
6906 return fold_build2 (MEM_REF, type,
6907 addr,
6908 wide_int_to_tree (ptype, wi::to_wide (off)));
6911 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6912 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6913 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6914 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6916 tree type_domain;
6917 tree min_val = size_zero_node;
6918 tree osub = sub;
6919 sub = gimple_fold_indirect_ref (sub);
6920 if (! sub)
6921 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6922 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6923 if (type_domain && TYPE_MIN_VALUE (type_domain))
6924 min_val = TYPE_MIN_VALUE (type_domain);
6925 if (TREE_CODE (min_val) == INTEGER_CST)
6926 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6929 return NULL_TREE;
6932 /* Return true if CODE is an operation that when operating on signed
6933 integer types involves undefined behavior on overflow and the
6934 operation can be expressed with unsigned arithmetic. */
6936 bool
6937 arith_code_with_undefined_signed_overflow (tree_code code)
6939 switch (code)
6941 case PLUS_EXPR:
6942 case MINUS_EXPR:
6943 case MULT_EXPR:
6944 case NEGATE_EXPR:
6945 case POINTER_PLUS_EXPR:
6946 return true;
6947 default:
6948 return false;
6952 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6953 operation that can be transformed to unsigned arithmetic by converting
6954 its operand, carrying out the operation in the corresponding unsigned
6955 type and converting the result back to the original type.
6957 Returns a sequence of statements that replace STMT and also contain
6958 a modified form of STMT itself. */
6960 gimple_seq
6961 rewrite_to_defined_overflow (gimple *stmt)
6963 if (dump_file && (dump_flags & TDF_DETAILS))
6965 fprintf (dump_file, "rewriting stmt with undefined signed "
6966 "overflow ");
6967 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6970 tree lhs = gimple_assign_lhs (stmt);
6971 tree type = unsigned_type_for (TREE_TYPE (lhs));
6972 gimple_seq stmts = NULL;
6973 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6975 tree op = gimple_op (stmt, i);
6976 op = gimple_convert (&stmts, type, op);
6977 gimple_set_op (stmt, i, op);
6979 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6980 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6981 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6982 gimple_seq_add_stmt (&stmts, stmt);
6983 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6984 gimple_seq_add_stmt (&stmts, cvt);
6986 return stmts;
6990 /* The valueization hook we use for the gimple_build API simplification.
6991 This makes us match fold_buildN behavior by only combining with
6992 statements in the sequence(s) we are currently building. */
6994 static tree
6995 gimple_build_valueize (tree op)
6997 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6998 return op;
6999 return NULL_TREE;
7002 /* Build the expression CODE OP0 of type TYPE with location LOC,
7003 simplifying it first if possible. Returns the built
7004 expression value and appends statements possibly defining it
7005 to SEQ. */
7007 tree
7008 gimple_build (gimple_seq *seq, location_t loc,
7009 enum tree_code code, tree type, tree op0)
7011 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7012 if (!res)
7014 res = create_tmp_reg_or_ssa_name (type);
7015 gimple *stmt;
7016 if (code == REALPART_EXPR
7017 || code == IMAGPART_EXPR
7018 || code == VIEW_CONVERT_EXPR)
7019 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7020 else
7021 stmt = gimple_build_assign (res, code, op0);
7022 gimple_set_location (stmt, loc);
7023 gimple_seq_add_stmt_without_update (seq, stmt);
7025 return res;
7028 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7029 simplifying it first if possible. Returns the built
7030 expression value and appends statements possibly defining it
7031 to SEQ. */
7033 tree
7034 gimple_build (gimple_seq *seq, location_t loc,
7035 enum tree_code code, tree type, tree op0, tree op1)
7037 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7038 if (!res)
7040 res = create_tmp_reg_or_ssa_name (type);
7041 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7042 gimple_set_location (stmt, loc);
7043 gimple_seq_add_stmt_without_update (seq, stmt);
7045 return res;
7048 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7049 simplifying it first if possible. Returns the built
7050 expression value and appends statements possibly defining it
7051 to SEQ. */
7053 tree
7054 gimple_build (gimple_seq *seq, location_t loc,
7055 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7057 tree res = gimple_simplify (code, type, op0, op1, op2,
7058 seq, gimple_build_valueize);
7059 if (!res)
7061 res = create_tmp_reg_or_ssa_name (type);
7062 gimple *stmt;
7063 if (code == BIT_FIELD_REF)
7064 stmt = gimple_build_assign (res, code,
7065 build3 (code, type, op0, op1, op2));
7066 else
7067 stmt = gimple_build_assign (res, code, op0, op1, op2);
7068 gimple_set_location (stmt, loc);
7069 gimple_seq_add_stmt_without_update (seq, stmt);
7071 return res;
7074 /* Build the call FN (ARG0) with a result of type TYPE
7075 (or no result if TYPE is void) with location LOC,
7076 simplifying it first if possible. Returns the built
7077 expression value (or NULL_TREE if TYPE is void) and appends
7078 statements possibly defining it to SEQ. */
7080 tree
7081 gimple_build (gimple_seq *seq, location_t loc,
7082 enum built_in_function fn, tree type, tree arg0)
7084 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7085 if (!res)
7087 tree decl = builtin_decl_implicit (fn);
7088 gimple *stmt = gimple_build_call (decl, 1, arg0);
7089 if (!VOID_TYPE_P (type))
7091 res = create_tmp_reg_or_ssa_name (type);
7092 gimple_call_set_lhs (stmt, res);
7094 gimple_set_location (stmt, loc);
7095 gimple_seq_add_stmt_without_update (seq, stmt);
7097 return res;
7100 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7101 (or no result if TYPE is void) with location LOC,
7102 simplifying it first if possible. Returns the built
7103 expression value (or NULL_TREE if TYPE is void) and appends
7104 statements possibly defining it to SEQ. */
7106 tree
7107 gimple_build (gimple_seq *seq, location_t loc,
7108 enum built_in_function fn, tree type, tree arg0, tree arg1)
7110 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7111 if (!res)
7113 tree decl = builtin_decl_implicit (fn);
7114 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7115 if (!VOID_TYPE_P (type))
7117 res = create_tmp_reg_or_ssa_name (type);
7118 gimple_call_set_lhs (stmt, res);
7120 gimple_set_location (stmt, loc);
7121 gimple_seq_add_stmt_without_update (seq, stmt);
7123 return res;
7126 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7127 (or no result if TYPE is void) with location LOC,
7128 simplifying it first if possible. Returns the built
7129 expression value (or NULL_TREE if TYPE is void) and appends
7130 statements possibly defining it to SEQ. */
7132 tree
7133 gimple_build (gimple_seq *seq, location_t loc,
7134 enum built_in_function fn, tree type,
7135 tree arg0, tree arg1, tree arg2)
7137 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7138 seq, gimple_build_valueize);
7139 if (!res)
7141 tree decl = builtin_decl_implicit (fn);
7142 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7143 if (!VOID_TYPE_P (type))
7145 res = create_tmp_reg_or_ssa_name (type);
7146 gimple_call_set_lhs (stmt, res);
7148 gimple_set_location (stmt, loc);
7149 gimple_seq_add_stmt_without_update (seq, stmt);
7151 return res;
7154 /* Build the conversion (TYPE) OP with a result of type TYPE
7155 with location LOC if such conversion is neccesary in GIMPLE,
7156 simplifying it first.
7157 Returns the built expression value and appends
7158 statements possibly defining it to SEQ. */
7160 tree
7161 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7163 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7164 return op;
7165 return gimple_build (seq, loc, NOP_EXPR, type, op);
7168 /* Build the conversion (ptrofftype) OP with a result of a type
7169 compatible with ptrofftype with location LOC if such conversion
7170 is neccesary in GIMPLE, simplifying it first.
7171 Returns the built expression value and appends
7172 statements possibly defining it to SEQ. */
7174 tree
7175 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7177 if (ptrofftype_p (TREE_TYPE (op)))
7178 return op;
7179 return gimple_convert (seq, loc, sizetype, op);
7182 /* Build a vector of type TYPE in which each element has the value OP.
7183 Return a gimple value for the result, appending any new statements
7184 to SEQ. */
7186 tree
7187 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7188 tree op)
7190 tree res, vec = build_vector_from_val (type, op);
7191 if (is_gimple_val (vec))
7192 return vec;
7193 if (gimple_in_ssa_p (cfun))
7194 res = make_ssa_name (type);
7195 else
7196 res = create_tmp_reg (type);
7197 gimple *stmt = gimple_build_assign (res, vec);
7198 gimple_set_location (stmt, loc);
7199 gimple_seq_add_stmt_without_update (seq, stmt);
7200 return res;
7203 /* Build a vector of type TYPE in which the elements have the values
7204 given by ELTS. Return a gimple value for the result, appending any
7205 new instructions to SEQ. */
7207 tree
7208 gimple_build_vector (gimple_seq *seq, location_t loc, tree type,
7209 vec<tree> elts)
7211 unsigned int nelts = elts.length ();
7212 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
7213 for (unsigned int i = 0; i < nelts; ++i)
7214 if (!TREE_CONSTANT (elts[i]))
7216 vec<constructor_elt, va_gc> *v;
7217 vec_alloc (v, nelts);
7218 for (i = 0; i < nelts; ++i)
7219 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
7221 tree res;
7222 if (gimple_in_ssa_p (cfun))
7223 res = make_ssa_name (type);
7224 else
7225 res = create_tmp_reg (type);
7226 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7227 gimple_set_location (stmt, loc);
7228 gimple_seq_add_stmt_without_update (seq, stmt);
7229 return res;
7231 return build_vector (type, elts);
7234 /* Return true if the result of assignment STMT is known to be non-negative.
7235 If the return value is based on the assumption that signed overflow is
7236 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7237 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7239 static bool
7240 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7241 int depth)
7243 enum tree_code code = gimple_assign_rhs_code (stmt);
7244 switch (get_gimple_rhs_class (code))
7246 case GIMPLE_UNARY_RHS:
7247 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7248 gimple_expr_type (stmt),
7249 gimple_assign_rhs1 (stmt),
7250 strict_overflow_p, depth);
7251 case GIMPLE_BINARY_RHS:
7252 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7253 gimple_expr_type (stmt),
7254 gimple_assign_rhs1 (stmt),
7255 gimple_assign_rhs2 (stmt),
7256 strict_overflow_p, depth);
7257 case GIMPLE_TERNARY_RHS:
7258 return false;
7259 case GIMPLE_SINGLE_RHS:
7260 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7261 strict_overflow_p, depth);
7262 case GIMPLE_INVALID_RHS:
7263 break;
7265 gcc_unreachable ();
7268 /* Return true if return value of call STMT is known to be non-negative.
7269 If the return value is based on the assumption that signed overflow is
7270 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7271 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7273 static bool
7274 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7275 int depth)
7277 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7278 gimple_call_arg (stmt, 0) : NULL_TREE;
7279 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7280 gimple_call_arg (stmt, 1) : NULL_TREE;
7282 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7283 gimple_call_combined_fn (stmt),
7284 arg0,
7285 arg1,
7286 strict_overflow_p, depth);
7289 /* Return true if return value of call STMT is known to be non-negative.
7290 If the return value is based on the assumption that signed overflow is
7291 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7292 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7294 static bool
7295 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7296 int depth)
7298 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7300 tree arg = gimple_phi_arg_def (stmt, i);
7301 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7302 return false;
7304 return true;
7307 /* Return true if STMT is known to compute a non-negative value.
7308 If the return value is based on the assumption that signed overflow is
7309 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7310 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7312 bool
7313 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7314 int depth)
7316 switch (gimple_code (stmt))
7318 case GIMPLE_ASSIGN:
7319 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7320 depth);
7321 case GIMPLE_CALL:
7322 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7323 depth);
7324 case GIMPLE_PHI:
7325 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7326 depth);
7327 default:
7328 return false;
7332 /* Return true if the floating-point value computed by assignment STMT
7333 is known to have an integer value. We also allow +Inf, -Inf and NaN
7334 to be considered integer values. Return false for signaling NaN.
7336 DEPTH is the current nesting depth of the query. */
7338 static bool
7339 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7341 enum tree_code code = gimple_assign_rhs_code (stmt);
7342 switch (get_gimple_rhs_class (code))
7344 case GIMPLE_UNARY_RHS:
7345 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7346 gimple_assign_rhs1 (stmt), depth);
7347 case GIMPLE_BINARY_RHS:
7348 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7349 gimple_assign_rhs1 (stmt),
7350 gimple_assign_rhs2 (stmt), depth);
7351 case GIMPLE_TERNARY_RHS:
7352 return false;
7353 case GIMPLE_SINGLE_RHS:
7354 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7355 case GIMPLE_INVALID_RHS:
7356 break;
7358 gcc_unreachable ();
7361 /* Return true if the floating-point value computed by call STMT is known
7362 to have an integer value. We also allow +Inf, -Inf and NaN to be
7363 considered integer values. Return false for signaling NaN.
7365 DEPTH is the current nesting depth of the query. */
7367 static bool
7368 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7370 tree arg0 = (gimple_call_num_args (stmt) > 0
7371 ? gimple_call_arg (stmt, 0)
7372 : NULL_TREE);
7373 tree arg1 = (gimple_call_num_args (stmt) > 1
7374 ? gimple_call_arg (stmt, 1)
7375 : NULL_TREE);
7376 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7377 arg0, arg1, depth);
7380 /* Return true if the floating-point result of phi STMT is known to have
7381 an integer value. We also allow +Inf, -Inf and NaN to be considered
7382 integer values. Return false for signaling NaN.
7384 DEPTH is the current nesting depth of the query. */
7386 static bool
7387 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7389 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7391 tree arg = gimple_phi_arg_def (stmt, i);
7392 if (!integer_valued_real_single_p (arg, depth + 1))
7393 return false;
7395 return true;
7398 /* Return true if the floating-point value computed by STMT is known
7399 to have an integer value. We also allow +Inf, -Inf and NaN to be
7400 considered integer values. Return false for signaling NaN.
7402 DEPTH is the current nesting depth of the query. */
7404 bool
7405 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7407 switch (gimple_code (stmt))
7409 case GIMPLE_ASSIGN:
7410 return gimple_assign_integer_valued_real_p (stmt, depth);
7411 case GIMPLE_CALL:
7412 return gimple_call_integer_valued_real_p (stmt, depth);
7413 case GIMPLE_PHI:
7414 return gimple_phi_integer_valued_real_p (stmt, depth);
7415 default:
7416 return false;