Daily bump.
[official-gcc.git] / gcc / gimple-fold.c
blobc3fa4cb7cc1b9097b521964604387392910b1139
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2020 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68 #include "varasm.h"
/* The kinds of string-length computation that get_range_strlen can be
   asked to perform.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

/* Forward declaration; defined later in this file.  */
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. abstract origins of inlined functions) never have
     a body we could reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function already inlined everywhere has no standalone body left
	 to reference.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
195 tree
196 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an ADDR_EXPR
     that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Refer to the compound literal's anonymous decl directly.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* Taking the address makes the variable addressable.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* ctor_for_folding returns error_mark_node when the initializer must
     not be used for folding at all.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the folded tree or
   NULL_TREE if no simplification was possible.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  /* Wrappers around a constant operand fold directly via fold_unary /
     fold_ternary.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Loads from constant aggregates may fold to an invariant; stores
     (is_lhs) must keep the reference as-is.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not real value assignments; leave them alone.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Resolve the virtual reference only when the target set is
		   complete and at most one target remains.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to p (with a possible conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it stores to memory or is a call
	 with side effects on memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first one seen walking
	     backwards) inherits the replaced statement's VDEF; earlier
	     stores get fresh virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* A result is expected: gimplify EXPR to an operand and append the
	 assignment to LHS as the final statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    /* No result used: the call degenerates to a no-op.  */
    repl = gimple_build_nop ();
  /* The call's store effect disappears, so release its virtual def.  */
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Preserve the lhs, location and virtual operands of the old call.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
650 /* Return true if VAR is a VAR_DECL or a component thereof. */
652 static bool
653 var_decl_component_p (tree var)
655 tree inner = var;
656 while (handled_component_p (inner))
657 inner = TREE_OPERAND (inner, 0);
658 return (DECL_P (inner)
659 || (TREE_CODE (inner) == MEM_REF
660 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Range information is only available for integral SSA names.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Sizes above SSIZE_MAX are invalid, so intersect SIZE's known range
     with [0, SSIZE_MAX]; if only zero remains, SIZE must be zero.  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. Call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
696 static bool
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
698 tree dest, tree src, enum built_in_function code)
700 gimple *stmt = gsi_stmt (*gsi);
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
703 location_t loc = gimple_location (stmt);
705 /* If the LEN parameter is a constant zero or in range where
706 the only valid value is zero, return DEST. */
707 if (size_must_be_zero_p (len))
709 gimple *repl;
710 if (gimple_call_lhs (stmt))
711 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
720 gsi_replace (gsi, repl, false);
721 return true;
724 /* If SRC and DEST are the same (and not volatile), return
725 DEST{,+LEN,+LEN-1}. */
726 if (operand_equal_p (src, dest, 0))
728 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
729 It's safe and may even be emitted by GCC itself (see bug
730 32667). */
731 unlink_stmt_vdef (stmt);
732 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
733 release_ssa_name (gimple_vdef (stmt));
734 if (!lhs)
736 gsi_replace (gsi, gimple_build_nop (), false);
737 return true;
739 goto done;
741 else
743 /* We cannot (easily) change the type of the copy if it is a storage
744 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
745 modify the storage order of objects (see storage_order_barrier_p). */
746 tree srctype
747 = POINTER_TYPE_P (TREE_TYPE (src))
748 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
749 tree desttype
750 = POINTER_TYPE_P (TREE_TYPE (dest))
751 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
752 tree destvar, srcvar, srcoff;
753 unsigned int src_align, dest_align;
754 unsigned HOST_WIDE_INT tmp_len;
755 const char *tmp_str;
757 /* Build accesses at offset zero with a ref-all character type. */
758 tree off0
759 = build_int_cst (build_pointer_type_for_mode (char_type_node,
760 ptr_mode, true), 0);
762 /* If we can perform the copy efficiently with first doing all loads
763 and then all stores inline it that way. Currently efficiently
764 means that we can load all the memory into a single integer
765 register which is what MOVE_MAX gives us. */
766 src_align = get_pointer_alignment (src);
767 dest_align = get_pointer_alignment (dest);
768 if (tree_fits_uhwi_p (len)
769 && compare_tree_int (len, MOVE_MAX) <= 0
770 /* FIXME: Don't transform copies from strings with known length.
771 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
772 from being handled, and the case was XFAILed for that reason.
773 Now that it is handled and the XFAIL removed, as soon as other
774 strlenopt tests that rely on it for passing are adjusted, this
775 hack can be removed. */
776 && !c_strlen (src, 1)
777 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
778 && memchr (tmp_str, 0, tmp_len) == NULL)
779 && !(srctype
780 && AGGREGATE_TYPE_P (srctype)
781 && TYPE_REVERSE_STORAGE_ORDER (srctype))
782 && !(desttype
783 && AGGREGATE_TYPE_P (desttype)
784 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
786 unsigned ilen = tree_to_uhwi (len);
787 if (pow2p_hwi (ilen))
789 /* Detect out-of-bounds accesses without issuing warnings.
790 Avoid folding out-of-bounds copies but to avoid false
791 positives for unreachable code defer warning until after
792 DCE has worked its magic.
793 -Wrestrict is still diagnosed. */
794 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
795 dest, src, len, len,
796 false, false))
797 if (warning != OPT_Wrestrict)
798 return false;
800 scalar_int_mode mode;
801 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
802 if (type
803 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
804 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
805 /* If the destination pointer is not aligned we must be able
806 to emit an unaligned store. */
807 && (dest_align >= GET_MODE_ALIGNMENT (mode)
808 || !targetm.slow_unaligned_access (mode, dest_align)
809 || (optab_handler (movmisalign_optab, mode)
810 != CODE_FOR_nothing)))
812 tree srctype = type;
813 tree desttype = type;
814 if (src_align < GET_MODE_ALIGNMENT (mode))
815 srctype = build_aligned_type (type, src_align);
816 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
817 tree tem = fold_const_aggregate_ref (srcmem);
818 if (tem)
819 srcmem = tem;
820 else if (src_align < GET_MODE_ALIGNMENT (mode)
821 && targetm.slow_unaligned_access (mode, src_align)
822 && (optab_handler (movmisalign_optab, mode)
823 == CODE_FOR_nothing))
824 srcmem = NULL_TREE;
825 if (srcmem)
827 gimple *new_stmt;
828 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
830 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
831 srcmem
832 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
833 new_stmt);
834 gimple_assign_set_lhs (new_stmt, srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
838 if (dest_align < GET_MODE_ALIGNMENT (mode))
839 desttype = build_aligned_type (type, dest_align);
840 new_stmt
841 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
842 dest, off0),
843 srcmem);
844 gimple_move_vops (new_stmt, stmt);
845 if (!lhs)
847 gsi_replace (gsi, new_stmt, false);
848 return true;
850 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
851 goto done;
857 if (code == BUILT_IN_MEMMOVE)
859 /* Both DEST and SRC must be pointer types.
860 ??? This is what old code did. Is the testing for pointer types
861 really mandatory?
863 If either SRC is readonly or length is 1, we can use memcpy. */
864 if (!dest_align || !src_align)
865 return false;
866 if (readonly_data_expr (src)
867 || (tree_fits_uhwi_p (len)
868 && (MIN (src_align, dest_align) / BITS_PER_UNIT
869 >= tree_to_uhwi (len))))
871 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
872 if (!fn)
873 return false;
874 gimple_call_set_fndecl (stmt, fn);
875 gimple_call_set_arg (stmt, 0, dest);
876 gimple_call_set_arg (stmt, 1, src);
877 fold_stmt (gsi);
878 return true;
881 /* If *src and *dest can't overlap, optimize into memcpy as well. */
882 if (TREE_CODE (src) == ADDR_EXPR
883 && TREE_CODE (dest) == ADDR_EXPR)
885 tree src_base, dest_base, fn;
886 poly_int64 src_offset = 0, dest_offset = 0;
887 poly_uint64 maxsize;
889 srcvar = TREE_OPERAND (src, 0);
890 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
891 if (src_base == NULL)
892 src_base = srcvar;
893 destvar = TREE_OPERAND (dest, 0);
894 dest_base = get_addr_base_and_unit_offset (destvar,
895 &dest_offset);
896 if (dest_base == NULL)
897 dest_base = destvar;
898 if (!poly_int_tree_p (len, &maxsize))
899 maxsize = -1;
900 if (SSA_VAR_P (src_base)
901 && SSA_VAR_P (dest_base))
903 if (operand_equal_p (src_base, dest_base, 0)
904 && ranges_maybe_overlap_p (src_offset, maxsize,
905 dest_offset, maxsize))
906 return false;
908 else if (TREE_CODE (src_base) == MEM_REF
909 && TREE_CODE (dest_base) == MEM_REF)
911 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
912 TREE_OPERAND (dest_base, 0), 0))
913 return false;
914 poly_offset_int full_src_offset
915 = mem_ref_offset (src_base) + src_offset;
916 poly_offset_int full_dest_offset
917 = mem_ref_offset (dest_base) + dest_offset;
918 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
919 full_dest_offset, maxsize))
920 return false;
922 else
923 return false;
925 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
926 if (!fn)
927 return false;
928 gimple_call_set_fndecl (stmt, fn);
929 gimple_call_set_arg (stmt, 0, dest);
930 gimple_call_set_arg (stmt, 1, src);
931 fold_stmt (gsi);
932 return true;
935 /* If the destination and source do not alias optimize into
936 memcpy as well. */
937 if ((is_gimple_min_invariant (dest)
938 || TREE_CODE (dest) == SSA_NAME)
939 && (is_gimple_min_invariant (src)
940 || TREE_CODE (src) == SSA_NAME))
942 ao_ref destr, srcr;
943 ao_ref_init_from_ptr_and_size (&destr, dest, len);
944 ao_ref_init_from_ptr_and_size (&srcr, src, len);
945 if (!refs_may_alias_p_1 (&destr, &srcr, false))
947 tree fn;
948 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
949 if (!fn)
950 return false;
951 gimple_call_set_fndecl (stmt, fn);
952 gimple_call_set_arg (stmt, 0, dest);
953 gimple_call_set_arg (stmt, 1, src);
954 fold_stmt (gsi);
955 return true;
959 return false;
962 if (!tree_fits_shwi_p (len))
963 return false;
964 if (!srctype
965 || (AGGREGATE_TYPE_P (srctype)
966 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
967 return false;
968 if (!desttype
969 || (AGGREGATE_TYPE_P (desttype)
970 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
971 return false;
972 /* In the following try to find a type that is most natural to be
973 used for the memcpy source and destination and that allows
974 the most optimization when memcpy is turned into a plain assignment
975 using that type. In theory we could always use a char[len] type
976 but that only gains us that the destination and source possibly
977 no longer will have their address taken. */
978 if (TREE_CODE (srctype) == ARRAY_TYPE
979 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 srctype = TREE_TYPE (srctype);
981 if (TREE_CODE (desttype) == ARRAY_TYPE
982 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
983 desttype = TREE_TYPE (desttype);
984 if (TREE_ADDRESSABLE (srctype)
985 || TREE_ADDRESSABLE (desttype))
986 return false;
988 /* Make sure we are not copying using a floating-point mode or
989 a type whose size possibly does not match its precision. */
990 if (FLOAT_MODE_P (TYPE_MODE (desttype))
991 || TREE_CODE (desttype) == BOOLEAN_TYPE
992 || TREE_CODE (desttype) == ENUMERAL_TYPE)
993 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
994 if (FLOAT_MODE_P (TYPE_MODE (srctype))
995 || TREE_CODE (srctype) == BOOLEAN_TYPE
996 || TREE_CODE (srctype) == ENUMERAL_TYPE)
997 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
998 if (!srctype)
999 srctype = desttype;
1000 if (!desttype)
1001 desttype = srctype;
1002 if (!srctype)
1003 return false;
1005 src_align = get_pointer_alignment (src);
1006 dest_align = get_pointer_alignment (dest);
1008 /* Choose between src and destination type for the access based
1009 on alignment, whether the access constitutes a register access
1010 and whether it may actually expose a declaration for SSA rewrite
1011 or SRA decomposition. Also try to expose a string constant, we
1012 might be able to concatenate several of them later into a single
1013 string store. */
1014 destvar = NULL_TREE;
1015 srcvar = NULL_TREE;
1016 if (TREE_CODE (dest) == ADDR_EXPR
1017 && var_decl_component_p (TREE_OPERAND (dest, 0))
1018 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1019 && dest_align >= TYPE_ALIGN (desttype)
1020 && (is_gimple_reg_type (desttype)
1021 || src_align >= TYPE_ALIGN (desttype)))
1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1023 else if (TREE_CODE (src) == ADDR_EXPR
1024 && var_decl_component_p (TREE_OPERAND (src, 0))
1025 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1026 && src_align >= TYPE_ALIGN (srctype)
1027 && (is_gimple_reg_type (srctype)
1028 || dest_align >= TYPE_ALIGN (srctype)))
1029 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1030 /* FIXME: Don't transform copies from strings with known original length.
1031 As soon as strlenopt tests that rely on it for passing are adjusted,
1032 this hack can be removed. */
1033 else if (gimple_call_alloca_for_var_p (stmt)
1034 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1035 && integer_zerop (srcoff)
1036 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1037 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1038 srctype = TREE_TYPE (srcvar);
1039 else
1040 return false;
1042 /* Now that we chose an access type express the other side in
1043 terms of it if the target allows that with respect to alignment
1044 constraints. */
1045 if (srcvar == NULL_TREE)
1047 if (src_align >= TYPE_ALIGN (desttype))
1048 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1049 else
1051 if (STRICT_ALIGNMENT)
1052 return false;
1053 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1054 src_align);
1055 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1058 else if (destvar == NULL_TREE)
1060 if (dest_align >= TYPE_ALIGN (srctype))
1061 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1062 else
1064 if (STRICT_ALIGNMENT)
1065 return false;
1066 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1067 dest_align);
1068 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1072 /* Same as above, detect out-of-bounds accesses without issuing
1073 warnings. Avoid folding out-of-bounds copies but to avoid
1074 false positives for unreachable code defer warning until
1075 after DCE has worked its magic.
1076 -Wrestrict is still diagnosed. */
1077 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1078 dest, src, len, len,
1079 false, false))
1080 if (warning != OPT_Wrestrict)
1081 return false;
1083 gimple *new_stmt;
1084 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1086 tree tem = fold_const_aggregate_ref (srcvar);
1087 if (tem)
1088 srcvar = tem;
1089 if (! is_gimple_min_invariant (srcvar))
1091 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1092 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1093 new_stmt);
1094 gimple_assign_set_lhs (new_stmt, srcvar);
1095 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1096 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 new_stmt = gimple_build_assign (destvar, srcvar);
1099 goto set_vop_and_replace;
1102 /* We get an aggregate copy. If the source is a STRING_CST, then
1103 directly use its type to perform the copy. */
1104 if (TREE_CODE (srcvar) == STRING_CST)
1105 desttype = srctype;
1107 /* Or else, use an unsigned char[] type to perform the copy in order
1108 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1109 types or float modes behavior on copying. */
1110 else
1112 desttype = build_array_type_nelts (unsigned_char_type_node,
1113 tree_to_uhwi (len));
1114 srctype = desttype;
1115 if (src_align > TYPE_ALIGN (srctype))
1116 srctype = build_aligned_type (srctype, src_align);
1117 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1120 if (dest_align > TYPE_ALIGN (desttype))
1121 desttype = build_aligned_type (desttype, dest_align);
1122 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1123 new_stmt = gimple_build_assign (destvar, srcvar);
1125 set_vop_and_replace:
1126 gimple_move_vops (new_stmt, stmt);
1127 if (!lhs)
1129 gsi_replace (gsi, new_stmt, false);
1130 return true;
1132 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1135 done:
1136 gimple_seq stmts = NULL;
1137 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1138 len = NULL_TREE;
1139 else if (code == BUILT_IN_MEMPCPY)
1141 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1142 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1143 TREE_TYPE (dest), dest, len);
1145 else
1146 gcc_unreachable ();
1148 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1149 gimple *repl = gimple_build_assign (lhs, dest);
1150 gsi_replace (gsi, repl, false);
1151 return true;
1154 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1155 to built-in memcmp (a, b, len). */
1157 static bool
1158 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1160 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1162 if (!fn)
1163 return false;
1165 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1167 gimple *stmt = gsi_stmt (*gsi);
1168 tree a = gimple_call_arg (stmt, 0);
1169 tree b = gimple_call_arg (stmt, 1);
1170 tree len = gimple_call_arg (stmt, 2);
1172 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1173 replace_call_with_call_and_fold (gsi, repl);
1175 return true;
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  Return true on success.  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  Note the first two arguments
     are swapped between the two functions.  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  /* NOTE(review): the old statement's fntype is updated to memmove's,
     presumably because the argument order differs from bcopy's and the
     replacement machinery consults it -- confirm against
     replace_call_with_call_and_fold.  */
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1205 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1206 to built-in memset (dest, 0, len). */
1208 static bool
1209 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1211 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1213 if (!fn)
1214 return false;
1216 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1218 gimple *stmt = gsi_stmt (*gsi);
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree len = gimple_call_arg (stmt, 1);
1222 gimple_seq seq = NULL;
1223 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1224 gimple_seq_add_stmt_without_update (&seq, repl);
1225 gsi_replace_with_seq_vops (gsi, seq);
1226 fold_stmt (gsi);
1228 return true;
1231 /* Fold function call to builtin memset or bzero at *GSI setting the
1232 memory of size LEN to VAL. Return whether a simplification was made. */
1234 static bool
1235 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1237 gimple *stmt = gsi_stmt (*gsi);
1238 tree etype;
1239 unsigned HOST_WIDE_INT length, cval;
1241 /* If the LEN parameter is zero, return DEST. */
1242 if (integer_zerop (len))
1244 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1245 return true;
1248 if (! tree_fits_uhwi_p (len))
1249 return false;
1251 if (TREE_CODE (c) != INTEGER_CST)
1252 return false;
1254 tree dest = gimple_call_arg (stmt, 0);
1255 tree var = dest;
1256 if (TREE_CODE (var) != ADDR_EXPR)
1257 return false;
1259 var = TREE_OPERAND (var, 0);
1260 if (TREE_THIS_VOLATILE (var))
1261 return false;
1263 etype = TREE_TYPE (var);
1264 if (TREE_CODE (etype) == ARRAY_TYPE)
1265 etype = TREE_TYPE (etype);
1267 if (!INTEGRAL_TYPE_P (etype)
1268 && !POINTER_TYPE_P (etype))
1269 return NULL_TREE;
1271 if (! var_decl_component_p (var))
1272 return NULL_TREE;
1274 length = tree_to_uhwi (len);
1275 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1276 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1277 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1278 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1279 return NULL_TREE;
1281 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1282 return NULL_TREE;
1284 if (!type_has_mode_precision_p (etype))
1285 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1286 TYPE_UNSIGNED (etype));
1288 if (integer_zerop (c))
1289 cval = 0;
1290 else
1292 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1293 return NULL_TREE;
1295 cval = TREE_INT_CST_LOW (c);
1296 cval &= 0xff;
1297 cval |= cval << 8;
1298 cval |= cval << 16;
1299 cval |= (cval << 31) << 1;
1302 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1303 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1304 gimple_move_vops (store, stmt);
1305 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1306 if (gimple_call_lhs (stmt))
1308 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1309 gsi_replace (gsi, asgn, false);
1311 else
1313 gimple_stmt_iterator gsi2 = *gsi;
1314 gsi_prev (gsi);
1315 gsi_remove (&gsi2, true);
1318 return true;
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Computes
   a length (or, for RKIND == SRK_INT_VALUE, a maximum integer value)
   for ARG and merges it into *PDATA's MINLEN/MAXLEN/MAXBOUND members.
   ELTSIZE is the character element size in bytes (1, 2, or 4).
   Return true on success.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);
      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      /* Subtract one for the terminating nul.  */
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      /* In all of the above cases VAL (if set) was derived from the
	 object's type or size rather than from actual string contents.  */
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }

	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  /* Non-SSA operands are handled by the tree-level helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge the lengths of both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2].  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the
     conservative MAXBOUND to SIZE_MAX.  Otherwise leave it null (if
     it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  /* An all-ones MAXLEN means the length is unbounded.  */
  return !integer_all_onesp (pdata->maxlen);
}
1786 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1787 For ARG of pointer types, NONSTR indicates if the caller is prepared
1788 to handle unterminated strings. For integer ARG and when RKIND ==
1789 SRK_INT_VALUE, NONSTR must be null.
1791 If an unterminated array is discovered and our caller handles
1792 unterminated arrays, then bubble up the offending DECL and
1793 return the maximum size. Otherwise return NULL. */
1795 static tree
1796 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1798 /* A non-null NONSTR is meaningless when determining the maximum
1799 value of an integer ARG. */
1800 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1801 /* ARG must have an integral type when RKIND says so. */
1802 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1804 bitmap visited = NULL;
1806 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1807 is unbounded. */
1808 c_strlen_data lendata = { };
1809 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1810 lendata.maxlen = NULL_TREE;
1811 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1812 lendata.maxlen = NULL_TREE;
1814 if (visited)
1815 BITMAP_FREE (visited);
1817 if (nonstr)
1819 /* For callers prepared to handle unterminated arrays set
1820 *NONSTR to point to the declaration of the array and return
1821 the maximum length/size. */
1822 *nonstr = lendata.decl;
1823 return lendata.maxlen;
1826 /* Fail if the constant array isn't nul-terminated. */
1827 return lendata.decl ? NULL_TREE : lendata.maxlen;
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If the length of SRC can be computed, transform the call into a
   call to memcpy with an explicit length.  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The transformation below trades the strcpy call for a memcpy call
     plus an inserted length computation, so skip it at -Os.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
      /* Suppress repeated diagnostics for this statement.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC,
   and LEN.  When the source length is known and no larger than LEN,
   transform the call into a call to memcpy.  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  IS_STRRCHR selects which of
   the two built-ins is being folded.  Return true on success.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The transformations below produce a value; without a LHS there is
     nothing to simplify.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      /* Both arguments are constant: evaluate the search at compile
	 time using the host strchr/strrchr.  */
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen transformation below is only valid when searching
     for the terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2059 /* Fold function call to builtin strstr.
2060 If both arguments are constant, evaluate and fold the result,
2061 additionally fold strstr (x, "") into x and strstr (x, "c")
2062 into strchr (x, 'c'). */
2063 static bool
2064 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2066 gimple *stmt = gsi_stmt (*gsi);
2067 if (!gimple_call_lhs (stmt))
2068 return false;
2070 tree haystack = gimple_call_arg (stmt, 0);
2071 tree needle = gimple_call_arg (stmt, 1);
2073 /* Avoid folding if either argument is not a nul-terminated array.
2074 Defer warning until later. */
2075 if (!check_nul_terminated_array (NULL_TREE, haystack)
2076 || !check_nul_terminated_array (NULL_TREE, needle))
2077 return false;
2079 const char *q = c_getstr (needle);
2080 if (q == NULL)
2081 return false;
2083 if (const char *p = c_getstr (haystack))
2085 const char *r = strstr (p, q);
2087 if (r == NULL)
2089 replace_call_with_value (gsi, integer_zero_node);
2090 return true;
2093 tree len = build_int_cst (size_type_node, r - p);
2094 gimple_seq stmts = NULL;
2095 gimple *new_stmt
2096 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2097 haystack, len);
2098 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2099 gsi_replace_with_seq_vops (gsi, stmts);
2100 return true;
2103 /* For strstr (x, "") return x. */
2104 if (q[0] == '\0')
2106 replace_call_with_value (gsi, haystack);
2107 return true;
2110 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2111 if (q[1] == '\0')
2113 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2114 if (strchr_fn)
2116 tree c = build_int_cst (integer_type_node, q[0]);
2117 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2118 replace_call_with_call_and_fold (gsi, repl);
2119 return true;
2123 return false;
2126 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2127 to the call.
2129 Return NULL_TREE if no simplification was possible, otherwise return the
2130 simplified form of the call as a tree.
2132 The simplified form may be a constant or other expression which
2133 computes the same value, but in a more efficient manner (including
2134 calls to other builtin functions).
2136 The call may contain arguments which need to be evaluated, but
2137 which are not useful to determine the result of the call. In
2138 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2139 COMPOUND_EXPR will be an argument which must be evaluated.
2140 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2141 COMPOUND_EXPR in the chain will contain the tree for the simplified
2142 form of the builtin function call. */
2144 static bool
2145 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2147 gimple *stmt = gsi_stmt (*gsi);
2148 location_t loc = gimple_location (stmt);
2150 const char *p = c_getstr (src);
2152 /* If the string length is zero, return the dst parameter. */
2153 if (p && *p == '\0')
2155 replace_call_with_value (gsi, dst);
2156 return true;
2159 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2160 return false;
2162 /* See if we can store by pieces into (dst + strlen(dst)). */
2163 tree newdst;
2164 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2165 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2167 if (!strlen_fn || !memcpy_fn)
2168 return false;
2170 /* If the length of the source string isn't computable don't
2171 split strcat into strlen and memcpy. */
2172 tree len = get_maxval_strlen (src, SRK_STRLEN);
2173 if (! len)
2174 return false;
2176 /* Create strlen (dst). */
2177 gimple_seq stmts = NULL, stmts2;
2178 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2179 gimple_set_location (repl, loc);
2180 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2181 gimple_call_set_lhs (repl, newdst);
2182 gimple_seq_add_stmt_without_update (&stmts, repl);
2184 /* Create (dst p+ strlen (dst)). */
2185 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2186 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2187 gimple_seq_add_seq_without_update (&stmts, stmts2);
2189 len = fold_convert_loc (loc, size_type_node, len);
2190 len = size_binop_loc (loc, PLUS_EXPR, len,
2191 build_int_cst (size_type_node, 1));
2192 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2193 gimple_seq_add_seq_without_update (&stmts, stmts2);
2195 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2196 gimple_seq_add_stmt_without_update (&stmts, repl);
2197 if (gimple_call_lhs (stmt))
2199 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2200 gimple_seq_add_stmt_without_update (&stmts, repl);
2201 gsi_replace_with_seq_vops (gsi, stmts);
2202 /* gsi now points at the assignment to the lhs, get a
2203 stmt iterator to the memcpy call.
2204 ??? We can't use gsi_for_stmt as that doesn't work when the
2205 CFG isn't built yet. */
2206 gimple_stmt_iterator gsi2 = *gsi;
2207 gsi_prev (&gsi2);
2208 fold_stmt (&gsi2);
2210 else
2212 gsi_replace_with_seq_vops (gsi, stmts);
2213 fold_stmt (gsi);
2215 return true;
2218 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2219 are the arguments to the call. */
2221 static bool
2222 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2224 gimple *stmt = gsi_stmt (*gsi);
2225 tree dest = gimple_call_arg (stmt, 0);
2226 tree src = gimple_call_arg (stmt, 1);
2227 tree size = gimple_call_arg (stmt, 2);
2228 tree fn;
2229 const char *p;
2232 p = c_getstr (src);
2233 /* If the SRC parameter is "", return DEST. */
2234 if (p && *p == '\0')
2236 replace_call_with_value (gsi, dest);
2237 return true;
2240 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2241 return false;
2243 /* If __builtin_strcat_chk is used, assume strcat is available. */
2244 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2245 if (!fn)
2246 return false;
2248 gimple *repl = gimple_build_call (fn, 2, dest, src);
2249 replace_call_with_call_and_fold (gsi, repl);
2250 return true;
2253 /* Simplify a call to the strncat builtin. */
2255 static bool
2256 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2258 gimple *stmt = gsi_stmt (*gsi);
2259 tree dst = gimple_call_arg (stmt, 0);
2260 tree src = gimple_call_arg (stmt, 1);
2261 tree len = gimple_call_arg (stmt, 2);
2263 const char *p = c_getstr (src);
2265 /* If the requested length is zero, or the src parameter string
2266 length is zero, return the dst parameter. */
2267 if (integer_zerop (len) || (p && *p == '\0'))
2269 replace_call_with_value (gsi, dst);
2270 return true;
2273 if (TREE_CODE (len) != INTEGER_CST || !p)
2274 return false;
2276 unsigned srclen = strlen (p);
2278 int cmpsrc = compare_tree_int (len, srclen);
2280 /* Return early if the requested len is less than the string length.
2281 Warnings will be issued elsewhere later. */
2282 if (cmpsrc < 0)
2283 return false;
2285 unsigned HOST_WIDE_INT dstsize;
2287 bool nowarn = gimple_no_warning_p (stmt);
2289 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2291 int cmpdst = compare_tree_int (len, dstsize);
2293 if (cmpdst >= 0)
2295 tree fndecl = gimple_call_fndecl (stmt);
2297 /* Strncat copies (at most) LEN bytes and always appends
2298 the terminating NUL so the specified bound should never
2299 be equal to (or greater than) the size of the destination.
2300 If it is, the copy could overflow. */
2301 location_t loc = gimple_location (stmt);
2302 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2303 cmpdst == 0
2304 ? G_("%G%qD specified bound %E equals "
2305 "destination size")
2306 : G_("%G%qD specified bound %E exceeds "
2307 "destination size %wu"),
2308 stmt, fndecl, len, dstsize);
2309 if (nowarn)
2310 gimple_set_no_warning (stmt, true);
2314 if (!nowarn && cmpsrc == 0)
2316 tree fndecl = gimple_call_fndecl (stmt);
2317 location_t loc = gimple_location (stmt);
2319 /* To avoid possible overflow the specified bound should also
2320 not be equal to the length of the source, even when the size
2321 of the destination is unknown (it's not an uncommon mistake
2322 to specify as the bound to strncpy the length of the source). */
2323 if (warning_at (loc, OPT_Wstringop_overflow_,
2324 "%G%qD specified bound %E equals source length",
2325 stmt, fndecl, len))
2326 gimple_set_no_warning (stmt, true);
2329 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2331 /* If the replacement _DECL isn't initialized, don't do the
2332 transformation. */
2333 if (!fn)
2334 return false;
2336 /* Otherwise, emit a call to strcat. */
2337 gcall *repl = gimple_build_call (fn, 2, dst, src);
2338 replace_call_with_call_and_fold (gsi, repl);
2339 return true;
2342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2343 LEN, and SIZE. */
2345 static bool
2346 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2348 gimple *stmt = gsi_stmt (*gsi);
2349 tree dest = gimple_call_arg (stmt, 0);
2350 tree src = gimple_call_arg (stmt, 1);
2351 tree len = gimple_call_arg (stmt, 2);
2352 tree size = gimple_call_arg (stmt, 3);
2353 tree fn;
2354 const char *p;
2356 p = c_getstr (src);
2357 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2358 if ((p && *p == '\0')
2359 || integer_zerop (len))
2361 replace_call_with_value (gsi, dest);
2362 return true;
2365 if (! tree_fits_uhwi_p (size))
2366 return false;
2368 if (! integer_all_onesp (size))
2370 tree src_len = c_strlen (src, 1);
2371 if (src_len
2372 && tree_fits_uhwi_p (src_len)
2373 && tree_fits_uhwi_p (len)
2374 && ! tree_int_cst_lt (len, src_len))
2376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2377 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2378 if (!fn)
2379 return false;
2381 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2382 replace_call_with_call_and_fold (gsi, repl);
2383 return true;
2385 return false;
2388 /* If __builtin_strncat_chk is used, assume strncat is available. */
2389 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2390 if (!fn)
2391 return false;
2393 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2398 /* Build and append gimple statements to STMTS that would load a first
2399 character of a memory location identified by STR. LOC is location
2400 of the statement. */
2402 static tree
2403 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2405 tree var;
2407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2408 tree cst_uchar_ptr_node
2409 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2410 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2412 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2413 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2414 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2416 gimple_assign_set_lhs (stmt, var);
2417 gimple_seq_add_stmt_without_update (stmts, stmt);
2419 return var;
2422 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2424 static bool
2425 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2427 gimple *stmt = gsi_stmt (*gsi);
2428 tree callee = gimple_call_fndecl (stmt);
2429 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2431 tree type = integer_type_node;
2432 tree str1 = gimple_call_arg (stmt, 0);
2433 tree str2 = gimple_call_arg (stmt, 1);
2434 tree lhs = gimple_call_lhs (stmt);
2436 tree bound_node = NULL_TREE;
2437 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2439 /* Handle strncmp and strncasecmp functions. */
2440 if (gimple_call_num_args (stmt) == 3)
2442 bound_node = gimple_call_arg (stmt, 2);
2443 if (tree_fits_uhwi_p (bound_node))
2444 bound = tree_to_uhwi (bound_node);
2447 /* If the BOUND parameter is zero, return zero. */
2448 if (bound == 0)
2450 replace_call_with_value (gsi, integer_zero_node);
2451 return true;
2454 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2455 if (operand_equal_p (str1, str2, 0))
2457 replace_call_with_value (gsi, integer_zero_node);
2458 return true;
2461 /* Initially set to the number of characters, including the terminating
2462 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2463 the array Sx is not terminated by a nul.
2464 For nul-terminated strings then adjusted to their length so that
2465 LENx == NULPOSx holds. */
2466 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2467 const char *p1 = getbyterep (str1, &len1);
2468 const char *p2 = getbyterep (str2, &len2);
2470 /* The position of the terminating nul character if one exists, otherwise
2471 a value greater than LENx. */
2472 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2474 if (p1)
2476 size_t n = strnlen (p1, len1);
2477 if (n < len1)
2478 len1 = nulpos1 = n;
2481 if (p2)
2483 size_t n = strnlen (p2, len2);
2484 if (n < len2)
2485 len2 = nulpos2 = n;
2488 /* For known strings, return an immediate value. */
2489 if (p1 && p2)
2491 int r = 0;
2492 bool known_result = false;
2494 switch (fcode)
2496 case BUILT_IN_STRCMP:
2497 case BUILT_IN_STRCMP_EQ:
2498 if (len1 != nulpos1 || len2 != nulpos2)
2499 break;
2501 r = strcmp (p1, p2);
2502 known_result = true;
2503 break;
2505 case BUILT_IN_STRNCMP:
2506 case BUILT_IN_STRNCMP_EQ:
2508 if (bound == HOST_WIDE_INT_M1U)
2509 break;
2511 /* Reduce the bound to be no more than the length
2512 of the shorter of the two strings, or the sizes
2513 of the unterminated arrays. */
2514 unsigned HOST_WIDE_INT n = bound;
2516 if (len1 == nulpos1 && len1 < n)
2517 n = len1 + 1;
2518 if (len2 == nulpos2 && len2 < n)
2519 n = len2 + 1;
2521 if (MIN (nulpos1, nulpos2) + 1 < n)
2522 break;
2524 r = strncmp (p1, p2, n);
2525 known_result = true;
2526 break;
2528 /* Only handleable situation is where the string are equal (result 0),
2529 which is already handled by operand_equal_p case. */
2530 case BUILT_IN_STRCASECMP:
2531 break;
2532 case BUILT_IN_STRNCASECMP:
2534 if (bound == HOST_WIDE_INT_M1U)
2535 break;
2536 r = strncmp (p1, p2, bound);
2537 if (r == 0)
2538 known_result = true;
2539 break;
2541 default:
2542 gcc_unreachable ();
2545 if (known_result)
2547 replace_call_with_value (gsi, build_cmp_result (type, r));
2548 return true;
2552 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2553 || fcode == BUILT_IN_STRCMP
2554 || fcode == BUILT_IN_STRCMP_EQ
2555 || fcode == BUILT_IN_STRCASECMP;
2557 location_t loc = gimple_location (stmt);
2559 /* If the second arg is "", return *(const unsigned char*)arg1. */
2560 if (p2 && *p2 == '\0' && nonzero_bound)
2562 gimple_seq stmts = NULL;
2563 tree var = gimple_load_first_char (loc, str1, &stmts);
2564 if (lhs)
2566 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2567 gimple_seq_add_stmt_without_update (&stmts, stmt);
2570 gsi_replace_with_seq_vops (gsi, stmts);
2571 return true;
2574 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2575 if (p1 && *p1 == '\0' && nonzero_bound)
2577 gimple_seq stmts = NULL;
2578 tree var = gimple_load_first_char (loc, str2, &stmts);
2580 if (lhs)
2582 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2583 stmt = gimple_build_assign (c, NOP_EXPR, var);
2584 gimple_seq_add_stmt_without_update (&stmts, stmt);
2586 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2587 gimple_seq_add_stmt_without_update (&stmts, stmt);
2590 gsi_replace_with_seq_vops (gsi, stmts);
2591 return true;
2594 /* If BOUND is one, return an expression corresponding to
2595 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2596 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2598 gimple_seq stmts = NULL;
2599 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2600 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2602 if (lhs)
2604 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2605 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2606 gimple_seq_add_stmt_without_update (&stmts, convert1);
2608 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2609 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2610 gimple_seq_add_stmt_without_update (&stmts, convert2);
2612 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2613 gimple_seq_add_stmt_without_update (&stmts, stmt);
2616 gsi_replace_with_seq_vops (gsi, stmts);
2617 return true;
2620 /* If BOUND is greater than the length of one constant string,
2621 and the other argument is also a nul-terminated string, replace
2622 strncmp with strcmp. */
2623 if (fcode == BUILT_IN_STRNCMP
2624 && bound > 0 && bound < HOST_WIDE_INT_M1U
2625 && ((p2 && len2 < bound && len2 == nulpos2)
2626 || (p1 && len1 < bound && len1 == nulpos1)))
2628 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2629 if (!fn)
2630 return false;
2631 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2632 replace_call_with_call_and_fold (gsi, repl);
2633 return true;
2636 return false;
2639 /* Fold a call to the memchr pointed by GSI iterator. */
2641 static bool
2642 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2644 gimple *stmt = gsi_stmt (*gsi);
2645 tree lhs = gimple_call_lhs (stmt);
2646 tree arg1 = gimple_call_arg (stmt, 0);
2647 tree arg2 = gimple_call_arg (stmt, 1);
2648 tree len = gimple_call_arg (stmt, 2);
2650 /* If the LEN parameter is zero, return zero. */
2651 if (integer_zerop (len))
2653 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2654 return true;
2657 char c;
2658 if (TREE_CODE (arg2) != INTEGER_CST
2659 || !tree_fits_uhwi_p (len)
2660 || !target_char_cst_p (arg2, &c))
2661 return false;
2663 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2664 unsigned HOST_WIDE_INT string_length;
2665 const char *p1 = getbyterep (arg1, &string_length);
2667 if (p1)
2669 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2670 if (r == NULL)
2672 tree mem_size, offset_node;
2673 byte_representation (arg1, &offset_node, &mem_size, NULL);
2674 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2675 ? 0 : tree_to_uhwi (offset_node);
2676 /* MEM_SIZE is the size of the array the string literal
2677 is stored in. */
2678 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2679 gcc_checking_assert (string_length <= string_size);
2680 if (length <= string_size)
2682 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2683 return true;
2686 else
2688 unsigned HOST_WIDE_INT offset = r - p1;
2689 gimple_seq stmts = NULL;
2690 if (lhs != NULL_TREE)
2692 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2693 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2694 arg1, offset_cst);
2695 gimple_seq_add_stmt_without_update (&stmts, stmt);
2697 else
2698 gimple_seq_add_stmt_without_update (&stmts,
2699 gimple_build_nop ());
2701 gsi_replace_with_seq_vops (gsi, stmts);
2702 return true;
2706 return false;
2709 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2710 to the call. IGNORE is true if the value returned
2711 by the builtin will be ignored. UNLOCKED is true is true if this
2712 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2713 the known length of the string. Return NULL_TREE if no simplification
2714 was possible. */
2716 static bool
2717 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2718 tree arg0, tree arg1,
2719 bool unlocked)
2721 gimple *stmt = gsi_stmt (*gsi);
2723 /* If we're using an unlocked function, assume the other unlocked
2724 functions exist explicitly. */
2725 tree const fn_fputc = (unlocked
2726 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2727 : builtin_decl_implicit (BUILT_IN_FPUTC));
2728 tree const fn_fwrite = (unlocked
2729 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2730 : builtin_decl_implicit (BUILT_IN_FWRITE));
2732 /* If the return value is used, don't do the transformation. */
2733 if (gimple_call_lhs (stmt))
2734 return false;
2736 /* Get the length of the string passed to fputs. If the length
2737 can't be determined, punt. */
2738 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2739 if (!len
2740 || TREE_CODE (len) != INTEGER_CST)
2741 return false;
2743 switch (compare_tree_int (len, 1))
2745 case -1: /* length is 0, delete the call entirely . */
2746 replace_call_with_value (gsi, integer_zero_node);
2747 return true;
2749 case 0: /* length is 1, call fputc. */
2751 const char *p = c_getstr (arg0);
2752 if (p != NULL)
2754 if (!fn_fputc)
2755 return false;
2757 gimple *repl = gimple_build_call (fn_fputc, 2,
2758 build_int_cst
2759 (integer_type_node, p[0]), arg1);
2760 replace_call_with_call_and_fold (gsi, repl);
2761 return true;
2764 /* FALLTHROUGH */
2765 case 1: /* length is greater than 1, call fwrite. */
2767 /* If optimizing for size keep fputs. */
2768 if (optimize_function_for_size_p (cfun))
2769 return false;
2770 /* New argument list transforming fputs(string, stream) to
2771 fwrite(string, 1, len, stream). */
2772 if (!fn_fwrite)
2773 return false;
2775 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2776 size_one_node, len, arg1);
2777 replace_call_with_call_and_fold (gsi, repl);
2778 return true;
2780 default:
2781 gcc_unreachable ();
2783 return false;
2786 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2787 DEST, SRC, LEN, and SIZE are the arguments to the call.
2788 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2789 code of the builtin. If MAXLEN is not NULL, it is maximum length
2790 passed as third argument. */
2792 static bool
2793 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2794 tree dest, tree src, tree len, tree size,
2795 enum built_in_function fcode)
2797 gimple *stmt = gsi_stmt (*gsi);
2798 location_t loc = gimple_location (stmt);
2799 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2800 tree fn;
2802 /* If SRC and DEST are the same (and not volatile), return DEST
2803 (resp. DEST+LEN for __mempcpy_chk). */
2804 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2806 if (fcode != BUILT_IN_MEMPCPY_CHK)
2808 replace_call_with_value (gsi, dest);
2809 return true;
2811 else
2813 gimple_seq stmts = NULL;
2814 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2815 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2816 TREE_TYPE (dest), dest, len);
2817 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2818 replace_call_with_value (gsi, temp);
2819 return true;
2823 if (! tree_fits_uhwi_p (size))
2824 return false;
2826 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2827 if (! integer_all_onesp (size))
2829 if (! tree_fits_uhwi_p (len))
2831 /* If LEN is not constant, try MAXLEN too.
2832 For MAXLEN only allow optimizing into non-_ocs function
2833 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2834 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2836 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2838 /* (void) __mempcpy_chk () can be optimized into
2839 (void) __memcpy_chk (). */
2840 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2841 if (!fn)
2842 return false;
2844 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2845 replace_call_with_call_and_fold (gsi, repl);
2846 return true;
2848 return false;
2851 else
2852 maxlen = len;
2854 if (tree_int_cst_lt (size, maxlen))
2855 return false;
2858 fn = NULL_TREE;
2859 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2860 mem{cpy,pcpy,move,set} is available. */
2861 switch (fcode)
2863 case BUILT_IN_MEMCPY_CHK:
2864 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2865 break;
2866 case BUILT_IN_MEMPCPY_CHK:
2867 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2868 break;
2869 case BUILT_IN_MEMMOVE_CHK:
2870 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2871 break;
2872 case BUILT_IN_MEMSET_CHK:
2873 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2874 break;
2875 default:
2876 break;
2879 if (!fn)
2880 return false;
2882 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2883 replace_call_with_call_and_fold (gsi, repl);
2884 return true;
2887 /* Fold a call to the __st[rp]cpy_chk builtin.
2888 DEST, SRC, and SIZE are the arguments to the call.
2889 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2890 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2891 strings passed as second argument. */
2893 static bool
2894 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2895 tree dest,
2896 tree src, tree size,
2897 enum built_in_function fcode)
2899 gimple *stmt = gsi_stmt (*gsi);
2900 location_t loc = gimple_location (stmt);
2901 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2902 tree len, fn;
2904 /* If SRC and DEST are the same (and not volatile), return DEST. */
2905 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2907 /* Issue -Wrestrict unless the pointers are null (those do
2908 not point to objects and so do not indicate an overlap;
2909 such calls could be the result of sanitization and jump
2910 threading). */
2911 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2913 tree func = gimple_call_fndecl (stmt);
2915 warning_at (loc, OPT_Wrestrict,
2916 "%qD source argument is the same as destination",
2917 func);
2920 replace_call_with_value (gsi, dest);
2921 return true;
2924 if (! tree_fits_uhwi_p (size))
2925 return false;
2927 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2928 if (! integer_all_onesp (size))
2930 len = c_strlen (src, 1);
2931 if (! len || ! tree_fits_uhwi_p (len))
2933 /* If LEN is not constant, try MAXLEN too.
2934 For MAXLEN only allow optimizing into non-_ocs function
2935 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2936 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2938 if (fcode == BUILT_IN_STPCPY_CHK)
2940 if (! ignore)
2941 return false;
2943 /* If return value of __stpcpy_chk is ignored,
2944 optimize into __strcpy_chk. */
2945 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2946 if (!fn)
2947 return false;
2949 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2950 replace_call_with_call_and_fold (gsi, repl);
2951 return true;
2954 if (! len || TREE_SIDE_EFFECTS (len))
2955 return false;
2957 /* If c_strlen returned something, but not a constant,
2958 transform __strcpy_chk into __memcpy_chk. */
2959 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2960 if (!fn)
2961 return false;
2963 gimple_seq stmts = NULL;
2964 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2965 len = gimple_convert (&stmts, loc, size_type_node, len);
2966 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2967 build_int_cst (size_type_node, 1));
2968 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2969 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2970 replace_call_with_call_and_fold (gsi, repl);
2971 return true;
2974 else
2975 maxlen = len;
2977 if (! tree_int_cst_lt (maxlen, size))
2978 return false;
2981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2982 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2983 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2984 if (!fn)
2985 return false;
2987 gimple *repl = gimple_build_call (fn, 2, dest, src);
2988 replace_call_with_call_and_fold (gsi, repl);
2989 return true;
2992 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2993 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2994 length passed as third argument. IGNORE is true if return value can be
2995 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2997 static bool
2998 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2999 tree dest, tree src,
3000 tree len, tree size,
3001 enum built_in_function fcode)
3003 gimple *stmt = gsi_stmt (*gsi);
3004 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3005 tree fn;
3007 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3009 /* If return value of __stpncpy_chk is ignored,
3010 optimize into __strncpy_chk. */
3011 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3012 if (fn)
3014 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3015 replace_call_with_call_and_fold (gsi, repl);
3016 return true;
3020 if (! tree_fits_uhwi_p (size))
3021 return false;
3023 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3024 if (! integer_all_onesp (size))
3026 if (! tree_fits_uhwi_p (len))
3028 /* If LEN is not constant, try MAXLEN too.
3029 For MAXLEN only allow optimizing into non-_ocs function
3030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3031 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3032 return false;
3034 else
3035 maxlen = len;
3037 if (tree_int_cst_lt (size, maxlen))
3038 return false;
3041 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3042 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3043 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3044 if (!fn)
3045 return false;
3047 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3048 replace_call_with_call_and_fold (gsi, repl);
3049 return true;
3052 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3053 Return NULL_TREE if no simplification can be made. */
3055 static bool
3056 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3058 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3059 location_t loc = gimple_location (stmt);
3060 tree dest = gimple_call_arg (stmt, 0);
3061 tree src = gimple_call_arg (stmt, 1);
3062 tree fn, lenp1;
3064 /* If the result is unused, replace stpcpy with strcpy. */
3065 if (gimple_call_lhs (stmt) == NULL_TREE)
3067 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3068 if (!fn)
3069 return false;
3070 gimple_call_set_fndecl (stmt, fn);
3071 fold_stmt (gsi);
3072 return true;
3075 /* Set to non-null if ARG refers to an unterminated array. */
3076 c_strlen_data data = { };
3077 /* The size of the unterminated array if SRC referes to one. */
3078 tree size;
3079 /* True if the size is exact/constant, false if it's the lower bound
3080 of a range. */
3081 bool exact;
3082 tree len = c_strlen (src, 1, &data, 1);
3083 if (!len
3084 || TREE_CODE (len) != INTEGER_CST)
3086 data.decl = unterminated_array (src, &size, &exact);
3087 if (!data.decl)
3088 return false;
3091 if (data.decl)
3093 /* Avoid folding calls with unterminated arrays. */
3094 if (!gimple_no_warning_p (stmt))
3095 warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
3096 exact);
3097 gimple_set_no_warning (stmt, true);
3098 return false;
3101 if (optimize_function_for_size_p (cfun)
3102 /* If length is zero it's small enough. */
3103 && !integer_zerop (len))
3104 return false;
3106 /* If the source has a known length replace stpcpy with memcpy. */
3107 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3108 if (!fn)
3109 return false;
3111 gimple_seq stmts = NULL;
3112 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3113 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3114 tem, build_int_cst (size_type_node, 1));
3115 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3116 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3117 gimple_move_vops (repl, stmt);
3118 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3119 /* Replace the result with dest + len. */
3120 stmts = NULL;
3121 tem = gimple_convert (&stmts, loc, sizetype, len);
3122 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3123 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3124 POINTER_PLUS_EXPR, dest, tem);
3125 gsi_replace (gsi, ret, false);
3126 /* Finally fold the memcpy call. */
3127 gimple_stmt_iterator gsi2 = *gsi;
3128 gsi_prev (&gsi2);
3129 fold_stmt (&gsi2);
3130 return true;
3133 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3134 NULL_TREE if a normal call should be emitted rather than expanding
3135 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3136 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3137 passed as second argument. */
3139 static bool
3140 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3141 enum built_in_function fcode)
3143 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3144 tree dest, size, len, fn, fmt, flag;
3145 const char *fmt_str;
3147 /* Verify the required arguments in the original call. */
3148 if (gimple_call_num_args (stmt) < 5)
3149 return false;
3151 dest = gimple_call_arg (stmt, 0);
3152 len = gimple_call_arg (stmt, 1);
3153 flag = gimple_call_arg (stmt, 2);
3154 size = gimple_call_arg (stmt, 3);
3155 fmt = gimple_call_arg (stmt, 4);
3157 if (! tree_fits_uhwi_p (size))
3158 return false;
3160 if (! integer_all_onesp (size))
3162 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3163 if (! tree_fits_uhwi_p (len))
3165 /* If LEN is not constant, try MAXLEN too.
3166 For MAXLEN only allow optimizing into non-_ocs function
3167 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3168 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3169 return false;
3171 else
3172 maxlen = len;
3174 if (tree_int_cst_lt (size, maxlen))
3175 return false;
3178 if (!init_target_chars ())
3179 return false;
3181 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3182 or if format doesn't contain % chars or is "%s". */
3183 if (! integer_zerop (flag))
3185 fmt_str = c_getstr (fmt);
3186 if (fmt_str == NULL)
3187 return false;
3188 if (strchr (fmt_str, target_percent) != NULL
3189 && strcmp (fmt_str, target_percent_s))
3190 return false;
3193 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3194 available. */
3195 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3196 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3197 if (!fn)
3198 return false;
3200 /* Replace the called function and the first 5 argument by 3 retaining
3201 trailing varargs. */
3202 gimple_call_set_fndecl (stmt, fn);
3203 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3204 gimple_call_set_arg (stmt, 0, dest);
3205 gimple_call_set_arg (stmt, 1, len);
3206 gimple_call_set_arg (stmt, 2, fmt);
3207 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3208 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3209 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3210 fold_stmt (gsi);
3211 return true;
3214 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3215 Return NULL_TREE if a normal call should be emitted rather than
3216 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3217 or BUILT_IN_VSPRINTF_CHK. */
3219 static bool
3220 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3221 enum built_in_function fcode)
3223 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3224 tree dest, size, len, fn, fmt, flag;
3225 const char *fmt_str;
3226 unsigned nargs = gimple_call_num_args (stmt);
3228 /* Verify the required arguments in the original call. */
3229 if (nargs < 4)
3230 return false;
3231 dest = gimple_call_arg (stmt, 0);
3232 flag = gimple_call_arg (stmt, 1);
3233 size = gimple_call_arg (stmt, 2);
3234 fmt = gimple_call_arg (stmt, 3);
3236 if (! tree_fits_uhwi_p (size))
3237 return false;
3239 len = NULL_TREE;
3241 if (!init_target_chars ())
3242 return false;
3244 /* Check whether the format is a literal string constant. */
3245 fmt_str = c_getstr (fmt);
3246 if (fmt_str != NULL)
3248 /* If the format doesn't contain % args or %%, we know the size. */
3249 if (strchr (fmt_str, target_percent) == 0)
3251 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3252 len = build_int_cstu (size_type_node, strlen (fmt_str));
3254 /* If the format is "%s" and first ... argument is a string literal,
3255 we know the size too. */
3256 else if (fcode == BUILT_IN_SPRINTF_CHK
3257 && strcmp (fmt_str, target_percent_s) == 0)
3259 tree arg;
3261 if (nargs == 5)
3263 arg = gimple_call_arg (stmt, 4);
3264 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3266 len = c_strlen (arg, 1);
3267 if (! len || ! tree_fits_uhwi_p (len))
3268 len = NULL_TREE;
3274 if (! integer_all_onesp (size))
3276 if (! len || ! tree_int_cst_lt (len, size))
3277 return false;
3280 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3281 or if format doesn't contain % chars or is "%s". */
3282 if (! integer_zerop (flag))
3284 if (fmt_str == NULL)
3285 return false;
3286 if (strchr (fmt_str, target_percent) != NULL
3287 && strcmp (fmt_str, target_percent_s))
3288 return false;
3291 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3292 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3293 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3294 if (!fn)
3295 return false;
3297 /* Replace the called function and the first 4 argument by 2 retaining
3298 trailing varargs. */
3299 gimple_call_set_fndecl (stmt, fn);
3300 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3301 gimple_call_set_arg (stmt, 0, dest);
3302 gimple_call_set_arg (stmt, 1, fmt);
3303 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3304 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3305 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3306 fold_stmt (gsi);
3307 return true;
3310 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3311 ORIG may be null if this is a 2-argument call. We don't attempt to
3312 simplify calls with more than 3 arguments.
3314 Return true if simplification was possible, otherwise false. */
3316 bool
3317 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3319 gimple *stmt = gsi_stmt (*gsi);
3320 tree dest = gimple_call_arg (stmt, 0);
3321 tree fmt = gimple_call_arg (stmt, 1);
3322 tree orig = NULL_TREE;
3323 const char *fmt_str = NULL;
3325 /* Verify the required arguments in the original call. We deal with two
3326 types of sprintf() calls: 'sprintf (str, fmt)' and
3327 'sprintf (dest, "%s", orig)'. */
3328 if (gimple_call_num_args (stmt) > 3)
3329 return false;
3331 if (gimple_call_num_args (stmt) == 3)
3332 orig = gimple_call_arg (stmt, 2);
3334 /* Check whether the format is a literal string constant. */
3335 fmt_str = c_getstr (fmt);
3336 if (fmt_str == NULL)
3337 return false;
3339 if (!init_target_chars ())
3340 return false;
3342 /* If the format doesn't contain % args or %%, use strcpy. */
3343 if (strchr (fmt_str, target_percent) == NULL)
3345 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3347 if (!fn)
3348 return false;
3350 /* Don't optimize sprintf (buf, "abc", ptr++). */
3351 if (orig)
3352 return false;
3354 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3355 'format' is known to contain no % formats. */
3356 gimple_seq stmts = NULL;
3357 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3359 /* Propagate the NO_WARNING bit to avoid issuing the same
3360 warning more than once. */
3361 if (gimple_no_warning_p (stmt))
3362 gimple_set_no_warning (repl, true);
3364 gimple_seq_add_stmt_without_update (&stmts, repl);
3365 if (tree lhs = gimple_call_lhs (stmt))
3367 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3368 strlen (fmt_str)));
3369 gimple_seq_add_stmt_without_update (&stmts, repl);
3370 gsi_replace_with_seq_vops (gsi, stmts);
3371 /* gsi now points at the assignment to the lhs, get a
3372 stmt iterator to the memcpy call.
3373 ??? We can't use gsi_for_stmt as that doesn't work when the
3374 CFG isn't built yet. */
3375 gimple_stmt_iterator gsi2 = *gsi;
3376 gsi_prev (&gsi2);
3377 fold_stmt (&gsi2);
3379 else
3381 gsi_replace_with_seq_vops (gsi, stmts);
3382 fold_stmt (gsi);
3384 return true;
3387 /* If the format is "%s", use strcpy if the result isn't used. */
3388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3390 tree fn;
3391 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3393 if (!fn)
3394 return false;
3396 /* Don't crash on sprintf (str1, "%s"). */
3397 if (!orig)
3398 return false;
3400 tree orig_len = NULL_TREE;
3401 if (gimple_call_lhs (stmt))
3403 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3404 if (!orig_len)
3405 return false;
3408 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3409 gimple_seq stmts = NULL;
3410 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3412 /* Propagate the NO_WARNING bit to avoid issuing the same
3413 warning more than once. */
3414 if (gimple_no_warning_p (stmt))
3415 gimple_set_no_warning (repl, true);
3417 gimple_seq_add_stmt_without_update (&stmts, repl);
3418 if (tree lhs = gimple_call_lhs (stmt))
3420 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3421 TREE_TYPE (orig_len)))
3422 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3423 repl = gimple_build_assign (lhs, orig_len);
3424 gimple_seq_add_stmt_without_update (&stmts, repl);
3425 gsi_replace_with_seq_vops (gsi, stmts);
3426 /* gsi now points at the assignment to the lhs, get a
3427 stmt iterator to the memcpy call.
3428 ??? We can't use gsi_for_stmt as that doesn't work when the
3429 CFG isn't built yet. */
3430 gimple_stmt_iterator gsi2 = *gsi;
3431 gsi_prev (&gsi2);
3432 fold_stmt (&gsi2);
3434 else
3436 gsi_replace_with_seq_vops (gsi, stmts);
3437 fold_stmt (gsi);
3439 return true;
3441 return false;
3444 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3445 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3446 attempt to simplify calls with more than 4 arguments.
3448 Return true if simplification was possible, otherwise false. */
3450 bool
3451 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3453 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3454 tree dest = gimple_call_arg (stmt, 0);
3455 tree destsize = gimple_call_arg (stmt, 1);
3456 tree fmt = gimple_call_arg (stmt, 2);
3457 tree orig = NULL_TREE;
3458 const char *fmt_str = NULL;
3460 if (gimple_call_num_args (stmt) > 4)
3461 return false;
3463 if (gimple_call_num_args (stmt) == 4)
3464 orig = gimple_call_arg (stmt, 3);
3466 if (!tree_fits_uhwi_p (destsize))
3467 return false;
3468 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3470 /* Check whether the format is a literal string constant. */
3471 fmt_str = c_getstr (fmt);
3472 if (fmt_str == NULL)
3473 return false;
3475 if (!init_target_chars ())
3476 return false;
3478 /* If the format doesn't contain % args or %%, use strcpy. */
3479 if (strchr (fmt_str, target_percent) == NULL)
3481 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3482 if (!fn)
3483 return false;
3485 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3486 if (orig)
3487 return false;
3489 /* We could expand this as
3490 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3491 or to
3492 memcpy (str, fmt_with_nul_at_cstm1, cst);
3493 but in the former case that might increase code size
3494 and in the latter case grow .rodata section too much.
3495 So punt for now. */
3496 size_t len = strlen (fmt_str);
3497 if (len >= destlen)
3498 return false;
3500 gimple_seq stmts = NULL;
3501 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3502 gimple_seq_add_stmt_without_update (&stmts, repl);
3503 if (tree lhs = gimple_call_lhs (stmt))
3505 repl = gimple_build_assign (lhs,
3506 build_int_cst (TREE_TYPE (lhs), len));
3507 gimple_seq_add_stmt_without_update (&stmts, repl);
3508 gsi_replace_with_seq_vops (gsi, stmts);
3509 /* gsi now points at the assignment to the lhs, get a
3510 stmt iterator to the memcpy call.
3511 ??? We can't use gsi_for_stmt as that doesn't work when the
3512 CFG isn't built yet. */
3513 gimple_stmt_iterator gsi2 = *gsi;
3514 gsi_prev (&gsi2);
3515 fold_stmt (&gsi2);
3517 else
3519 gsi_replace_with_seq_vops (gsi, stmts);
3520 fold_stmt (gsi);
3522 return true;
3525 /* If the format is "%s", use strcpy if the result isn't used. */
3526 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3528 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3529 if (!fn)
3530 return false;
3532 /* Don't crash on snprintf (str1, cst, "%s"). */
3533 if (!orig)
3534 return false;
3536 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3537 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3538 return false;
3540 /* We could expand this as
3541 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3542 or to
3543 memcpy (str1, str2_with_nul_at_cstm1, cst);
3544 but in the former case that might increase code size
3545 and in the latter case grow .rodata section too much.
3546 So punt for now. */
3547 if (compare_tree_int (orig_len, destlen) >= 0)
3548 return false;
3550 /* Convert snprintf (str1, cst, "%s", str2) into
3551 strcpy (str1, str2) if strlen (str2) < cst. */
3552 gimple_seq stmts = NULL;
3553 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3554 gimple_seq_add_stmt_without_update (&stmts, repl);
3555 if (tree lhs = gimple_call_lhs (stmt))
3557 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3558 TREE_TYPE (orig_len)))
3559 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3560 repl = gimple_build_assign (lhs, orig_len);
3561 gimple_seq_add_stmt_without_update (&stmts, repl);
3562 gsi_replace_with_seq_vops (gsi, stmts);
3563 /* gsi now points at the assignment to the lhs, get a
3564 stmt iterator to the memcpy call.
3565 ??? We can't use gsi_for_stmt as that doesn't work when the
3566 CFG isn't built yet. */
3567 gimple_stmt_iterator gsi2 = *gsi;
3568 gsi_prev (&gsi2);
3569 fold_stmt (&gsi2);
3571 else
3573 gsi_replace_with_seq_vops (gsi, stmts);
3574 fold_stmt (gsi);
3576 return true;
3578 return false;
3581 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3582 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3583 more than 3 arguments, and ARG may be null in the 2-argument case.
3585 Return NULL_TREE if no simplification was possible, otherwise return the
3586 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3587 code of the function to be simplified. */
3589 static bool
3590 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3591 tree fp, tree fmt, tree arg,
3592 enum built_in_function fcode)
3594 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3595 tree fn_fputc, fn_fputs;
3596 const char *fmt_str = NULL;
3598 /* If the return value is used, don't do the transformation. */
3599 if (gimple_call_lhs (stmt) != NULL_TREE)
3600 return false;
3602 /* Check whether the format is a literal string constant. */
3603 fmt_str = c_getstr (fmt);
3604 if (fmt_str == NULL)
3605 return false;
3607 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3609 /* If we're using an unlocked function, assume the other
3610 unlocked functions exist explicitly. */
3611 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3612 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3614 else
3616 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3617 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3620 if (!init_target_chars ())
3621 return false;
3623 /* If the format doesn't contain % args or %%, use strcpy. */
3624 if (strchr (fmt_str, target_percent) == NULL)
3626 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3627 && arg)
3628 return false;
3630 /* If the format specifier was "", fprintf does nothing. */
3631 if (fmt_str[0] == '\0')
3633 replace_call_with_value (gsi, NULL_TREE);
3634 return true;
3637 /* When "string" doesn't contain %, replace all cases of
3638 fprintf (fp, string) with fputs (string, fp). The fputs
3639 builtin will take care of special cases like length == 1. */
3640 if (fn_fputs)
3642 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3643 replace_call_with_call_and_fold (gsi, repl);
3644 return true;
3648 /* The other optimizations can be done only on the non-va_list variants. */
3649 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3650 return false;
3652 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3653 else if (strcmp (fmt_str, target_percent_s) == 0)
3655 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3656 return false;
3657 if (fn_fputs)
3659 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3660 replace_call_with_call_and_fold (gsi, repl);
3661 return true;
3665 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3666 else if (strcmp (fmt_str, target_percent_c) == 0)
3668 if (!arg
3669 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3670 return false;
3671 if (fn_fputc)
3673 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3674 replace_call_with_call_and_fold (gsi, repl);
3675 return true;
3679 return false;
3682 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3683 FMT and ARG are the arguments to the call; we don't fold cases with
3684 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3686 Return NULL_TREE if no simplification was possible, otherwise return the
3687 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3688 code of the function to be simplified. */
3690 static bool
3691 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3692 tree arg, enum built_in_function fcode)
3694 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3695 tree fn_putchar, fn_puts, newarg;
3696 const char *fmt_str = NULL;
3698 /* If the return value is used, don't do the transformation. */
3699 if (gimple_call_lhs (stmt) != NULL_TREE)
3700 return false;
3702 /* Check whether the format is a literal string constant. */
3703 fmt_str = c_getstr (fmt);
3704 if (fmt_str == NULL)
3705 return false;
3707 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3709 /* If we're using an unlocked function, assume the other
3710 unlocked functions exist explicitly. */
3711 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3712 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3714 else
3716 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3717 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3720 if (!init_target_chars ())
3721 return false;
3723 if (strcmp (fmt_str, target_percent_s) == 0
3724 || strchr (fmt_str, target_percent) == NULL)
3726 const char *str;
3728 if (strcmp (fmt_str, target_percent_s) == 0)
3730 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3731 return false;
3733 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3734 return false;
3736 str = c_getstr (arg);
3737 if (str == NULL)
3738 return false;
3740 else
3742 /* The format specifier doesn't contain any '%' characters. */
3743 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3744 && arg)
3745 return false;
3746 str = fmt_str;
3749 /* If the string was "", printf does nothing. */
3750 if (str[0] == '\0')
3752 replace_call_with_value (gsi, NULL_TREE);
3753 return true;
3756 /* If the string has length of 1, call putchar. */
3757 if (str[1] == '\0')
3759 /* Given printf("c"), (where c is any one character,)
3760 convert "c"[0] to an int and pass that to the replacement
3761 function. */
3762 newarg = build_int_cst (integer_type_node, str[0]);
3763 if (fn_putchar)
3765 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3766 replace_call_with_call_and_fold (gsi, repl);
3767 return true;
3770 else
3772 /* If the string was "string\n", call puts("string"). */
3773 size_t len = strlen (str);
3774 if ((unsigned char)str[len - 1] == target_newline
3775 && (size_t) (int) len == len
3776 && (int) len > 0)
3778 char *newstr;
3780 /* Create a NUL-terminated string that's one char shorter
3781 than the original, stripping off the trailing '\n'. */
3782 newstr = xstrdup (str);
3783 newstr[len - 1] = '\0';
3784 newarg = build_string_literal (len, newstr);
3785 free (newstr);
3786 if (fn_puts)
3788 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3789 replace_call_with_call_and_fold (gsi, repl);
3790 return true;
3793 else
3794 /* We'd like to arrange to call fputs(string,stdout) here,
3795 but we need stdout and don't have a way to get it yet. */
3796 return false;
3800 /* The other optimizations can be done only on the non-va_list variants. */
3801 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3802 return false;
3804 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3805 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3807 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3808 return false;
3809 if (fn_puts)
3811 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3812 replace_call_with_call_and_fold (gsi, repl);
3813 return true;
3817 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3818 else if (strcmp (fmt_str, target_percent_c) == 0)
3820 if (!arg || ! useless_type_conversion_p (integer_type_node,
3821 TREE_TYPE (arg)))
3822 return false;
3823 if (fn_putchar)
3825 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3826 replace_call_with_call_and_fold (gsi, repl);
3827 return true;
3831 return false;
3836 /* Fold a call to __builtin_strlen with known length LEN. */
3838 static bool
3839 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3841 gimple *stmt = gsi_stmt (*gsi);
3842 tree arg = gimple_call_arg (stmt, 0);
3844 wide_int minlen;
3845 wide_int maxlen;
3847 c_strlen_data lendata = { };
3848 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3849 && !lendata.decl
3850 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3851 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3853 /* The range of lengths refers to either a single constant
3854 string or to the longest and shortest constant string
3855 referenced by the argument of the strlen() call, or to
3856 the strings that can possibly be stored in the arrays
3857 the argument refers to. */
3858 minlen = wi::to_wide (lendata.minlen);
3859 maxlen = wi::to_wide (lendata.maxlen);
3861 else
3863 unsigned prec = TYPE_PRECISION (sizetype);
3865 minlen = wi::shwi (0, prec);
3866 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3869 if (minlen == maxlen)
3871 /* Fold the strlen call to a constant. */
3872 tree type = TREE_TYPE (lendata.minlen);
3873 tree len = force_gimple_operand_gsi (gsi,
3874 wide_int_to_tree (type, minlen),
3875 true, NULL, true, GSI_SAME_STMT);
3876 replace_call_with_value (gsi, len);
3877 return true;
3880 /* Set the strlen() range to [0, MAXLEN]. */
3881 if (tree lhs = gimple_call_lhs (stmt))
3882 set_strlen_range (lhs, minlen, maxlen);
3884 return false;
3887 /* Fold a call to __builtin_acc_on_device. */
3889 static bool
3890 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3892 /* Defer folding until we know which compiler we're in. */
3893 if (symtab->state != EXPANSION)
3894 return false;
3896 unsigned val_host = GOMP_DEVICE_HOST;
3897 unsigned val_dev = GOMP_DEVICE_NONE;
3899 #ifdef ACCEL_COMPILER
3900 val_host = GOMP_DEVICE_NOT_HOST;
3901 val_dev = ACCEL_COMPILER_acc_device;
3902 #endif
3904 location_t loc = gimple_location (gsi_stmt (*gsi));
3906 tree host_eq = make_ssa_name (boolean_type_node);
3907 gimple *host_ass = gimple_build_assign
3908 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3909 gimple_set_location (host_ass, loc);
3910 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3912 tree dev_eq = make_ssa_name (boolean_type_node);
3913 gimple *dev_ass = gimple_build_assign
3914 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3915 gimple_set_location (dev_ass, loc);
3916 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3918 tree result = make_ssa_name (boolean_type_node);
3919 gimple *result_ass = gimple_build_assign
3920 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3921 gimple_set_location (result_ass, loc);
3922 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3924 replace_call_with_value (gsi, result);
3926 return true;
3929 /* Fold realloc (0, n) -> malloc (n). */
3931 static bool
3932 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3934 gimple *stmt = gsi_stmt (*gsi);
3935 tree arg = gimple_call_arg (stmt, 0);
3936 tree size = gimple_call_arg (stmt, 1);
3938 if (operand_equal_p (arg, null_pointer_node, 0))
3940 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3941 if (fn_malloc)
3943 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3944 replace_call_with_call_and_fold (gsi, repl);
3945 return true;
3948 return false;
3951 /* Fold the non-target builtin at *GSI and return whether any simplification
3952 was made. */
3954 static bool
3955 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3957 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3958 tree callee = gimple_call_fndecl (stmt);
3960 /* Give up for always_inline inline builtins until they are
3961 inlined. */
3962 if (avoid_folding_inline_builtin (callee))
3963 return false;
3965 unsigned n = gimple_call_num_args (stmt);
3966 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3967 switch (fcode)
3969 case BUILT_IN_BCMP:
3970 return gimple_fold_builtin_bcmp (gsi);
3971 case BUILT_IN_BCOPY:
3972 return gimple_fold_builtin_bcopy (gsi);
3973 case BUILT_IN_BZERO:
3974 return gimple_fold_builtin_bzero (gsi);
3976 case BUILT_IN_MEMSET:
3977 return gimple_fold_builtin_memset (gsi,
3978 gimple_call_arg (stmt, 1),
3979 gimple_call_arg (stmt, 2));
3980 case BUILT_IN_MEMCPY:
3981 case BUILT_IN_MEMPCPY:
3982 case BUILT_IN_MEMMOVE:
3983 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3984 gimple_call_arg (stmt, 1), fcode);
3985 case BUILT_IN_SPRINTF_CHK:
3986 case BUILT_IN_VSPRINTF_CHK:
3987 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3988 case BUILT_IN_STRCAT_CHK:
3989 return gimple_fold_builtin_strcat_chk (gsi);
3990 case BUILT_IN_STRNCAT_CHK:
3991 return gimple_fold_builtin_strncat_chk (gsi);
3992 case BUILT_IN_STRLEN:
3993 return gimple_fold_builtin_strlen (gsi);
3994 case BUILT_IN_STRCPY:
3995 return gimple_fold_builtin_strcpy (gsi,
3996 gimple_call_arg (stmt, 0),
3997 gimple_call_arg (stmt, 1));
3998 case BUILT_IN_STRNCPY:
3999 return gimple_fold_builtin_strncpy (gsi,
4000 gimple_call_arg (stmt, 0),
4001 gimple_call_arg (stmt, 1),
4002 gimple_call_arg (stmt, 2));
4003 case BUILT_IN_STRCAT:
4004 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
4005 gimple_call_arg (stmt, 1));
4006 case BUILT_IN_STRNCAT:
4007 return gimple_fold_builtin_strncat (gsi);
4008 case BUILT_IN_INDEX:
4009 case BUILT_IN_STRCHR:
4010 return gimple_fold_builtin_strchr (gsi, false);
4011 case BUILT_IN_RINDEX:
4012 case BUILT_IN_STRRCHR:
4013 return gimple_fold_builtin_strchr (gsi, true);
4014 case BUILT_IN_STRSTR:
4015 return gimple_fold_builtin_strstr (gsi);
4016 case BUILT_IN_STRCMP:
4017 case BUILT_IN_STRCMP_EQ:
4018 case BUILT_IN_STRCASECMP:
4019 case BUILT_IN_STRNCMP:
4020 case BUILT_IN_STRNCMP_EQ:
4021 case BUILT_IN_STRNCASECMP:
4022 return gimple_fold_builtin_string_compare (gsi);
4023 case BUILT_IN_MEMCHR:
4024 return gimple_fold_builtin_memchr (gsi);
4025 case BUILT_IN_FPUTS:
4026 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4027 gimple_call_arg (stmt, 1), false);
4028 case BUILT_IN_FPUTS_UNLOCKED:
4029 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4030 gimple_call_arg (stmt, 1), true);
4031 case BUILT_IN_MEMCPY_CHK:
4032 case BUILT_IN_MEMPCPY_CHK:
4033 case BUILT_IN_MEMMOVE_CHK:
4034 case BUILT_IN_MEMSET_CHK:
4035 return gimple_fold_builtin_memory_chk (gsi,
4036 gimple_call_arg (stmt, 0),
4037 gimple_call_arg (stmt, 1),
4038 gimple_call_arg (stmt, 2),
4039 gimple_call_arg (stmt, 3),
4040 fcode);
4041 case BUILT_IN_STPCPY:
4042 return gimple_fold_builtin_stpcpy (gsi);
4043 case BUILT_IN_STRCPY_CHK:
4044 case BUILT_IN_STPCPY_CHK:
4045 return gimple_fold_builtin_stxcpy_chk (gsi,
4046 gimple_call_arg (stmt, 0),
4047 gimple_call_arg (stmt, 1),
4048 gimple_call_arg (stmt, 2),
4049 fcode);
4050 case BUILT_IN_STRNCPY_CHK:
4051 case BUILT_IN_STPNCPY_CHK:
4052 return gimple_fold_builtin_stxncpy_chk (gsi,
4053 gimple_call_arg (stmt, 0),
4054 gimple_call_arg (stmt, 1),
4055 gimple_call_arg (stmt, 2),
4056 gimple_call_arg (stmt, 3),
4057 fcode);
4058 case BUILT_IN_SNPRINTF_CHK:
4059 case BUILT_IN_VSNPRINTF_CHK:
4060 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
4062 case BUILT_IN_FPRINTF:
4063 case BUILT_IN_FPRINTF_UNLOCKED:
4064 case BUILT_IN_VFPRINTF:
4065 if (n == 2 || n == 3)
4066 return gimple_fold_builtin_fprintf (gsi,
4067 gimple_call_arg (stmt, 0),
4068 gimple_call_arg (stmt, 1),
4069 n == 3
4070 ? gimple_call_arg (stmt, 2)
4071 : NULL_TREE,
4072 fcode);
4073 break;
4074 case BUILT_IN_FPRINTF_CHK:
4075 case BUILT_IN_VFPRINTF_CHK:
4076 if (n == 3 || n == 4)
4077 return gimple_fold_builtin_fprintf (gsi,
4078 gimple_call_arg (stmt, 0),
4079 gimple_call_arg (stmt, 2),
4080 n == 4
4081 ? gimple_call_arg (stmt, 3)
4082 : NULL_TREE,
4083 fcode);
4084 break;
4085 case BUILT_IN_PRINTF:
4086 case BUILT_IN_PRINTF_UNLOCKED:
4087 case BUILT_IN_VPRINTF:
4088 if (n == 1 || n == 2)
4089 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4090 n == 2
4091 ? gimple_call_arg (stmt, 1)
4092 : NULL_TREE, fcode);
4093 break;
4094 case BUILT_IN_PRINTF_CHK:
4095 case BUILT_IN_VPRINTF_CHK:
4096 if (n == 2 || n == 3)
4097 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4098 n == 3
4099 ? gimple_call_arg (stmt, 2)
4100 : NULL_TREE, fcode);
4101 break;
4102 case BUILT_IN_ACC_ON_DEVICE:
4103 return gimple_fold_builtin_acc_on_device (gsi,
4104 gimple_call_arg (stmt, 0));
4105 case BUILT_IN_REALLOC:
4106 return gimple_fold_builtin_realloc (gsi);
4108 default:;
4111 /* Try the generic builtin folder. */
4112 bool ignore = (gimple_call_lhs (stmt) == NULL);
4113 tree result = fold_call_stmt (stmt, ignore);
4114 if (result)
4116 if (ignore)
4117 STRIP_NOPS (result);
4118 else
4119 result = fold_convert (gimple_call_return_type (stmt), result);
4120 if (!update_call_from_tree (gsi, result))
4121 gimplify_and_update_call_from_tree (gsi, result);
4122 return true;
4125 return false;
4128 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4129 function calls to constants, where possible. */
4131 static tree
4132 fold_internal_goacc_dim (const gimple *call)
4134 int axis = oacc_get_ifn_dim_arg (call);
4135 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4136 tree result = NULL_TREE;
4137 tree type = TREE_TYPE (gimple_call_lhs (call));
4139 switch (gimple_call_internal_fn (call))
4141 case IFN_GOACC_DIM_POS:
4142 /* If the size is 1, we know the answer. */
4143 if (size == 1)
4144 result = build_int_cst (type, 0);
4145 break;
4146 case IFN_GOACC_DIM_SIZE:
4147 /* If the size is not dynamic, we know the answer. */
4148 if (size)
4149 result = build_int_cst (type, size);
4150 break;
4151 default:
4152 break;
4155 return result;
4158 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4159 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4160 &var where var is only addressable because of such calls. */
4162 bool
4163 optimize_atomic_compare_exchange_p (gimple *stmt)
/* Cheap structural checks first: exactly 6 args, atomics inlining and
   optimization enabled, not sanitized, a normal builtin call with both
   virtual def and use.  */
4165 if (gimple_call_num_args (stmt) != 6
4166 || !flag_inline_atomics
4167 || !optimize
4168 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4169 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4170 || !gimple_vdef (stmt)
4171 || !gimple_vuse (stmt))
4172 return false;
/* Only the fixed-size __atomic_compare_exchange_{1,2,4,8,16} forms
   are candidates.  */
4174 tree fndecl = gimple_call_fndecl (stmt);
4175 switch (DECL_FUNCTION_CODE (fndecl))
4177 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4178 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4179 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4180 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4181 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4182 break;
4183 default:
4184 return false;
/* The second argument must literally be &var of an SSA-capable
   auto variable of this function.  */
4187 tree expected = gimple_call_arg (stmt, 1);
4188 if (TREE_CODE (expected) != ADDR_EXPR
4189 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4190 return false;
4192 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4193 if (!is_gimple_reg_type (etype)
4194 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4195 || TREE_THIS_VOLATILE (etype)
4196 || VECTOR_TYPE_P (etype)
4197 || TREE_CODE (etype) == COMPLEX_TYPE
4198 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4199 might not preserve all the bits. See PR71716. */
4200 || SCALAR_FLOAT_TYPE_P (etype)
4201 || maybe_ne (TYPE_PRECISION (etype),
4202 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4203 return false;
/* The weak argument must be a literal 0 or 1.  */
4205 tree weak = gimple_call_arg (stmt, 3);
4206 if (!integer_zerop (weak) && !integer_onep (weak))
4207 return false;
/* The third parameter type of the builtin is the underlying uintN_t;
   the target must provide a compare-and-swap pattern for its mode.  */
4209 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4210 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4211 machine_mode mode = TYPE_MODE (itype);
4213 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4214 == CODE_FOR_nothing
4215 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4216 return false;
4218 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4219 return false;
4221 return true;
4224 /* Fold
4225 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4226 into
4227 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4228 i = IMAGPART_EXPR <t>;
4229 r = (_Bool) i;
4230 e = REALPART_EXPR <t>; */
4232 void
4233 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4235 gimple *stmt = gsi_stmt (*gsi);
4236 tree fndecl = gimple_call_fndecl (stmt);
4237 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
/* ITYPE is the underlying uintN_t (the third parameter type of the
   builtin); the internal fn returns a _Complex of it.  */
4238 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4239 tree ctype = build_complex_type (itype);
4240 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4241 bool throws = false;
4242 edge e = NULL;
/* Load the current value of the expected variable into an SSA name,
   view-converting to ITYPE if the types differ.  */
4243 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4244 expected);
4245 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4246 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4247 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4249 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4250 build1 (VIEW_CONVERT_EXPR, itype,
4251 gimple_assign_lhs (g)));
4252 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode weakness in bit 8 and the access size in the low bits of the
   flag argument (w * 256 + N from the function comment).  */
4254 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4255 + int_size_in_bytes (itype);
4256 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4257 gimple_call_arg (stmt, 0),
4258 gimple_assign_lhs (g),
4259 gimple_call_arg (stmt, 2),
4260 build_int_cst (integer_type_node, flag),
4261 gimple_call_arg (stmt, 4),
4262 gimple_call_arg (stmt, 5));
4263 tree lhs = make_ssa_name (ctype);
4264 gimple_call_set_lhs (g, lhs);
4265 gimple_move_vops (g, stmt);
4266 tree oldlhs = gimple_call_lhs (stmt);
/* If the original call could throw internally, follow-up statements
   must go on the fallthru edge rather than after the call.  */
4267 if (stmt_can_throw_internal (cfun, stmt))
4269 throws = true;
4270 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4272 gimple_call_set_nothrow (as_a <gcall *> (g),
4273 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4274 gimple_call_set_lhs (stmt, NULL_TREE);
4275 gsi_replace (gsi, g, true);
/* Extract the success flag (imag part) only if the original result
   was used.  */
4276 if (oldlhs)
4278 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4279 build1 (IMAGPART_EXPR, itype, lhs));
4280 if (throws)
4282 gsi_insert_on_edge_immediate (e, g);
4283 *gsi = gsi_for_stmt (g);
4285 else
4286 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4287 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4288 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Store the observed value (real part) back into the expected var.  */
4290 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4291 build1 (REALPART_EXPR, itype, lhs));
4292 if (throws && oldlhs == NULL_TREE)
4294 gsi_insert_on_edge_immediate (e, g);
4295 *gsi = gsi_for_stmt (g);
4297 else
4298 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4299 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4301 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4302 VIEW_CONVERT_EXPR,
4303 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4304 gimple_assign_lhs (g)));
4305 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4307 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4308 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave the iterator at the statement loading the old expected value.  */
4309 *gsi = gsiret;
4312 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4313 doesn't fit into TYPE. The test for overflow should be regardless of
4314 -fwrapv, and even for unsigned types. */
4316 bool
4317 arith_overflowed_p (enum tree_code code, const_tree type,
4318 const_tree arg0, const_tree arg1)
4320 widest2_int warg0 = widest2_int_cst (arg0);
4321 widest2_int warg1 = widest2_int_cst (arg1);
4322 widest2_int wres;
4323 switch (code)
4325 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4326 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4327 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4328 default: gcc_unreachable ();
4330 signop sign = TYPE_SIGN (type);
4331 if (sign == UNSIGNED && wi::neg_p (wres))
4332 return true;
4333 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4336 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4337 for the memory it references, otherwise return null. VECTYPE is the
4338 type of the memory vector. */
4340 static tree
4341 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4343 tree ptr = gimple_call_arg (call, 0);
4344 tree alias_align = gimple_call_arg (call, 1);
4345 tree mask = gimple_call_arg (call, 2);
4346 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4347 return NULL_TREE;
4349 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4350 if (TYPE_ALIGN (vectype) != align)
4351 vectype = build_aligned_type (vectype, align);
4352 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4353 return fold_build2 (MEM_REF, vectype, ptr, offset);
4356 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4358 static bool
4359 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4361 tree lhs = gimple_call_lhs (call);
4362 if (!lhs)
4363 return false;
4365 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4367 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4368 gimple_set_location (new_stmt, gimple_location (call));
4369 gimple_move_vops (new_stmt, call);
4370 gsi_replace (gsi, new_stmt, false);
4371 return true;
4373 return false;
4376 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4378 static bool
4379 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4381 tree rhs = gimple_call_arg (call, 3);
4382 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4384 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4385 gimple_set_location (new_stmt, gimple_location (call));
4386 gimple_move_vops (new_stmt, call);
4387 gsi_replace (gsi, new_stmt, false);
4388 return true;
4390 return false;
4393 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4394 The statement may be replaced by another statement, e.g., if the call
4395 simplifies to a constant value. Return true if any changes were made.
4396 It is assumed that the operands have been previously folded. */
4398 static bool
4399 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4401 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4402 tree callee;
4403 bool changed = false;
4404 unsigned i;
4406 /* Fold *& in call arguments. */
4407 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4408 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4410 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4411 if (tmp)
4413 gimple_call_set_arg (stmt, i, tmp);
4414 changed = true;
4418 /* Check for virtual calls that became direct calls. */
4419 callee = gimple_call_fn (stmt);
4420 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4422 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* The OBJ_TYPE_REF already resolves to a known function; just strip
   the OBJ_TYPE_REF wrapper (diagnosing inconsistent devirtualization
   in the dump file first).  */
4424 if (dump_file && virtual_method_call_p (callee)
4425 && !possible_polymorphic_call_target_p
4426 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4427 (OBJ_TYPE_REF_EXPR (callee)))))
4429 fprintf (dump_file,
4430 "Type inheritance inconsistent devirtualization of ");
4431 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4432 fprintf (dump_file, " to ");
4433 print_generic_expr (dump_file, callee, TDF_SLIM);
4434 fprintf (dump_file, "\n");
4437 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4438 changed = true;
/* Otherwise try type-inheritance based devirtualization: with a final
   target list of length <= 1 the call folds to the single target or
   to __builtin_unreachable.  */
4440 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4442 bool final;
4443 vec <cgraph_node *>targets
4444 = possible_polymorphic_call_targets (callee, stmt, &final);
4445 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4447 tree lhs = gimple_call_lhs (stmt);
4448 if (dump_enabled_p ())
4450 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4451 "folding virtual function call to %s\n",
4452 targets.length () == 1
4453 ? targets[0]->name ()
4454 : "__builtin_unreachable");
4456 if (targets.length () == 1)
4458 tree fndecl = targets[0]->decl;
4459 gimple_call_set_fndecl (stmt, fndecl);
4460 changed = true;
4461 /* If changing the call to __cxa_pure_virtual
4462 or similar noreturn function, adjust gimple_call_fntype
4463 too. */
4464 if (gimple_call_noreturn_p (stmt)
4465 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4466 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4467 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4468 == void_type_node))
4469 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4470 /* If the call becomes noreturn, remove the lhs. */
4471 if (lhs
4472 && gimple_call_noreturn_p (stmt)
4473 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4474 || should_remove_lhs_p (lhs)))
4476 if (TREE_CODE (lhs) == SSA_NAME)
4478 tree var = create_tmp_var (TREE_TYPE (lhs));
4479 tree def = get_or_create_ssa_default_def (cfun, var);
4480 gimple *new_stmt = gimple_build_assign (lhs, def);
4481 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4483 gimple_call_set_lhs (stmt, NULL_TREE);
4485 maybe_remove_unused_call_args (cfun, stmt);
4487 else
/* No possible target at all: the call is unreachable.  */
4489 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4490 gimple *new_stmt = gimple_build_call (fndecl, 0);
4491 gimple_set_location (new_stmt, gimple_location (stmt));
4492 /* If the call had a SSA name as lhs morph that into
4493 an uninitialized value. */
4494 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4496 tree var = create_tmp_var (TREE_TYPE (lhs));
4497 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4498 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4499 set_ssa_default_def (cfun, var, lhs);
4501 gimple_move_vops (new_stmt, stmt);
4502 gsi_replace (gsi, new_stmt, false);
4503 return true;
4509 /* Check for indirect calls that became direct calls, and then
4510 no longer require a static chain. */
4511 if (gimple_call_chain (stmt))
4513 tree fn = gimple_call_fndecl (stmt);
4514 if (fn && !DECL_STATIC_CHAIN (fn))
4516 gimple_call_set_chain (stmt, NULL);
4517 changed = true;
4519 else
4521 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4522 if (tmp)
4524 gimple_call_set_chain (stmt, tmp);
4525 changed = true;
/* Everything past this point may replace the statement, which INPLACE
   forbids.  */
4530 if (inplace)
4531 return changed;
4533 /* Check for builtins that CCP can handle using information not
4534 available in the generic fold routines. */
4535 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4537 if (gimple_fold_builtin (gsi))
4538 changed = true;
4540 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4542 changed |= targetm.gimple_fold_builtin (gsi);
4544 else if (gimple_call_internal_p (stmt))
/* Fold internal function calls.  SUBCODE/CPLX_RESULT drive the shared
   arithmetic-with-overflow handling below the switch.  */
4546 enum tree_code subcode = ERROR_MARK;
4547 tree result = NULL_TREE;
4548 bool cplx_result = false;
4549 tree overflow = NULL_TREE;
4550 switch (gimple_call_internal_fn (stmt))
4552 case IFN_BUILTIN_EXPECT:
4553 result = fold_builtin_expect (gimple_location (stmt),
4554 gimple_call_arg (stmt, 0),
4555 gimple_call_arg (stmt, 1),
4556 gimple_call_arg (stmt, 2),
4557 NULL_TREE);
4558 break;
4559 case IFN_UBSAN_OBJECT_SIZE:
4561 tree offset = gimple_call_arg (stmt, 1);
4562 tree objsize = gimple_call_arg (stmt, 2);
4563 if (integer_all_onesp (objsize)
4564 || (TREE_CODE (offset) == INTEGER_CST
4565 && TREE_CODE (objsize) == INTEGER_CST
4566 && tree_int_cst_le (offset, objsize)))
4568 replace_call_with_value (gsi, NULL_TREE);
4569 return true;
4572 break;
4573 case IFN_UBSAN_PTR:
4574 if (integer_zerop (gimple_call_arg (stmt, 1)))
4576 replace_call_with_value (gsi, NULL_TREE);
4577 return true;
4579 break;
4580 case IFN_UBSAN_BOUNDS:
4582 tree index = gimple_call_arg (stmt, 1);
4583 tree bound = gimple_call_arg (stmt, 2);
4584 if (TREE_CODE (index) == INTEGER_CST
4585 && TREE_CODE (bound) == INTEGER_CST)
4587 index = fold_convert (TREE_TYPE (bound), index);
4588 if (TREE_CODE (index) == INTEGER_CST
4589 && tree_int_cst_le (index, bound))
4591 replace_call_with_value (gsi, NULL_TREE);
4592 return true;
4596 break;
4597 case IFN_GOACC_DIM_SIZE:
4598 case IFN_GOACC_DIM_POS:
4599 result = fold_internal_goacc_dim (stmt);
4600 break;
4601 case IFN_UBSAN_CHECK_ADD:
4602 subcode = PLUS_EXPR;
4603 break;
4604 case IFN_UBSAN_CHECK_SUB:
4605 subcode = MINUS_EXPR;
4606 break;
4607 case IFN_UBSAN_CHECK_MUL:
4608 subcode = MULT_EXPR;
4609 break;
4610 case IFN_ADD_OVERFLOW:
4611 subcode = PLUS_EXPR;
4612 cplx_result = true;
4613 break;
4614 case IFN_SUB_OVERFLOW:
4615 subcode = MINUS_EXPR;
4616 cplx_result = true;
4617 break;
4618 case IFN_MUL_OVERFLOW:
4619 subcode = MULT_EXPR;
4620 cplx_result = true;
4621 break;
4622 case IFN_MASK_LOAD:
4623 changed |= gimple_fold_mask_load (gsi, stmt);
4624 break;
4625 case IFN_MASK_STORE:
4626 changed |= gimple_fold_mask_store (gsi, stmt);
4627 break;
4628 default:
4629 break;
/* Shared folding for UBSAN_CHECK_* and *_OVERFLOW internal fns; for
   the latter TYPE is the element type of the complex lhs.  */
4631 if (subcode != ERROR_MARK)
4633 tree arg0 = gimple_call_arg (stmt, 0);
4634 tree arg1 = gimple_call_arg (stmt, 1);
4635 tree type = TREE_TYPE (arg0);
4636 if (cplx_result)
4638 tree lhs = gimple_call_lhs (stmt);
4639 if (lhs == NULL_TREE)
4640 type = NULL_TREE;
4641 else
4642 type = TREE_TYPE (TREE_TYPE (lhs));
4644 if (type == NULL_TREE)
4646 /* x = y + 0; x = y - 0; x = y * 0; */
4647 else if (integer_zerop (arg1))
4648 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4649 /* x = 0 + y; x = 0 * y; */
4650 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4651 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4652 /* x = y - y; */
4653 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4654 result = integer_zero_node;
4655 /* x = y * 1; x = 1 * y; */
4656 else if (subcode == MULT_EXPR && integer_onep (arg1))
4657 result = arg0;
4658 else if (subcode == MULT_EXPR && integer_onep (arg0))
4659 result = arg1;
4660 else if (TREE_CODE (arg0) == INTEGER_CST
4661 && TREE_CODE (arg1) == INTEGER_CST)
4663 if (cplx_result)
4664 result = int_const_binop (subcode, fold_convert (type, arg0),
4665 fold_convert (type, arg1));
4666 else
4667 result = int_const_binop (subcode, arg0, arg1);
4668 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4670 if (cplx_result)
4671 overflow = build_one_cst (type);
4672 else
4673 result = NULL_TREE;
4676 if (result)
4678 if (result == integer_zero_node)
4679 result = build_zero_cst (type);
4680 else if (cplx_result && TREE_TYPE (result) != type)
/* The symbolic result has a different type than the lhs element
   type; only keep it if the conversion cannot change the value,
   flagging overflow for constants that do not fit.  */
4682 if (TREE_CODE (result) == INTEGER_CST)
4684 if (arith_overflowed_p (PLUS_EXPR, type, result,
4685 integer_zero_node))
4686 overflow = build_one_cst (type);
4688 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4689 && TYPE_UNSIGNED (type))
4690 || (TYPE_PRECISION (type)
4691 < (TYPE_PRECISION (TREE_TYPE (result))
4692 + (TYPE_UNSIGNED (TREE_TYPE (result))
4693 && !TYPE_UNSIGNED (type)))))
4694 result = NULL_TREE;
4695 if (result)
4696 result = fold_convert (type, result);
4701 if (result)
4703 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4704 result = drop_tree_overflow (result);
4705 if (cplx_result)
/* Pack value and overflow flag into the complex result.  */
4707 if (overflow == NULL_TREE)
4708 overflow = build_zero_cst (TREE_TYPE (result));
4709 tree ctype = build_complex_type (TREE_TYPE (result));
4710 if (TREE_CODE (result) == INTEGER_CST
4711 && TREE_CODE (overflow) == INTEGER_CST)
4712 result = build_complex (ctype, result, overflow);
4713 else
4714 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4715 ctype, result, overflow);
4717 if (!update_call_from_tree (gsi, result))
4718 gimplify_and_update_call_from_tree (gsi, result);
4719 changed = true;
4723 return changed;
4727 /* Return true whether NAME has a use on STMT. */
4729 static bool
4730 has_use_on_stmt (tree name, gimple *stmt)
4732 imm_use_iterator iter;
4733 use_operand_p use_p;
4734 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4735 if (USE_STMT (use_p) == stmt)
4736 return true;
4737 return false;
4740 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4741 gimple_simplify.
4743 Replaces *GSI with the simplification result in RCODE and OPS
4744 and the associated statements in *SEQ. Does the replacement
4745 according to INPLACE and returns true if the operation succeeded. */
4747 static bool
4748 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4749 gimple_match_op *res_op,
4750 gimple_seq *seq, bool inplace)
4752 gimple *stmt = gsi_stmt (*gsi);
4753 tree *ops = res_op->ops;
4754 unsigned int num_ops = res_op->num_ops;
4756 /* Play safe and do not allow abnormals to be mentioned in
4757 newly created statements. See also maybe_push_res_to_seq.
4758 As an exception allow such uses if there was a use of the
4759 same SSA name on the old stmt. */
4760 for (unsigned int i = 0; i < num_ops; ++i)
4761 if (TREE_CODE (ops[i]) == SSA_NAME
4762 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4763 && !has_use_on_stmt (ops[i], stmt))
4764 return false;
/* Apply the same abnormal check to the operands of an embedded
   comparison tree in ops[0].  */
4766 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4767 for (unsigned int i = 0; i < 2; ++i)
4768 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4769 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4770 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4771 return false;
4773 /* Don't insert new statements when INPLACE is true, even if we could
4774 reuse STMT for the final statement. */
4775 if (inplace && !gimple_seq_empty_p (*seq))
4776 return false;
/* Case 1: STMT is a GIMPLE_COND; the result must become its
   condition, possibly via a new SSA name pushed to SEQ.  */
4778 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4780 gcc_assert (res_op->code.is_tree_code ());
4781 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4782 /* GIMPLE_CONDs condition may not throw. */
4783 && (!flag_exceptions
4784 || !cfun->can_throw_non_call_exceptions
4785 || !operation_could_trap_p (res_op->code,
4786 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4787 false, NULL_TREE)))
4788 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4789 else if (res_op->code == SSA_NAME)
4790 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4791 build_zero_cst (TREE_TYPE (ops[0])));
4792 else if (res_op->code == INTEGER_CST)
4794 if (integer_zerop (ops[0]))
4795 gimple_cond_make_false (cond_stmt);
4796 else
4797 gimple_cond_make_true (cond_stmt);
4799 else if (!inplace)
4801 tree res = maybe_push_res_to_seq (res_op, seq);
4802 if (!res)
4803 return false;
4804 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4805 build_zero_cst (TREE_TYPE (res)));
4807 else
4808 return false;
4809 if (dump_file && (dump_flags & TDF_DETAILS))
4811 fprintf (dump_file, "gimple_simplified to ");
4812 if (!gimple_seq_empty_p (*seq))
4813 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4814 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4815 0, TDF_SLIM);
4817 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4818 return true;
/* Case 2: STMT is an assignment and the result is a tree code;
   rewrite its rhs (only shrinking the operand count when INPLACE).  */
4820 else if (is_gimple_assign (stmt)
4821 && res_op->code.is_tree_code ())
4823 if (!inplace
4824 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4826 maybe_build_generic_op (res_op);
4827 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4828 res_op->op_or_null (0),
4829 res_op->op_or_null (1),
4830 res_op->op_or_null (2));
4831 if (dump_file && (dump_flags & TDF_DETAILS))
4833 fprintf (dump_file, "gimple_simplified to ");
4834 if (!gimple_seq_empty_p (*seq))
4835 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4836 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4837 0, TDF_SLIM);
4839 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4840 return true;
/* Case 3: the result is the same (combined) function STMT already
   calls; only the argument list changed.  */
4843 else if (res_op->code.is_fn_code ()
4844 && gimple_call_combined_fn (stmt) == res_op->code)
4846 gcc_assert (num_ops == gimple_call_num_args (stmt));
4847 for (unsigned int i = 0; i < num_ops; ++i)
4848 gimple_call_set_arg (stmt, i, ops[i]);
4849 if (dump_file && (dump_flags & TDF_DETAILS))
4851 fprintf (dump_file, "gimple_simplified to ");
4852 if (!gimple_seq_empty_p (*seq))
4853 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4854 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4856 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4857 return true;
/* Case 4: materialize the result into SEQ, assigning to STMT's lhs,
   and replace STMT by the whole sequence.  */
4859 else if (!inplace)
4861 if (gimple_has_lhs (stmt))
4863 tree lhs = gimple_get_lhs (stmt);
4864 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4865 return false;
4866 if (dump_file && (dump_flags & TDF_DETAILS))
4868 fprintf (dump_file, "gimple_simplified to ");
4869 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4871 gsi_replace_with_seq_vops (gsi, *seq);
4872 return true;
4874 else
4875 gcc_unreachable ();
4878 return false;
4881 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* T points at the expression to canonicalize in place; IS_DEBUG says the
   expression is in a debug stmt, where failure to rework the address is
   tolerated instead of an ICE.  Returns true if *T (or *ORIG_T) changed.  */
4883 static bool
4884 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
4886 bool res = false;
4887 tree *orig_t = t;
4889 if (TREE_CODE (*t) == ADDR_EXPR)
4890 t = &TREE_OPERAND (*t, 0);
4892 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4893 generic vector extension. The actual vector referenced is
4894 view-converted to an array type for this purpose. If the index
4895 is constant the canonical representation in the middle-end is a
4896 BIT_FIELD_REF so re-write the former to the latter here. */
4897 if (TREE_CODE (*t) == ARRAY_REF
4898 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4899 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4900 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4902 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4903 if (VECTOR_TYPE_P (vtype))
4905 tree low = array_ref_low_bound (*t);
4906 if (TREE_CODE (low) == INTEGER_CST)
4908 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of the element and check the access
   stays within the vector before rewriting.  */
4910 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4911 wi::to_widest (low));
4912 idx = wi::mul (idx, wi::to_widest
4913 (TYPE_SIZE (TREE_TYPE (*t))));
4914 widest_int ext
4915 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4916 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4918 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4919 TREE_TYPE (*t),
4920 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4921 TYPE_SIZE (TREE_TYPE (*t)),
4922 wide_int_to_tree (bitsizetype, idx));
4923 res = true;
/* Strip outer component references to reach the base memory access.  */
4930 while (handled_component_p (*t))
4931 t = &TREE_OPERAND (*t, 0);
4933 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4934 of invariant addresses into a SSA name MEM_REF address. */
4935 if (TREE_CODE (*t) == MEM_REF
4936 || TREE_CODE (*t) == TARGET_MEM_REF)
4938 tree addr = TREE_OPERAND (*t, 0);
4939 if (TREE_CODE (addr) == ADDR_EXPR
4940 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4941 || handled_component_p (TREE_OPERAND (addr, 0))))
4943 tree base;
4944 poly_int64 coffset;
4945 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4946 &coffset);
4947 if (!base)
4949 if (is_debug)
4950 return false;
4951 gcc_unreachable ();
/* Fold the component offset into the MEM_REF's constant offset.  */
4954 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4955 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4956 TREE_OPERAND (*t, 1),
4957 size_int (coffset));
4958 res = true;
4960 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4961 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4964 /* Canonicalize back MEM_REFs to plain reference trees if the object
4965 accessed is a decl that has the same access semantics as the MEM_REF. */
4966 if (TREE_CODE (*t) == MEM_REF
4967 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4968 && integer_zerop (TREE_OPERAND (*t, 1))
4969 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4971 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4972 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4973 if (/* Same volatile qualification. */
4974 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4975 /* Same TBAA behavior with -fstrict-aliasing. */
4976 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4977 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4978 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4979 /* Same alignment. */
4980 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4981 /* We have to look out here to not drop a required conversion
4982 from the rhs to the lhs if *t appears on the lhs or vice-versa
4983 if it appears on the rhs. Thus require strict type
4984 compatibility. */
4985 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4987 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4988 res = true;
/* Fold &MEM[constant, offset]-style addresses down to a plain integer
   constant where the whole address is known.  */
4992 else if (TREE_CODE (*orig_t) == ADDR_EXPR
4993 && TREE_CODE (*t) == MEM_REF
4994 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
4996 tree base;
4997 poly_int64 coffset;
4998 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
4999 &coffset);
5000 if (base)
5002 gcc_assert (TREE_CODE (base) == MEM_REF);
5003 poly_int64 moffset;
5004 if (mem_ref_offset (base).to_shwi (&moffset))
5006 coffset += moffset;
5007 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
5009 coffset += moffset;
5010 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
5011 return true;
5017 /* Canonicalize TARGET_MEM_REF in particular with respect to
5018 the indexes becoming constant. */
5019 else if (TREE_CODE (*t) == TARGET_MEM_REF)
5021 tree tem = maybe_fold_tmr (*t);
5022 if (tem)
5024 *t = tem;
5025 res = true;
5029 return res;
5032 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
5033 distinguishes both cases. */
5035 static bool
5036 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
5038 bool changed = false;
5039 gimple *stmt = gsi_stmt (*gsi);
5040 bool nowarning = gimple_no_warning_p (stmt);
5041 unsigned i;
5042 fold_defer_overflow_warnings ();
5044 /* First do required canonicalization of [TARGET_]MEM_REF addresses
5045 after propagation.
5046 ??? This shouldn't be done in generic folding but in the
5047 propagation helpers which also know whether an address was
5048 propagated.
5049 Also canonicalize operand order. */
5050 switch (gimple_code (stmt))
5052 case GIMPLE_ASSIGN:
5053 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
5055 tree *rhs = gimple_assign_rhs1_ptr (stmt);
5056 if ((REFERENCE_CLASS_P (*rhs)
5057 || TREE_CODE (*rhs) == ADDR_EXPR)
5058 && maybe_canonicalize_mem_ref_addr (rhs))
5059 changed = true;
5060 tree *lhs = gimple_assign_lhs_ptr (stmt);
5061 if (REFERENCE_CLASS_P (*lhs)
5062 && maybe_canonicalize_mem_ref_addr (lhs))
5063 changed = true;
5065 else
5067 /* Canonicalize operand order. */
5068 enum tree_code code = gimple_assign_rhs_code (stmt);
5069 if (TREE_CODE_CLASS (code) == tcc_comparison
5070 || commutative_tree_code (code)
5071 || commutative_ternary_tree_code (code))
5073 tree rhs1 = gimple_assign_rhs1 (stmt);
5074 tree rhs2 = gimple_assign_rhs2 (stmt);
5075 if (tree_swap_operands_p (rhs1, rhs2))
5077 gimple_assign_set_rhs1 (stmt, rhs2);
5078 gimple_assign_set_rhs2 (stmt, rhs1);
5079 if (TREE_CODE_CLASS (code) == tcc_comparison)
5080 gimple_assign_set_rhs_code (stmt,
5081 swap_tree_comparison (code));
5082 changed = true;
5086 break;
5087 case GIMPLE_CALL:
5089 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5091 tree *arg = gimple_call_arg_ptr (stmt, i);
5092 if (REFERENCE_CLASS_P (*arg)
5093 && maybe_canonicalize_mem_ref_addr (arg))
5094 changed = true;
5096 tree *lhs = gimple_call_lhs_ptr (stmt);
5097 if (*lhs
5098 && REFERENCE_CLASS_P (*lhs)
5099 && maybe_canonicalize_mem_ref_addr (lhs))
5100 changed = true;
5101 break;
5103 case GIMPLE_ASM:
5105 gasm *asm_stmt = as_a <gasm *> (stmt);
5106 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5108 tree link = gimple_asm_output_op (asm_stmt, i);
5109 tree op = TREE_VALUE (link);
5110 if (REFERENCE_CLASS_P (op)
5111 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5112 changed = true;
5114 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5116 tree link = gimple_asm_input_op (asm_stmt, i);
5117 tree op = TREE_VALUE (link);
5118 if ((REFERENCE_CLASS_P (op)
5119 || TREE_CODE (op) == ADDR_EXPR)
5120 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5121 changed = true;
5124 break;
5125 case GIMPLE_DEBUG:
5126 if (gimple_debug_bind_p (stmt))
5128 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5129 if (*val
5130 && (REFERENCE_CLASS_P (*val)
5131 || TREE_CODE (*val) == ADDR_EXPR)
5132 && maybe_canonicalize_mem_ref_addr (val, true))
5133 changed = true;
5135 break;
5136 case GIMPLE_COND:
5138 /* Canonicalize operand order. */
5139 tree lhs = gimple_cond_lhs (stmt);
5140 tree rhs = gimple_cond_rhs (stmt);
5141 if (tree_swap_operands_p (lhs, rhs))
5143 gcond *gc = as_a <gcond *> (stmt);
5144 gimple_cond_set_lhs (gc, rhs);
5145 gimple_cond_set_rhs (gc, lhs);
5146 gimple_cond_set_code (gc,
5147 swap_tree_comparison (gimple_cond_code (gc)));
5148 changed = true;
5151 default:;
5154 /* Dispatch to pattern-based folding. */
5155 if (!inplace
5156 || is_gimple_assign (stmt)
5157 || gimple_code (stmt) == GIMPLE_COND)
5159 gimple_seq seq = NULL;
5160 gimple_match_op res_op;
5161 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
5162 valueize, valueize))
5164 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
5165 changed = true;
5166 else
5167 gimple_seq_discard (seq);
5171 stmt = gsi_stmt (*gsi);
5173 /* Fold the main computation performed by the statement. */
5174 switch (gimple_code (stmt))
5176 case GIMPLE_ASSIGN:
5178 /* Try to canonicalize for boolean-typed X the comparisons
5179 X == 0, X == 1, X != 0, and X != 1. */
5180 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5181 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5183 tree lhs = gimple_assign_lhs (stmt);
5184 tree op1 = gimple_assign_rhs1 (stmt);
5185 tree op2 = gimple_assign_rhs2 (stmt);
5186 tree type = TREE_TYPE (op1);
5188 /* Check whether the comparison operands are of the same boolean
5189 type as the result type is.
5190 Check that second operand is an integer-constant with value
5191 one or zero. */
5192 if (TREE_CODE (op2) == INTEGER_CST
5193 && (integer_zerop (op2) || integer_onep (op2))
5194 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5196 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5197 bool is_logical_not = false;
5199 /* X == 0 and X != 1 is a logical-not.of X
5200 X == 1 and X != 0 is X */
5201 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5202 || (cmp_code == NE_EXPR && integer_onep (op2)))
5203 is_logical_not = true;
5205 if (is_logical_not == false)
5206 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5207 /* Only for one-bit precision typed X the transformation
5208 !X -> ~X is valied. */
5209 else if (TYPE_PRECISION (type) == 1)
5210 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5211 /* Otherwise we use !X -> X ^ 1. */
5212 else
5213 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5214 build_int_cst (type, 1));
5215 changed = true;
5216 break;
5220 unsigned old_num_ops = gimple_num_ops (stmt);
5221 tree lhs = gimple_assign_lhs (stmt);
5222 tree new_rhs = fold_gimple_assign (gsi);
5223 if (new_rhs
5224 && !useless_type_conversion_p (TREE_TYPE (lhs),
5225 TREE_TYPE (new_rhs)))
5226 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5227 if (new_rhs
5228 && (!inplace
5229 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5231 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5232 changed = true;
5234 break;
5237 case GIMPLE_CALL:
5238 changed |= gimple_fold_call (gsi, inplace);
5239 break;
5241 case GIMPLE_ASM:
5242 /* Fold *& in asm operands. */
5244 gasm *asm_stmt = as_a <gasm *> (stmt);
5245 size_t noutputs;
5246 const char **oconstraints;
5247 const char *constraint;
5248 bool allows_mem, allows_reg;
5250 noutputs = gimple_asm_noutputs (asm_stmt);
5251 oconstraints = XALLOCAVEC (const char *, noutputs);
5253 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5255 tree link = gimple_asm_output_op (asm_stmt, i);
5256 tree op = TREE_VALUE (link);
5257 oconstraints[i]
5258 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5259 if (REFERENCE_CLASS_P (op)
5260 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5262 TREE_VALUE (link) = op;
5263 changed = true;
5266 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5268 tree link = gimple_asm_input_op (asm_stmt, i);
5269 tree op = TREE_VALUE (link);
5270 constraint
5271 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5272 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5273 oconstraints, &allows_mem, &allows_reg);
5274 if (REFERENCE_CLASS_P (op)
5275 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5276 != NULL_TREE)
5278 TREE_VALUE (link) = op;
5279 changed = true;
5283 break;
5285 case GIMPLE_DEBUG:
5286 if (gimple_debug_bind_p (stmt))
5288 tree val = gimple_debug_bind_get_value (stmt);
5289 if (val
5290 && REFERENCE_CLASS_P (val))
5292 tree tem = maybe_fold_reference (val, false);
5293 if (tem)
5295 gimple_debug_bind_set_value (stmt, tem);
5296 changed = true;
5299 else if (val
5300 && TREE_CODE (val) == ADDR_EXPR)
5302 tree ref = TREE_OPERAND (val, 0);
5303 tree tem = maybe_fold_reference (ref, false);
5304 if (tem)
5306 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5307 gimple_debug_bind_set_value (stmt, tem);
5308 changed = true;
5312 break;
5314 case GIMPLE_RETURN:
5316 greturn *ret_stmt = as_a<greturn *> (stmt);
5317 tree ret = gimple_return_retval(ret_stmt);
5319 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5321 tree val = valueize (ret);
5322 if (val && val != ret
5323 && may_propagate_copy (ret, val))
5325 gimple_return_set_retval (ret_stmt, val);
5326 changed = true;
5330 break;
5332 default:;
5335 stmt = gsi_stmt (*gsi);
5337 /* Fold *& on the lhs. */
5338 if (gimple_has_lhs (stmt))
5340 tree lhs = gimple_get_lhs (stmt);
5341 if (lhs && REFERENCE_CLASS_P (lhs))
5343 tree new_lhs = maybe_fold_reference (lhs, true);
5344 if (new_lhs)
5346 gimple_set_lhs (stmt, new_lhs);
5347 changed = true;
5352 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5353 return changed;
5356 /* Valueziation callback that ends up not following SSA edges. */
5358 tree
5359 no_follow_ssa_edges (tree)
5361 return NULL_TREE;
5364 /* Valueization callback that ends up following single-use SSA edges only. */
5366 tree
5367 follow_single_use_edges (tree val)
5369 if (TREE_CODE (val) == SSA_NAME
5370 && !has_single_use (val))
5371 return NULL_TREE;
5372 return val;
/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  /* Unconditionally allow looking through the defining statement.  */
  return val;
}
5383 /* Fold the statement pointed to by GSI. In some cases, this function may
5384 replace the whole statement with a new one. Returns true iff folding
5385 makes any changes.
5386 The statement pointed to by GSI should be in valid gimple form but may
5387 be in unfolded state as resulting from for example constant propagation
5388 which can produce *&x = 0. */
5390 bool
5391 fold_stmt (gimple_stmt_iterator *gsi)
5393 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5396 bool
5397 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5399 return fold_stmt_1 (gsi, false, valueize);
5402 /* Perform the minimal folding on statement *GSI. Only operations like
5403 *&x created by constant propagation are handled. The statement cannot
5404 be replaced with a new one. Return true if the statement was
5405 changed, false otherwise.
5406 The statement *GSI should be in valid gimple form but may
5407 be in unfolded state as resulting from for example constant propagation
5408 which can produce *&x = 0. */
5410 bool
5411 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5413 gimple *stmt = gsi_stmt (*gsi);
5414 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5415 gcc_assert (gsi_stmt (*gsi) == stmt);
5416 return changed;
5419 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5420 if EXPR is null or we don't know how.
5421 If non-null, the result always has boolean type. */
5423 static tree
5424 canonicalize_bool (tree expr, bool invert)
5426 if (!expr)
5427 return NULL_TREE;
5428 else if (invert)
5430 if (integer_nonzerop (expr))
5431 return boolean_false_node;
5432 else if (integer_zerop (expr))
5433 return boolean_true_node;
5434 else if (TREE_CODE (expr) == SSA_NAME)
5435 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5436 build_int_cst (TREE_TYPE (expr), 0));
5437 else if (COMPARISON_CLASS_P (expr))
5438 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5439 boolean_type_node,
5440 TREE_OPERAND (expr, 0),
5441 TREE_OPERAND (expr, 1));
5442 else
5443 return NULL_TREE;
5445 else
5447 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5448 return expr;
5449 if (integer_nonzerop (expr))
5450 return boolean_true_node;
5451 else if (integer_zerop (expr))
5452 return boolean_false_node;
5453 else if (TREE_CODE (expr) == SSA_NAME)
5454 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5455 build_int_cst (TREE_TYPE (expr), 0));
5456 else if (COMPARISON_CLASS_P (expr))
5457 return fold_build2 (TREE_CODE (expr),
5458 boolean_type_node,
5459 TREE_OPERAND (expr, 0),
5460 TREE_OPERAND (expr, 1));
5461 else
5462 return NULL_TREE;
5466 /* Check to see if a boolean expression EXPR is logically equivalent to the
5467 comparison (OP1 CODE OP2). Check for various identities involving
5468 SSA_NAMEs. */
5470 static bool
5471 same_bool_comparison_p (const_tree expr, enum tree_code code,
5472 const_tree op1, const_tree op2)
5474 gimple *s;
5476 /* The obvious case. */
5477 if (TREE_CODE (expr) == code
5478 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5479 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5480 return true;
5482 /* Check for comparing (name, name != 0) and the case where expr
5483 is an SSA_NAME with a definition matching the comparison. */
5484 if (TREE_CODE (expr) == SSA_NAME
5485 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5487 if (operand_equal_p (expr, op1, 0))
5488 return ((code == NE_EXPR && integer_zerop (op2))
5489 || (code == EQ_EXPR && integer_nonzerop (op2)));
5490 s = SSA_NAME_DEF_STMT (expr);
5491 if (is_gimple_assign (s)
5492 && gimple_assign_rhs_code (s) == code
5493 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5494 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5495 return true;
5498 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5499 of name is a comparison, recurse. */
5500 if (TREE_CODE (op1) == SSA_NAME
5501 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5503 s = SSA_NAME_DEF_STMT (op1);
5504 if (is_gimple_assign (s)
5505 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5507 enum tree_code c = gimple_assign_rhs_code (s);
5508 if ((c == NE_EXPR && integer_zerop (op2))
5509 || (c == EQ_EXPR && integer_nonzerop (op2)))
5510 return same_bool_comparison_p (expr, c,
5511 gimple_assign_rhs1 (s),
5512 gimple_assign_rhs2 (s));
5513 if ((c == EQ_EXPR && integer_zerop (op2))
5514 || (c == NE_EXPR && integer_nonzerop (op2)))
5515 return same_bool_comparison_p (expr,
5516 invert_tree_comparison (c, false),
5517 gimple_assign_rhs1 (s),
5518 gimple_assign_rhs2 (s));
5521 return false;
5524 /* Check to see if two boolean expressions OP1 and OP2 are logically
5525 equivalent. */
5527 static bool
5528 same_bool_result_p (const_tree op1, const_tree op2)
5530 /* Simple cases first. */
5531 if (operand_equal_p (op1, op2, 0))
5532 return true;
5534 /* Check the cases where at least one of the operands is a comparison.
5535 These are a bit smarter than operand_equal_p in that they apply some
5536 identifies on SSA_NAMEs. */
5537 if (COMPARISON_CLASS_P (op2)
5538 && same_bool_comparison_p (op1, TREE_CODE (op2),
5539 TREE_OPERAND (op2, 0),
5540 TREE_OPERAND (op2, 1)))
5541 return true;
5542 if (COMPARISON_CLASS_P (op1)
5543 && same_bool_comparison_p (op2, TREE_CODE (op1),
5544 TREE_OPERAND (op1, 0),
5545 TREE_OPERAND (op1, 1)))
5546 return true;
5548 /* Default case. */
5549 return false;
5552 /* Forward declarations for some mutually recursive functions. */
5554 static tree
5555 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5556 enum tree_code code2, tree op2a, tree op2b);
5557 static tree
5558 and_var_with_comparison (tree type, tree var, bool invert,
5559 enum tree_code code2, tree op2a, tree op2b);
5560 static tree
5561 and_var_with_comparison_1 (tree type, gimple *stmt,
5562 enum tree_code code2, tree op2a, tree op2b);
5563 static tree
5564 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5565 enum tree_code code2, tree op2a, tree op2b);
5566 static tree
5567 or_var_with_comparison (tree, tree var, bool invert,
5568 enum tree_code code2, tree op2a, tree op2b);
5569 static tree
5570 or_var_with_comparison_1 (tree, gimple *stmt,
5571 enum tree_code code2, tree op2a, tree op2b);
5573 /* Helper function for and_comparisons_1: try to simplify the AND of the
5574 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5575 If INVERT is true, invert the value of the VAR before doing the AND.
5576 Return NULL_EXPR if we can't simplify this to a single expression. */
5578 static tree
5579 and_var_with_comparison (tree type, tree var, bool invert,
5580 enum tree_code code2, tree op2a, tree op2b)
5582 tree t;
5583 gimple *stmt = SSA_NAME_DEF_STMT (var);
5585 /* We can only deal with variables whose definitions are assignments. */
5586 if (!is_gimple_assign (stmt))
5587 return NULL_TREE;
5589 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5590 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5591 Then we only have to consider the simpler non-inverted cases. */
5592 if (invert)
5593 t = or_var_with_comparison_1 (type, stmt,
5594 invert_tree_comparison (code2, false),
5595 op2a, op2b);
5596 else
5597 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5598 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR/FALSE_TEST_VAR record OP2A when the second comparison
     tests it for truth resp. falsehood; used by the identity checks
     below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds the simplification of the first inner test ANDed
	 with the second comparison, if any, for combination with the
	 second inner test's result.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison is a logical negation
	 of NAME (NAME == 0 or NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A (possibly inverted) zero argument forces a
			 false result; otherwise the result is just the
			 second comparison.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
   : try to simplify the AND/OR of the ssa variable VAR with the comparison
   specified by (OP2A CODE2 OP2B) from match.pd.  Return NULL_EXPR if we can't
   simplify this to a single expression.  As we are going to lower the cost
   of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  */
  tree lhs1 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Hand (lhs1 CODE lhs2) to the match.pd machinery, following the
     fake SSA defs built above.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* The stack-allocated names must not escape; rebuild the
	     corresponding comparison from the original operands.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
5968 /* Try to simplify the AND of two comparisons, specified by
5969 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5970 If this can be simplified to a single expression (without requiring
5971 introducing more SSA variables to hold intermediate values),
5972 return the resulting tree. Otherwise return NULL_TREE.
5973 If the result expression is non-null, it has boolean type. */
5975 tree
5976 maybe_fold_and_comparisons (tree type,
5977 enum tree_code code1, tree op1a, tree op1b,
5978 enum tree_code code2, tree op2a, tree op2b)
5980 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5981 return t;
5983 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5984 return t;
5986 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5987 op1a, op1b, code2, op2a,
5988 op2b))
5989 return t;
5991 return NULL_TREE;
5994 /* Helper function for or_comparisons_1: try to simplify the OR of the
5995 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5996 If INVERT is true, invert the value of VAR before doing the OR.
5997 Return NULL_EXPR if we can't simplify this to a single expression. */
5999 static tree
6000 or_var_with_comparison (tree type, tree var, bool invert,
6001 enum tree_code code2, tree op2a, tree op2b)
6003 tree t;
6004 gimple *stmt = SSA_NAME_DEF_STMT (var);
6006 /* We can only deal with variables whose definitions are assignments. */
6007 if (!is_gimple_assign (stmt))
6008 return NULL_TREE;
6010 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6011 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6012 Then we only have to consider the simpler non-inverted cases. */
6013 if (invert)
6014 t = and_var_with_comparison_1 (type, stmt,
6015 invert_tree_comparison (code2, false),
6016 op2a, op2b);
6017 else
6018 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6019 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR/FALSE_TEST_VAR record OP2A when the second comparison
     tests it for truth resp. falsehood; used by the identity checks
     below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds the simplification of the first inner test ORed
	 with the second comparison, if any, for combination with the
	 second inner test's result.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
6188 /* Try to simplify the OR of two comparisons defined by
6189 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6190 If this can be done without constructing an intermediate value,
6191 return the resulting tree; otherwise NULL_TREE is returned.
6192 This function is deliberately asymmetric as it recurses on SSA_DEFs
6193 in the first comparison but not the second. */
6195 static tree
6196 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6197 enum tree_code code2, tree op2a, tree op2b)
6199 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6201 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6202 if (operand_equal_p (op1a, op2a, 0)
6203 && operand_equal_p (op1b, op2b, 0))
6205 /* Result will be either NULL_TREE, or a combined comparison. */
6206 tree t = combine_comparisons (UNKNOWN_LOCATION,
6207 TRUTH_ORIF_EXPR, code1, code2,
6208 truth_type, op1a, op1b);
6209 if (t)
6210 return t;
6213 /* Likewise the swapped case of the above. */
6214 if (operand_equal_p (op1a, op2b, 0)
6215 && operand_equal_p (op1b, op2a, 0))
6217 /* Result will be either NULL_TREE, or a combined comparison. */
6218 tree t = combine_comparisons (UNKNOWN_LOCATION,
6219 TRUTH_ORIF_EXPR, code1,
6220 swap_tree_comparison (code2),
6221 truth_type, op1a, op1b);
6222 if (t)
6223 return t;
6226 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6227 NAME's definition is a truth value. See if there are any simplifications
6228 that can be done against the NAME's definition. */
6229 if (TREE_CODE (op1a) == SSA_NAME
6230 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6231 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true iff the first comparison tests that NAME is false,
i.e. it is (NAME == 0) or (NAME != 1). */
6233 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6234 || (code1 == NE_EXPR && integer_onep (op1b)));
6235 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6236 switch (gimple_code (stmt))
6238 case GIMPLE_ASSIGN:
6239 /* Try to simplify by copy-propagating the definition. */
6240 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6241 op2b);
6243 case GIMPLE_PHI:
6244 /* If every argument to the PHI produces the same result when
6245 ORed with the second comparison, we win.
6246 Do not do this unless the type is bool since we need a bool
6247 result here anyway. */
6248 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6250 tree result = NULL_TREE;
6251 unsigned i;
6252 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6254 tree arg = gimple_phi_arg_def (stmt, i);
6256 /* If this PHI has itself as an argument, ignore it.
6257 If all the other args produce the same result,
6258 we're still OK. */
6259 if (arg == gimple_phi_result (stmt))
6260 continue;
6261 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant PHI argument: either it makes the whole OR true
(result is boolean_true_node), or the result is whatever the
second comparison folds to; all arguments must agree. */
6263 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6265 if (!result)
6266 result = boolean_true_node;
6267 else if (!integer_onep (result))
6268 return NULL_TREE;
6270 else if (!result)
6271 result = fold_build2 (code2, boolean_type_node,
6272 op2a, op2b);
6273 else if (!same_bool_comparison_p (result,
6274 code2, op2a, op2b))
6275 return NULL_TREE;
6277 else if (TREE_CODE (arg) == SSA_NAME
6278 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6280 tree temp;
6281 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6282 /* In simple cases we can look through PHI nodes,
6283 but we have to be careful with loops.
6284 See PR49073. */
6285 if (! dom_info_available_p (CDI_DOMINATORS)
6286 || gimple_bb (def_stmt) == gimple_bb (stmt)
6287 || dominated_by_p (CDI_DOMINATORS,
6288 gimple_bb (def_stmt),
6289 gimple_bb (stmt)))
6290 return NULL_TREE;
6291 temp = or_var_with_comparison (type, arg, invert, code2,
6292 op2a, op2b);
6293 if (!temp)
6294 return NULL_TREE;
6295 else if (!result)
6296 result = temp;
6297 else if (!same_bool_result_p (result, temp))
6298 return NULL_TREE;
6300 else
6301 return NULL_TREE;
6303 return result;
6306 default:
6307 break;
6310 return NULL_TREE;
6313 /* Try to simplify the OR of two comparisons, specified by
6314 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6315 If this can be simplified to a single expression (without requiring
6316 introducing more SSA variables to hold intermediate values),
6317 return the resulting tree. Otherwise return NULL_TREE.
6318 If the result expression is non-null, it has boolean type. */
6320 tree
6321 maybe_fold_or_comparisons (tree type,
6322 enum tree_code code1, tree op1a, tree op1b,
6323 enum tree_code code2, tree op2a, tree op2b)
6325 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6326 return t;
6328 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6329 return t;
6331 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6332 op1a, op1b, code2, op2a,
6333 op2b))
6334 return t;
6336 return NULL_TREE;
6339 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6341 Either NULL_TREE, a simplified but non-constant or a constant
6342 is returned.
6344 ??? This should go into a gimple-fold-inline.h file to be eventually
6345 privatized with the single valueize function used in the various TUs
6346 to avoid the indirect function call overhead. */
6348 tree
6349 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6350 tree (*gvalueize) (tree))
6352 gimple_match_op res_op;
6353 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6354 edges if there are intermediate VARYING defs. For this reason
6355 do not follow SSA edges here even though SCCVN can technically
6356 just deal fine with that. */
6357 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6359 tree res = NULL_TREE;
6360 if (gimple_simplified_result_is_gimple_val (&res_op))
6361 res = res_op.ops[0];
6362 else if (mprts_hook)
6363 res = mprts_hook (&res_op);
6364 if (res)
6366 if (dump_file && dump_flags & TDF_DETAILS)
6368 fprintf (dump_file, "Match-and-simplified ");
6369 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6370 fprintf (dump_file, " to ");
6371 print_generic_expr (dump_file, res);
6372 fprintf (dump_file, "\n");
6374 return res;
6378 location_t loc = gimple_location (stmt);
6379 switch (gimple_code (stmt))
6381 case GIMPLE_ASSIGN:
6383 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6385 switch (get_gimple_rhs_class (subcode))
6387 case GIMPLE_SINGLE_RHS:
6389 tree rhs = gimple_assign_rhs1 (stmt);
6390 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6392 if (TREE_CODE (rhs) == SSA_NAME)
6394 /* If the RHS is an SSA_NAME, return its known constant value,
6395 if any. */
6396 return (*valueize) (rhs);
6398 /* Handle propagating invariant addresses into address
6399 operations. */
6400 else if (TREE_CODE (rhs) == ADDR_EXPR
6401 && !is_gimple_min_invariant (rhs))
6403 poly_int64 offset = 0;
6404 tree base;
6405 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6406 &offset,
6407 valueize);
6408 if (base
6409 && (CONSTANT_CLASS_P (base)
6410 || decl_address_invariant_p (base)))
6411 return build_invariant_address (TREE_TYPE (rhs),
6412 base, offset);
/* A vector CONSTRUCTOR with one value per lane: valueize each
element and build a VECTOR_CST when all are constants. */
6414 else if (TREE_CODE (rhs) == CONSTRUCTOR
6415 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6416 && known_eq (CONSTRUCTOR_NELTS (rhs),
6417 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6419 unsigned i, nelts;
6420 tree val;
6422 nelts = CONSTRUCTOR_NELTS (rhs);
6423 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6424 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6426 val = (*valueize) (val);
6427 if (TREE_CODE (val) == INTEGER_CST
6428 || TREE_CODE (val) == REAL_CST
6429 || TREE_CODE (val) == FIXED_CST)
6430 vec.quick_push (val);
6431 else
6432 return NULL_TREE;
6435 return vec.build ();
6437 if (subcode == OBJ_TYPE_REF)
6439 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6440 /* If callee is constant, we can fold away the wrapper. */
6441 if (is_gimple_min_invariant (val))
6442 return val;
6445 if (kind == tcc_reference)
6447 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6448 || TREE_CODE (rhs) == REALPART_EXPR
6449 || TREE_CODE (rhs) == IMAGPART_EXPR)
6450 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6452 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6453 return fold_unary_loc (EXPR_LOCATION (rhs),
6454 TREE_CODE (rhs),
6455 TREE_TYPE (rhs), val);
6457 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6458 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6460 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6461 return fold_ternary_loc (EXPR_LOCATION (rhs),
6462 TREE_CODE (rhs),
6463 TREE_TYPE (rhs), val,
6464 TREE_OPERAND (rhs, 1),
6465 TREE_OPERAND (rhs, 2));
6467 else if (TREE_CODE (rhs) == MEM_REF
6468 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6470 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6471 if (TREE_CODE (val) == ADDR_EXPR
6472 && is_gimple_min_invariant (val))
6474 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6475 unshare_expr (val),
6476 TREE_OPERAND (rhs, 1));
6477 if (tem)
6478 rhs = tem;
6481 return fold_const_aggregate_ref_1 (rhs, valueize);
6483 else if (kind == tcc_declaration)
6484 return get_symbol_constant_value (rhs);
6485 return rhs;
6488 case GIMPLE_UNARY_RHS:
6489 return NULL_TREE;
6491 case GIMPLE_BINARY_RHS:
6492 /* Translate &x + CST into an invariant form suitable for
6493 further propagation. */
6494 if (subcode == POINTER_PLUS_EXPR)
6496 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6497 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6498 if (TREE_CODE (op0) == ADDR_EXPR
6499 && TREE_CODE (op1) == INTEGER_CST)
6501 tree off = fold_convert (ptr_type_node, op1);
6502 return build1_loc
6503 (loc, ADDR_EXPR, TREE_TYPE (op0),
6504 fold_build2 (MEM_REF,
6505 TREE_TYPE (TREE_TYPE (op0)),
6506 unshare_expr (op0), off));
6509 /* Canonicalize bool != 0 and bool == 0 appearing after
6510 valueization. While gimple_simplify handles this
6511 it can get confused by the ~X == 1 -> X == 0 transform
6512 which we can't reduce to a SSA name or a constant
6513 (and we have no way to tell gimple_simplify to not
6514 consider those transforms in the first place). */
6515 else if (subcode == EQ_EXPR
6516 || subcode == NE_EXPR)
6518 tree lhs = gimple_assign_lhs (stmt);
6519 tree op0 = gimple_assign_rhs1 (stmt);
6520 if (useless_type_conversion_p (TREE_TYPE (lhs),
6521 TREE_TYPE (op0)))
6523 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6524 op0 = (*valueize) (op0);
/* Put a constant (if any) into OP1 so the tests below only
need to look at one operand. */
6525 if (TREE_CODE (op0) == INTEGER_CST)
6526 std::swap (op0, op1);
6527 if (TREE_CODE (op1) == INTEGER_CST
6528 && ((subcode == NE_EXPR && integer_zerop (op1))
6529 || (subcode == EQ_EXPR && integer_onep (op1))))
6530 return op0;
6533 return NULL_TREE;
6535 case GIMPLE_TERNARY_RHS:
6537 /* Handle ternary operators that can appear in GIMPLE form. */
6538 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6539 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6540 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6541 return fold_ternary_loc (loc, subcode,
6542 gimple_expr_type (stmt), op0, op1, op2);
6545 default:
6546 gcc_unreachable ();
6550 case GIMPLE_CALL:
6552 tree fn;
6553 gcall *call_stmt = as_a <gcall *> (stmt);
/* Internal calls: only the IFN_UBSAN_CHECK_{ADD,SUB,MUL} arithmetic
and IFN_BUILTIN_EXPECT are handled; fold them when the valueized
operands allow a constant (non-overflowing) result. */
6555 if (gimple_call_internal_p (stmt))
6557 enum tree_code subcode = ERROR_MARK;
6558 switch (gimple_call_internal_fn (stmt))
6560 case IFN_UBSAN_CHECK_ADD:
6561 subcode = PLUS_EXPR;
6562 break;
6563 case IFN_UBSAN_CHECK_SUB:
6564 subcode = MINUS_EXPR;
6565 break;
6566 case IFN_UBSAN_CHECK_MUL:
6567 subcode = MULT_EXPR;
6568 break;
6569 case IFN_BUILTIN_EXPECT:
6571 tree arg0 = gimple_call_arg (stmt, 0);
6572 tree op0 = (*valueize) (arg0);
6573 if (TREE_CODE (op0) == INTEGER_CST)
6574 return op0;
6575 return NULL_TREE;
6577 default:
6578 return NULL_TREE;
6580 tree arg0 = gimple_call_arg (stmt, 0);
6581 tree arg1 = gimple_call_arg (stmt, 1);
6582 tree op0 = (*valueize) (arg0);
6583 tree op1 = (*valueize) (arg1);
6585 if (TREE_CODE (op0) != INTEGER_CST
6586 || TREE_CODE (op1) != INTEGER_CST)
6588 switch (subcode)
6590 case MULT_EXPR:
6591 /* x * 0 = 0 * x = 0 without overflow. */
6592 if (integer_zerop (op0) || integer_zerop (op1))
6593 return build_zero_cst (TREE_TYPE (arg0));
6594 break;
6595 case MINUS_EXPR:
6596 /* y - y = 0 without overflow. */
6597 if (operand_equal_p (op0, op1, 0))
6598 return build_zero_cst (TREE_TYPE (arg0));
6599 break;
6600 default:
6601 break;
6604 tree res
6605 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6606 if (res
6607 && TREE_CODE (res) == INTEGER_CST
6608 && !TREE_OVERFLOW (res))
6609 return res;
6610 return NULL_TREE;
/* An ordinary call: constant-fold a builtin callee on the
valueized arguments. */
6613 fn = (*valueize) (gimple_call_fn (stmt));
6614 if (TREE_CODE (fn) == ADDR_EXPR
6615 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6616 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6617 && gimple_builtin_call_types_compatible_p (stmt,
6618 TREE_OPERAND (fn, 0)))
6620 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6621 tree retval;
6622 unsigned i;
6623 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6624 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6625 retval = fold_builtin_call_array (loc,
6626 gimple_call_return_type (call_stmt),
6627 fn, gimple_call_num_args (stmt), args);
6628 if (retval)
6630 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6631 STRIP_NOPS (retval);
6632 retval = fold_convert (gimple_call_return_type (call_stmt),
6633 retval);
6635 return retval;
6637 return NULL_TREE;
6640 default:
6641 return NULL_TREE;
6645 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6646 Returns NULL_TREE if folding to a constant is not possible, otherwise
6647 returns a constant according to is_gimple_min_invariant. */
6649 tree
6650 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6652 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6653 if (res && is_gimple_min_invariant (res))
6654 return res;
6655 return NULL_TREE;
6659 /* The following set of functions are supposed to fold references using
6660 their constant initializers. */
6662 /* See if we can find constructor defining value of BASE.
6663 When we know the constructor with constant offset (such as
6664 base is array[40] and we do know constructor of array), then
6665 BIT_OFFSET is adjusted accordingly.
6667 As a special case, return error_mark_node when constructor
6668 is not explicitly available, but it is known to be zero
6669 such as 'static const int a;'. */
6670 static tree
6671 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6672 tree (*valueize)(tree))
6674 poly_int64 bit_offset2, size, max_size;
6675 bool reverse;
/* For a MEM_REF, fold its constant offset into *BIT_OFFSET and
strip down to the (possibly valueized) address operand. */
6677 if (TREE_CODE (base) == MEM_REF)
6679 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6680 if (!boff.to_shwi (bit_offset))
6681 return NULL_TREE;
6683 if (valueize
6684 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6685 base = valueize (TREE_OPERAND (base, 0));
6686 if (!base || TREE_CODE (base) != ADDR_EXPR)
6687 return NULL_TREE;
6688 base = TREE_OPERAND (base, 0);
6690 else if (valueize
6691 && TREE_CODE (base) == SSA_NAME)
6692 base = valueize (base);
6694 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6695 DECL_INITIAL. If BASE is a nested reference into another
6696 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6697 the inner reference. */
6698 switch (TREE_CODE (base))
6700 case VAR_DECL:
6701 case CONST_DECL:
6703 tree init = ctor_for_folding (base);
6705 /* Our semantic is exact opposite of ctor_for_folding;
6706 NULL means unknown, while error_mark_node is 0. */
6707 if (init == error_mark_node)
6708 return NULL_TREE;
6709 if (!init)
6710 return error_mark_node;
6711 return init;
6714 case VIEW_CONVERT_EXPR:
6715 return get_base_constructor (TREE_OPERAND (base, 0),
6716 bit_offset, valueize);
6718 case ARRAY_REF:
6719 case COMPONENT_REF:
6720 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6721 &reverse);
6722 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6723 return NULL_TREE;
6724 *bit_offset += bit_offset2;
6725 return get_base_constructor (base, bit_offset, valueize);
6727 case CONSTRUCTOR:
6728 return base;
6730 default:
6731 if (CONSTANT_CLASS_P (base))
6732 return base;
6734 return NULL_TREE;
6738 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6739 to the memory at bit OFFSET. When non-null, TYPE is the expected
6740 type of the reference; otherwise the type of the referenced element
6741 is used instead. When SIZE is zero, attempt to fold a reference to
6742 the entire element which OFFSET refers to. Increment *SUBOFF by
6743 the bit offset of the accessed element. */
6745 static tree
6746 fold_array_ctor_reference (tree type, tree ctor,
6747 unsigned HOST_WIDE_INT offset,
6748 unsigned HOST_WIDE_INT size,
6749 tree from_decl,
6750 unsigned HOST_WIDE_INT *suboff)
6752 offset_int low_bound;
6753 offset_int elt_size;
6754 offset_int access_index;
6755 tree domain_type = NULL_TREE;
6756 HOST_WIDE_INT inner_offset;
6758 /* Compute low bound and elt size. */
6759 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6760 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6761 if (domain_type && TYPE_MIN_VALUE (domain_type))
6763 /* Static constructors for variably sized objects make no sense. */
6764 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6765 return NULL_TREE;
6766 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6768 else
6769 low_bound = 0;
6770 /* Static constructors for variably sized objects make no sense. */
6771 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6772 return NULL_TREE;
6773 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6775 /* When TYPE is non-null, verify that it specifies a constant-sized
6776 access of a multiple of the array element size. Avoid division
6777 by zero below when ELT_SIZE is zero, such as with the result of
6778 an initializer for a zero-length array or an empty struct. */
6779 if (elt_size == 0
6780 || (type
6781 && (!TYPE_SIZE_UNIT (type)
6782 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
6783 return NULL_TREE;
6785 /* Compute the array index we look for. */
6786 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6787 elt_size);
6788 access_index += low_bound;
6790 /* And offset within the access. */
6791 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
/* An access wider than a single element spans multiple ctor entries;
handle it by byte-encoding the run of elements into a buffer. */
6793 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
6794 if (size > elt_sz * BITS_PER_UNIT)
6796 /* native_encode_expr constraints. */
6797 if (size > MAX_BITSIZE_MODE_ANY_MODE
6798 || size % BITS_PER_UNIT != 0
6799 || inner_offset % BITS_PER_UNIT != 0
6800 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
6801 return NULL_TREE;
6803 unsigned ctor_idx;
6804 tree val = get_array_ctor_element_at_index (ctor, access_index,
6805 &ctor_idx);
6806 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6807 return build_zero_cst (type);
6809 /* native-encode adjacent ctor elements. */
6810 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6811 unsigned bufoff = 0;
6812 offset_int index = 0;
6813 offset_int max_index = access_index;
6814 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6815 if (!val)
6816 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6817 else if (!CONSTANT_CLASS_P (val))
6818 return NULL_TREE;
6819 if (!elt->index)
6821 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6823 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6824 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6826 else
6827 index = max_index = wi::to_offset (elt->index);
6828 index = wi::umax (index, access_index);
/* Encode successive elements into BUF, advancing through the ctor
(elements not explicitly present read as zero), until SIZE bits
have been gathered. */
6831 if (bufoff + elt_sz > sizeof (buf))
6832 elt_sz = sizeof (buf) - bufoff;
6833 int len = native_encode_expr (val, buf + bufoff, elt_sz,
6834 inner_offset / BITS_PER_UNIT);
6835 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
6836 return NULL_TREE;
6837 inner_offset = 0;
6838 bufoff += len;
6840 access_index += 1;
6841 if (wi::cmpu (access_index, index) == 0)
6842 val = elt->value;
6843 else if (wi::cmpu (access_index, max_index) > 0)
6845 ctor_idx++;
6846 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6848 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6849 ++max_index;
6851 else
6853 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6854 index = 0;
6855 max_index = access_index;
6856 if (!elt->index)
6858 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6860 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6861 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6863 else
6864 index = max_index = wi::to_offset (elt->index);
6865 index = wi::umax (index, access_index);
6866 if (wi::cmpu (access_index, index) == 0)
6867 val = elt->value;
6868 else
6869 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6873 while (bufoff < size / BITS_PER_UNIT);
6874 *suboff += size;
6875 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6878 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6880 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6882 /* For the final reference to the entire accessed element
6883 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6884 may be null) in favor of the type of the element, and set
6885 SIZE to the size of the accessed element. */
6886 inner_offset = 0;
6887 type = TREE_TYPE (val);
6888 size = elt_sz * BITS_PER_UNIT;
6890 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
6891 && TREE_CODE (val) == CONSTRUCTOR
6892 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
6893 /* If this isn't the last element in the CTOR and a CTOR itself
6894 and it does not cover the whole object we are requesting give up
6895 since we're not set up for combining from multiple CTORs. */
6896 return NULL_TREE;
6898 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
6899 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6900 suboff);
6903 /* Memory not explicitly mentioned in constructor is 0 (or
6904 the reference is out of range). */
6905 return type ? build_zero_cst (type) : NULL_TREE;
6908 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6909 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6910 is the expected type of the reference; otherwise the type of
6911 the referenced member is used instead. When SIZE is zero,
6912 attempt to fold a reference to the entire member which OFFSET
6913 refers to. Increment *SUBOFF by the bit offset
6914 of the accessed member. */
6916 static tree
6917 fold_nonarray_ctor_reference (tree type, tree ctor,
6918 unsigned HOST_WIDE_INT offset,
6919 unsigned HOST_WIDE_INT size,
6920 tree from_decl,
6921 unsigned HOST_WIDE_INT *suboff)
6923 unsigned HOST_WIDE_INT cnt;
6924 tree cfield, cval;
/* Walk the initialized fields looking for the one overlapping
the requested [OFFSET, OFFSET + SIZE) bit range. */
6926 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6927 cval)
6929 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6930 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6931 tree field_size = DECL_SIZE (cfield);
6933 if (!field_size)
6935 /* Determine the size of the flexible array member from
6936 the size of the initializer provided for it. */
6937 field_size = TYPE_SIZE (TREE_TYPE (cval));
6940 /* Variable sized objects in static constructors make no sense,
6941 but field_size can be NULL for flexible array members. */
6942 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6943 && TREE_CODE (byte_offset) == INTEGER_CST
6944 && (field_size != NULL_TREE
6945 ? TREE_CODE (field_size) == INTEGER_CST
6946 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6948 /* Compute bit offset of the field. */
6949 offset_int bitoffset
6950 = (wi::to_offset (field_offset)
6951 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6952 /* Compute bit offset where the field ends. */
6953 offset_int bitoffset_end;
6954 if (field_size != NULL_TREE)
6955 bitoffset_end = bitoffset + wi::to_offset (field_size);
6956 else
6957 bitoffset_end = 0;
6959 /* Compute the bit offset of the end of the desired access.
6960 As a special case, if the size of the desired access is
6961 zero, assume the access is to the entire field (and let
6962 the caller make any necessary adjustments; the accessed
6963 field's bit offset is added to *SUBOFF below). */
6964 offset_int access_end = offset_int (offset);
6965 if (size)
6966 access_end += size;
6967 else
6968 access_end = bitoffset_end;
6970 /* Is there any overlap between the desired access at
6971 [OFFSET, OFFSET+SIZE) and the offset of the field within
6972 the object at [BITOFFSET, BITOFFSET_END)? */
6973 if (wi::cmps (access_end, bitoffset) > 0
6974 && (field_size == NULL_TREE
6975 || wi::lts_p (offset, bitoffset_end)))
6977 *suboff += bitoffset.to_uhwi ();
6979 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6981 /* For the final reference to the entire accessed member
6982 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6983 be null) in favor of the type of the member, and set
6984 SIZE to the size of the accessed member. */
6985 offset = bitoffset.to_uhwi ();
6986 type = TREE_TYPE (cval);
6987 size = (bitoffset_end - bitoffset).to_uhwi ();
6990 /* We do have overlap. Now see if the field is large enough
6991 to cover the access. Give up for accesses that extend
6992 beyond the end of the object or that span multiple fields. */
6993 if (wi::cmps (access_end, bitoffset_end) > 0)
6994 return NULL_TREE;
6995 if (offset < bitoffset)
6996 return NULL_TREE;
6998 offset_int inner_offset = offset_int (offset) - bitoffset;
6999 return fold_ctor_reference (type, cval,
7000 inner_offset.to_uhwi (), size,
7001 from_decl, suboff);
/* Memory not explicitly initialized in the constructor reads as
zero, provided we know a TYPE to build the zero constant in. */
7005 if (!type)
7006 return NULL_TREE;
7008 return build_zero_cst (type);
7011 /* CTOR is value initializing memory. Fold a reference of TYPE and
7012 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7013 is zero, attempt to fold a reference to the entire subobject
7014 which OFFSET refers to. This is used when folding accesses to
7015 string members of aggregates. When non-null, set *SUBOFF to
7016 the bit offset of the accessed subobject.
Return NULL_TREE when the reference cannot be folded. */
7018 tree
7019 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7020 const poly_uint64 &poly_size, tree from_decl,
7021 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7023 tree ret;
7025 /* We found the field with exact match. */
7026 if (type
7027 && useless_type_conversion_p (type, TREE_TYPE (ctor))
7028 && known_eq (poly_offset, 0U))
7029 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7031 /* The remaining optimizations need a constant size and offset. */
7032 unsigned HOST_WIDE_INT size, offset;
7033 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7034 return NULL_TREE;
7036 /* We are at the end of walk, see if we can view convert the
7037 result. */
7038 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7039 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7040 && !compare_tree_int (TYPE_SIZE (type), size)
7041 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
7043 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7044 if (ret)
7046 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7047 if (ret)
7048 STRIP_USELESS_TYPE_CONVERSION (ret);
7050 return ret;
7052 /* For constants and byte-aligned/sized reads try to go through
7053 native_encode/interpret. */
7054 if (CONSTANT_CLASS_P (ctor)
7055 && BITS_PER_UNIT == 8
7056 && offset % BITS_PER_UNIT == 0
7057 && offset / BITS_PER_UNIT <= INT_MAX
7058 && size % BITS_PER_UNIT == 0
7059 && size <= MAX_BITSIZE_MODE_ANY_MODE
7060 && can_native_interpret_type_p (type))
7062 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7063 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7064 offset / BITS_PER_UNIT);
7065 if (len > 0)
7066 return native_interpret_expr (type, buf, len);
/* Dispatch CONSTRUCTOR nodes to the array or non-array folder.
A local dummy stands in for a null SUBOFF so the helpers can
update it unconditionally. */
7068 if (TREE_CODE (ctor) == CONSTRUCTOR)
7070 unsigned HOST_WIDE_INT dummy = 0;
7071 if (!suboff)
7072 suboff = &dummy;
7074 tree ret;
7075 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7076 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7077 ret = fold_array_ctor_reference (type, ctor, offset, size,
7078 from_decl, suboff);
7079 else
7080 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7081 from_decl, suboff);
7083 /* Fall back to native_encode_initializer. Needs to be done
7084 only in the outermost fold_ctor_reference call (because it itself
7085 recurses into CONSTRUCTORs) and doesn't update suboff. */
7086 if (ret == NULL_TREE
7087 && suboff == &dummy
7088 && BITS_PER_UNIT == 8
7089 && offset % BITS_PER_UNIT == 0
7090 && offset / BITS_PER_UNIT <= INT_MAX
7091 && size % BITS_PER_UNIT == 0
7092 && size <= MAX_BITSIZE_MODE_ANY_MODE
7093 && can_native_interpret_type_p (type))
7095 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7096 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7097 offset / BITS_PER_UNIT);
7098 if (len > 0)
7099 return native_interpret_expr (type, buf, len);
7102 return ret;
7105 return NULL_TREE;
7108 /* Return the tree representing the element referenced by T if T is an
7109 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
7110 names using VALUEIZE. Return NULL_TREE otherwise. */
7112 tree
7113 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7115 tree ctor, idx, base;
7116 poly_int64 offset, size, max_size;
7117 tree tem;
7118 bool reverse;
7120 if (TREE_THIS_VOLATILE (t))
7121 return NULL_TREE;
7123 if (DECL_P (t))
7124 return get_symbol_constant_value (t);
7126 tem = fold_read_from_constant_string (t);
7127 if (tem)
7128 return tem;
7130 switch (TREE_CODE (t))
7132 case ARRAY_REF:
7133 case ARRAY_RANGE_REF:
7134 /* Constant indexes are handled well by get_base_constructor.
7135 Only special case variable offsets.
7136 FIXME: This code can't handle nested references with variable indexes
7137 (they will be handled only by iteration of ccp). Perhaps we can bring
7138 get_ref_base_and_extent here and make it use a valueize callback. */
7139 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7140 && valueize
7141 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
7142 && poly_int_tree_p (idx))
7144 tree low_bound, unit_size;
7146 /* If the resulting bit-offset is constant, track it. */
7147 if ((low_bound = array_ref_low_bound (t),
7148 poly_int_tree_p (low_bound))
7149 && (unit_size = array_ref_element_size (t),
7150 tree_fits_uhwi_p (unit_size)))
7152 poly_offset_int woffset
7153 = wi::sext (wi::to_poly_offset (idx)
7154 - wi::to_poly_offset (low_bound),
7155 TYPE_PRECISION (TREE_TYPE (idx)));
7156 woffset *= tree_to_uhwi (unit_size);
7157 woffset *= BITS_PER_UNIT;
7158 if (woffset.to_shwi (&offset))
7160 base = TREE_OPERAND (t, 0);
7161 ctor = get_base_constructor (base, &offset, valueize);
7162 /* Empty constructor. Always fold to 0. */
7163 if (ctor == error_mark_node)
7164 return build_zero_cst (TREE_TYPE (t));
7165 /* Out of bound array access. Value is undefined,
7166 but don't fold. */
7167 if (maybe_lt (offset, 0))
7168 return NULL_TREE;
7169 /* We cannot determine ctor. */
7170 if (!ctor)
7171 return NULL_TREE;
7172 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7173 tree_to_uhwi (unit_size)
7174 * BITS_PER_UNIT,
7175 base);
7179 /* Fallthru. */
7181 case COMPONENT_REF:
7182 case BIT_FIELD_REF:
7183 case TARGET_MEM_REF:
7184 case MEM_REF:
7185 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7186 ctor = get_base_constructor (base, &offset, valueize);
7188 /* Empty constructor. Always fold to 0. */
7189 if (ctor == error_mark_node)
7190 return build_zero_cst (TREE_TYPE (t));
7191 /* We do not know precise address. */
7192 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7193 return NULL_TREE;
7194 /* We cannot determine ctor. */
7195 if (!ctor)
7196 return NULL_TREE;
7198 /* Out of bound array access. Value is undefined, but don't fold. */
7199 if (maybe_lt (offset, 0))
7200 return NULL_TREE;
7202 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
7203 if (tem)
7204 return tem;
7206 /* For bit field reads try to read the representative and
7207 adjust. */
7208 if (TREE_CODE (t) == COMPONENT_REF
7209 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
7210 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
7212 HOST_WIDE_INT csize, coffset;
7213 tree field = TREE_OPERAND (t, 1);
7214 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
7215 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
7216 && size.is_constant (&csize)
7217 && offset.is_constant (&coffset)
7218 && (coffset % BITS_PER_UNIT != 0
7219 || csize % BITS_PER_UNIT != 0)
/* Punt on reverse storage order and on targets where byte
and word endianness differ. */
7220 && !reverse
7221 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
7223 poly_int64 bitoffset;
7224 poly_uint64 field_offset, repr_offset;
7225 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
7226 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
7227 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
7228 else
7229 bitoffset = 0;
7230 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
7231 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
7232 HOST_WIDE_INT bitoff;
7233 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
7234 - TYPE_PRECISION (TREE_TYPE (field)));
7235 if (bitoffset.is_constant (&bitoff)
7236 && bitoff >= 0
7237 && bitoff <= diff)
7239 offset -= bitoff;
7240 size = tree_to_uhwi (DECL_SIZE (repr));
7242 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
7243 size, base);
7244 if (tem && TREE_CODE (tem) == INTEGER_CST)
/* Shift the bits of the field out of the folded
representative value. */
7246 if (!BYTES_BIG_ENDIAN)
7247 tem = wide_int_to_tree (TREE_TYPE (field),
7248 wi::lrshift (wi::to_wide (tem),
7249 bitoff));
7250 else
7251 tem = wide_int_to_tree (TREE_TYPE (field),
7252 wi::lrshift (wi::to_wide (tem),
7253 diff - bitoff));
7254 return tem;
7259 break;
7261 case REALPART_EXPR:
7262 case IMAGPART_EXPR:
7264 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7265 if (c && TREE_CODE (c) == COMPLEX_CST)
7266 return fold_build1_loc (EXPR_LOCATION (t),
7267 TREE_CODE (t), TREE_TYPE (t), c);
7268 break;
7271 default:
7272 break;
7275 return NULL_TREE;
7278 tree
7279 fold_const_aggregate_ref (tree t)
7281 return fold_const_aggregate_ref_1 (t, NULL);
7284 /* Lookup virtual method with index TOKEN in a virtual table V
7285 at OFFSET.
7286 Set CAN_REFER if non-NULL to false if method
7287 is not referable or if the virtual table is ill-formed (such as rewriten
7288 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7290 tree
7291 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7292 tree v,
7293 unsigned HOST_WIDE_INT offset,
7294 bool *can_refer)
7296 tree vtable = v, init, fn;
7297 unsigned HOST_WIDE_INT size;
7298 unsigned HOST_WIDE_INT elt_size, access_index;
7299 tree domain_type;
7301 if (can_refer)
7302 *can_refer = true;
7304 /* First of all double check we have virtual table. */
7305 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7307 /* Pass down that we lost track of the target. */
7308 if (can_refer)
7309 *can_refer = false;
7310 return NULL_TREE;
7313 init = ctor_for_folding (v);
7315 /* The virtual tables should always be born with constructors
7316 and we always should assume that they are avaialble for
7317 folding. At the moment we do not stream them in all cases,
7318 but it should never happen that ctor seem unreachable. */
7319 gcc_assert (init);
7320 if (init == error_mark_node)
7322 /* Pass down that we lost track of the target. */
7323 if (can_refer)
7324 *can_refer = false;
7325 return NULL_TREE;
7327 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7328 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7329 offset *= BITS_PER_UNIT;
7330 offset += token * size;
7332 /* Lookup the value in the constructor that is assumed to be array.
7333 This is equivalent to
7334 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7335 offset, size, NULL);
7336 but in a constant time. We expect that frontend produced a simple
7337 array without indexed initializers. */
7339 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7340 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7341 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7342 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7344 access_index = offset / BITS_PER_UNIT / elt_size;
7345 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7347 /* The C++ FE can now produce indexed fields, and we check if the indexes
7348 match. */
7349 if (access_index < CONSTRUCTOR_NELTS (init))
7351 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7352 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7353 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7354 STRIP_NOPS (fn);
7356 else
7357 fn = NULL;
7359 /* For type inconsistent program we may end up looking up virtual method
7360 in virtual table that does not contain TOKEN entries. We may overrun
7361 the virtual table and pick up a constant or RTTI info pointer.
7362 In any case the call is undefined. */
7363 if (!fn
7364 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7365 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7366 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7367 else
7369 fn = TREE_OPERAND (fn, 0);
7371 /* When cgraph node is missing and function is not public, we cannot
7372 devirtualize. This can happen in WHOPR when the actual method
7373 ends up in other partition, because we found devirtualization
7374 possibility too late. */
7375 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7377 if (can_refer)
7379 *can_refer = false;
7380 return fn;
7382 return NULL_TREE;
7386 /* Make sure we create a cgraph node for functions we'll reference.
7387 They can be non-existent if the reference comes from an entry
7388 of an external vtable for example. */
7389 cgraph_node::get_create (fn);
7391 return fn;
7394 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7395 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7396 KNOWN_BINFO carries the binfo describing the true type of
7397 OBJ_TYPE_REF_OBJECT(REF).
7398 Set CAN_REFER if non-NULL to false if method
7399 is not referable or if the virtual table is ill-formed (such as rewriten
7400 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7402 tree
7403 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7404 bool *can_refer)
7406 unsigned HOST_WIDE_INT offset;
7407 tree v;
7409 v = BINFO_VTABLE (known_binfo);
7410 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7411 if (!v)
7412 return NULL_TREE;
7414 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7416 if (can_refer)
7417 *can_refer = false;
7418 return NULL_TREE;
7420 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7423 /* Given a pointer value T, return a simplified version of an
7424 indirection through T, or NULL_TREE if no simplification is
7425 possible. Note that the resulting type may be different from
7426 the type pointed to in the sense that it is still compatible
7427 from the langhooks point of view. */
7429 tree
7430 gimple_fold_indirect_ref (tree t)
7432 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7433 tree sub = t;
7434 tree subtype;
7436 STRIP_NOPS (sub);
7437 subtype = TREE_TYPE (sub);
7438 if (!POINTER_TYPE_P (subtype)
7439 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7440 return NULL_TREE;
7442 if (TREE_CODE (sub) == ADDR_EXPR)
7444 tree op = TREE_OPERAND (sub, 0);
7445 tree optype = TREE_TYPE (op);
7446 /* *&p => p */
7447 if (useless_type_conversion_p (type, optype))
7448 return op;
7450 /* *(foo *)&fooarray => fooarray[0] */
7451 if (TREE_CODE (optype) == ARRAY_TYPE
7452 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7453 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7455 tree type_domain = TYPE_DOMAIN (optype);
7456 tree min_val = size_zero_node;
7457 if (type_domain && TYPE_MIN_VALUE (type_domain))
7458 min_val = TYPE_MIN_VALUE (type_domain);
7459 if (TREE_CODE (min_val) == INTEGER_CST)
7460 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7462 /* *(foo *)&complexfoo => __real__ complexfoo */
7463 else if (TREE_CODE (optype) == COMPLEX_TYPE
7464 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7465 return fold_build1 (REALPART_EXPR, type, op);
7466 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7467 else if (TREE_CODE (optype) == VECTOR_TYPE
7468 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7470 tree part_width = TYPE_SIZE (type);
7471 tree index = bitsize_int (0);
7472 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7476 /* *(p + CST) -> ... */
7477 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7478 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7480 tree addr = TREE_OPERAND (sub, 0);
7481 tree off = TREE_OPERAND (sub, 1);
7482 tree addrtype;
7484 STRIP_NOPS (addr);
7485 addrtype = TREE_TYPE (addr);
7487 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7488 if (TREE_CODE (addr) == ADDR_EXPR
7489 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7490 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7491 && tree_fits_uhwi_p (off))
7493 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7494 tree part_width = TYPE_SIZE (type);
7495 unsigned HOST_WIDE_INT part_widthi
7496 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7497 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7498 tree index = bitsize_int (indexi);
7499 if (known_lt (offset / part_widthi,
7500 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7501 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7502 part_width, index);
7505 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7506 if (TREE_CODE (addr) == ADDR_EXPR
7507 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7508 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7510 tree size = TYPE_SIZE_UNIT (type);
7511 if (tree_int_cst_equal (size, off))
7512 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7515 /* *(p + CST) -> MEM_REF <p, CST>. */
7516 if (TREE_CODE (addr) != ADDR_EXPR
7517 || DECL_P (TREE_OPERAND (addr, 0)))
7518 return fold_build2 (MEM_REF, type,
7519 addr,
7520 wide_int_to_tree (ptype, wi::to_wide (off)));
7523 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7524 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7525 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7526 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7528 tree type_domain;
7529 tree min_val = size_zero_node;
7530 tree osub = sub;
7531 sub = gimple_fold_indirect_ref (sub);
7532 if (! sub)
7533 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7534 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7535 if (type_domain && TYPE_MIN_VALUE (type_domain))
7536 min_val = TYPE_MIN_VALUE (type_domain);
7537 if (TREE_CODE (min_val) == INTEGER_CST)
7538 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7541 return NULL_TREE;
7544 /* Return true if CODE is an operation that when operating on signed
7545 integer types involves undefined behavior on overflow and the
7546 operation can be expressed with unsigned arithmetic. */
7548 bool
7549 arith_code_with_undefined_signed_overflow (tree_code code)
7551 switch (code)
7553 case ABS_EXPR:
7554 case PLUS_EXPR:
7555 case MINUS_EXPR:
7556 case MULT_EXPR:
7557 case NEGATE_EXPR:
7558 case POINTER_PLUS_EXPR:
7559 return true;
7560 default:
7561 return false;
7565 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7566 operation that can be transformed to unsigned arithmetic by converting
7567 its operand, carrying out the operation in the corresponding unsigned
7568 type and converting the result back to the original type.
7570 Returns a sequence of statements that replace STMT and also contain
7571 a modified form of STMT itself. */
7573 gimple_seq
7574 rewrite_to_defined_overflow (gimple *stmt)
7576 if (dump_file && (dump_flags & TDF_DETAILS))
7578 fprintf (dump_file, "rewriting stmt with undefined signed "
7579 "overflow ");
7580 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7583 tree lhs = gimple_assign_lhs (stmt);
7584 tree type = unsigned_type_for (TREE_TYPE (lhs));
7585 gimple_seq stmts = NULL;
7586 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7587 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7588 else
7589 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7591 tree op = gimple_op (stmt, i);
7592 op = gimple_convert (&stmts, type, op);
7593 gimple_set_op (stmt, i, op);
7595 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7596 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7597 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7598 gimple_set_modified (stmt, true);
7599 gimple_seq_add_stmt (&stmts, stmt);
7600 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7601 gimple_seq_add_stmt (&stmts, cvt);
7603 return stmts;
7607 /* The valueization hook we use for the gimple_build API simplification.
7608 This makes us match fold_buildN behavior by only combining with
7609 statements in the sequence(s) we are currently building. */
7611 static tree
7612 gimple_build_valueize (tree op)
7614 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7615 return op;
7616 return NULL_TREE;
7619 /* Build the expression CODE OP0 of type TYPE with location LOC,
7620 simplifying it first if possible. Returns the built
7621 expression value and appends statements possibly defining it
7622 to SEQ. */
7624 tree
7625 gimple_build (gimple_seq *seq, location_t loc,
7626 enum tree_code code, tree type, tree op0)
7628 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7629 if (!res)
7631 res = create_tmp_reg_or_ssa_name (type);
7632 gimple *stmt;
7633 if (code == REALPART_EXPR
7634 || code == IMAGPART_EXPR
7635 || code == VIEW_CONVERT_EXPR)
7636 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7637 else
7638 stmt = gimple_build_assign (res, code, op0);
7639 gimple_set_location (stmt, loc);
7640 gimple_seq_add_stmt_without_update (seq, stmt);
7642 return res;
7645 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7646 simplifying it first if possible. Returns the built
7647 expression value and appends statements possibly defining it
7648 to SEQ. */
7650 tree
7651 gimple_build (gimple_seq *seq, location_t loc,
7652 enum tree_code code, tree type, tree op0, tree op1)
7654 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7655 if (!res)
7657 res = create_tmp_reg_or_ssa_name (type);
7658 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7659 gimple_set_location (stmt, loc);
7660 gimple_seq_add_stmt_without_update (seq, stmt);
7662 return res;
7665 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7666 simplifying it first if possible. Returns the built
7667 expression value and appends statements possibly defining it
7668 to SEQ. */
7670 tree
7671 gimple_build (gimple_seq *seq, location_t loc,
7672 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7674 tree res = gimple_simplify (code, type, op0, op1, op2,
7675 seq, gimple_build_valueize);
7676 if (!res)
7678 res = create_tmp_reg_or_ssa_name (type);
7679 gimple *stmt;
7680 if (code == BIT_FIELD_REF)
7681 stmt = gimple_build_assign (res, code,
7682 build3 (code, type, op0, op1, op2));
7683 else
7684 stmt = gimple_build_assign (res, code, op0, op1, op2);
7685 gimple_set_location (stmt, loc);
7686 gimple_seq_add_stmt_without_update (seq, stmt);
7688 return res;
7691 /* Build the call FN (ARG0) with a result of type TYPE
7692 (or no result if TYPE is void) with location LOC,
7693 simplifying it first if possible. Returns the built
7694 expression value (or NULL_TREE if TYPE is void) and appends
7695 statements possibly defining it to SEQ. */
7697 tree
7698 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7699 tree type, tree arg0)
7701 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7702 if (!res)
7704 gcall *stmt;
7705 if (internal_fn_p (fn))
7706 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7707 else
7709 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7710 stmt = gimple_build_call (decl, 1, arg0);
7712 if (!VOID_TYPE_P (type))
7714 res = create_tmp_reg_or_ssa_name (type);
7715 gimple_call_set_lhs (stmt, res);
7717 gimple_set_location (stmt, loc);
7718 gimple_seq_add_stmt_without_update (seq, stmt);
7720 return res;
7723 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7724 (or no result if TYPE is void) with location LOC,
7725 simplifying it first if possible. Returns the built
7726 expression value (or NULL_TREE if TYPE is void) and appends
7727 statements possibly defining it to SEQ. */
7729 tree
7730 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7731 tree type, tree arg0, tree arg1)
7733 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7734 if (!res)
7736 gcall *stmt;
7737 if (internal_fn_p (fn))
7738 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7739 else
7741 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7742 stmt = gimple_build_call (decl, 2, arg0, arg1);
7744 if (!VOID_TYPE_P (type))
7746 res = create_tmp_reg_or_ssa_name (type);
7747 gimple_call_set_lhs (stmt, res);
7749 gimple_set_location (stmt, loc);
7750 gimple_seq_add_stmt_without_update (seq, stmt);
7752 return res;
7755 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7756 (or no result if TYPE is void) with location LOC,
7757 simplifying it first if possible. Returns the built
7758 expression value (or NULL_TREE if TYPE is void) and appends
7759 statements possibly defining it to SEQ. */
7761 tree
7762 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7763 tree type, tree arg0, tree arg1, tree arg2)
7765 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7766 seq, gimple_build_valueize);
7767 if (!res)
7769 gcall *stmt;
7770 if (internal_fn_p (fn))
7771 stmt = gimple_build_call_internal (as_internal_fn (fn),
7772 3, arg0, arg1, arg2);
7773 else
7775 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7776 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7778 if (!VOID_TYPE_P (type))
7780 res = create_tmp_reg_or_ssa_name (type);
7781 gimple_call_set_lhs (stmt, res);
7783 gimple_set_location (stmt, loc);
7784 gimple_seq_add_stmt_without_update (seq, stmt);
7786 return res;
7789 /* Build the conversion (TYPE) OP with a result of type TYPE
7790 with location LOC if such conversion is neccesary in GIMPLE,
7791 simplifying it first.
7792 Returns the built expression value and appends
7793 statements possibly defining it to SEQ. */
7795 tree
7796 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7798 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7799 return op;
7800 return gimple_build (seq, loc, NOP_EXPR, type, op);
7803 /* Build the conversion (ptrofftype) OP with a result of a type
7804 compatible with ptrofftype with location LOC if such conversion
7805 is neccesary in GIMPLE, simplifying it first.
7806 Returns the built expression value and appends
7807 statements possibly defining it to SEQ. */
7809 tree
7810 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7812 if (ptrofftype_p (TREE_TYPE (op)))
7813 return op;
7814 return gimple_convert (seq, loc, sizetype, op);
7817 /* Build a vector of type TYPE in which each element has the value OP.
7818 Return a gimple value for the result, appending any new statements
7819 to SEQ. */
7821 tree
7822 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7823 tree op)
7825 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7826 && !CONSTANT_CLASS_P (op))
7827 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7829 tree res, vec = build_vector_from_val (type, op);
7830 if (is_gimple_val (vec))
7831 return vec;
7832 if (gimple_in_ssa_p (cfun))
7833 res = make_ssa_name (type);
7834 else
7835 res = create_tmp_reg (type);
7836 gimple *stmt = gimple_build_assign (res, vec);
7837 gimple_set_location (stmt, loc);
7838 gimple_seq_add_stmt_without_update (seq, stmt);
7839 return res;
7842 /* Build a vector from BUILDER, handling the case in which some elements
7843 are non-constant. Return a gimple value for the result, appending any
7844 new instructions to SEQ.
7846 BUILDER must not have a stepped encoding on entry. This is because
7847 the function is not geared up to handle the arithmetic that would
7848 be needed in the variable case, and any code building a vector that
7849 is known to be constant should use BUILDER->build () directly. */
7851 tree
7852 gimple_build_vector (gimple_seq *seq, location_t loc,
7853 tree_vector_builder *builder)
7855 gcc_assert (builder->nelts_per_pattern () <= 2);
7856 unsigned int encoded_nelts = builder->encoded_nelts ();
7857 for (unsigned int i = 0; i < encoded_nelts; ++i)
7858 if (!TREE_CONSTANT ((*builder)[i]))
7860 tree type = builder->type ();
7861 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7862 vec<constructor_elt, va_gc> *v;
7863 vec_alloc (v, nelts);
7864 for (i = 0; i < nelts; ++i)
7865 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7867 tree res;
7868 if (gimple_in_ssa_p (cfun))
7869 res = make_ssa_name (type);
7870 else
7871 res = create_tmp_reg (type);
7872 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7873 gimple_set_location (stmt, loc);
7874 gimple_seq_add_stmt_without_update (seq, stmt);
7875 return res;
7877 return builder->build ();
7880 /* Return true if the result of assignment STMT is known to be non-negative.
7881 If the return value is based on the assumption that signed overflow is
7882 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7883 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7885 static bool
7886 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7887 int depth)
7889 enum tree_code code = gimple_assign_rhs_code (stmt);
7890 switch (get_gimple_rhs_class (code))
7892 case GIMPLE_UNARY_RHS:
7893 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7894 gimple_expr_type (stmt),
7895 gimple_assign_rhs1 (stmt),
7896 strict_overflow_p, depth);
7897 case GIMPLE_BINARY_RHS:
7898 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7899 gimple_expr_type (stmt),
7900 gimple_assign_rhs1 (stmt),
7901 gimple_assign_rhs2 (stmt),
7902 strict_overflow_p, depth);
7903 case GIMPLE_TERNARY_RHS:
7904 return false;
7905 case GIMPLE_SINGLE_RHS:
7906 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7907 strict_overflow_p, depth);
7908 case GIMPLE_INVALID_RHS:
7909 break;
7911 gcc_unreachable ();
7914 /* Return true if return value of call STMT is known to be non-negative.
7915 If the return value is based on the assumption that signed overflow is
7916 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7917 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7919 static bool
7920 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7921 int depth)
7923 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7924 gimple_call_arg (stmt, 0) : NULL_TREE;
7925 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7926 gimple_call_arg (stmt, 1) : NULL_TREE;
7928 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7929 gimple_call_combined_fn (stmt),
7930 arg0,
7931 arg1,
7932 strict_overflow_p, depth);
7935 /* Return true if return value of call STMT is known to be non-negative.
7936 If the return value is based on the assumption that signed overflow is
7937 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7938 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7940 static bool
7941 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7942 int depth)
7944 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7946 tree arg = gimple_phi_arg_def (stmt, i);
7947 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7948 return false;
7950 return true;
7953 /* Return true if STMT is known to compute a non-negative value.
7954 If the return value is based on the assumption that signed overflow is
7955 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7956 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7958 bool
7959 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7960 int depth)
7962 switch (gimple_code (stmt))
7964 case GIMPLE_ASSIGN:
7965 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7966 depth);
7967 case GIMPLE_CALL:
7968 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7969 depth);
7970 case GIMPLE_PHI:
7971 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7972 depth);
7973 default:
7974 return false;
7978 /* Return true if the floating-point value computed by assignment STMT
7979 is known to have an integer value. We also allow +Inf, -Inf and NaN
7980 to be considered integer values. Return false for signaling NaN.
7982 DEPTH is the current nesting depth of the query. */
7984 static bool
7985 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7987 enum tree_code code = gimple_assign_rhs_code (stmt);
7988 switch (get_gimple_rhs_class (code))
7990 case GIMPLE_UNARY_RHS:
7991 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7992 gimple_assign_rhs1 (stmt), depth);
7993 case GIMPLE_BINARY_RHS:
7994 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7995 gimple_assign_rhs1 (stmt),
7996 gimple_assign_rhs2 (stmt), depth);
7997 case GIMPLE_TERNARY_RHS:
7998 return false;
7999 case GIMPLE_SINGLE_RHS:
8000 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
8001 case GIMPLE_INVALID_RHS:
8002 break;
8004 gcc_unreachable ();
8007 /* Return true if the floating-point value computed by call STMT is known
8008 to have an integer value. We also allow +Inf, -Inf and NaN to be
8009 considered integer values. Return false for signaling NaN.
8011 DEPTH is the current nesting depth of the query. */
8013 static bool
8014 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
8016 tree arg0 = (gimple_call_num_args (stmt) > 0
8017 ? gimple_call_arg (stmt, 0)
8018 : NULL_TREE);
8019 tree arg1 = (gimple_call_num_args (stmt) > 1
8020 ? gimple_call_arg (stmt, 1)
8021 : NULL_TREE);
8022 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
8023 arg0, arg1, depth);
8026 /* Return true if the floating-point result of phi STMT is known to have
8027 an integer value. We also allow +Inf, -Inf and NaN to be considered
8028 integer values. Return false for signaling NaN.
8030 DEPTH is the current nesting depth of the query. */
8032 static bool
8033 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
8035 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8037 tree arg = gimple_phi_arg_def (stmt, i);
8038 if (!integer_valued_real_single_p (arg, depth + 1))
8039 return false;
8041 return true;
8044 /* Return true if the floating-point value computed by STMT is known
8045 to have an integer value. We also allow +Inf, -Inf and NaN to be
8046 considered integer values. Return false for signaling NaN.
8048 DEPTH is the current nesting depth of the query. */
8050 bool
8051 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
8053 switch (gimple_code (stmt))
8055 case GIMPLE_ASSIGN:
8056 return gimple_assign_integer_valued_real_p (stmt, depth);
8057 case GIMPLE_CALL:
8058 return gimple_call_integer_valued_real_p (stmt, depth);
8059 case GIMPLE_PHI:
8060 return gimple_phi_integer_valued_real_p (stmt, depth);
8061 default:
8062 return false;