/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2024 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"
#include "gimple-range.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor the variable DECL was
   taken from.  We can get declarations that are not possible to reference
   for various reasons:

   1) When analyzing C++ virtual tables.
      C++ virtual tables do have known constructors even
      when they are keyed to another compilation unit.
      Those tables can contain pointers to methods and vars
      in other units.  Those methods have both STATIC and EXTERNAL
      set.
   2) In WHOPR mode devirtualization might lead to a reference
      to a method that was partitioned elsewhere.
      In this case we have a static VAR_DECL or FUNCTION_DECL
      that has no corresponding callgraph/varpool node
      declaring the body.
   3) COMDAT functions referred to by external vtables that
      we devirtualize only during the final compilation stage.
      At this time we already decided that we will not output
      the function body and thus we can't reference the symbol
      directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
        return false;
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may refer
     to a symbol keyed to another compilation unit.  The other compilation
     unit may be in a separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the current
     unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
           but since the use can be in a debug stmt we can't.  */
        ;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
        cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
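
/* An illustrative sketch of the POINTER_PLUS_EXPR canonicalization above
   (the concrete initializer is hypothetical): an initializer value such as

     &a + 4		<-- POINTER_PLUS_EXPR with an INTEGER_CST offset

   is rewritten into the ADDR_EXPR-of-MEM_REF form

     &MEM[&a, 4]

   which is acceptable to is_gimple_min_invariant.  */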

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val
              && is_gimple_min_invariant (val)
              && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
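
/* For example, given a file-scope declaration such as

     static const int no_init;	// const, no initializer, not overridable

   a load of NO_INIT can be folded to 0: per the comment above, const
   variables without an initializer that cannot be overridden at link or
   run time are zero-initialized.  */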

/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
                               TREE_CODE (expr),
                               TREE_TYPE (expr),
                               TREE_OPERAND (expr, 0),
                               TREE_OPERAND (expr, 1),
                               TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
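
/* For instance, a REALPART_EXPR whose operand is the complex constant
   1.0 + 2.0i folds via fold_unary_loc to the REAL_CST 1.0, and a
   BIT_FIELD_REF of a constant is folded via fold_ternary_loc; either
   result is kept only if it is a GIMPLE minimal invariant.  */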

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
        return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
         those for GIMPLE.  Let vector-typed comparisons pass - rules
         for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
            && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
                || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
          && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
        return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
          || !is_gimple_val (TREE_OPERAND (expr, 1)))
        return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
        return false;
      break;

    case tcc_expression:
      switch (code)
        {
        case ADDR_EXPR:
          {
            tree t;
            if (is_gimple_min_invariant (expr))
              return true;
            t = TREE_OPERAND (expr, 0);
            while (handled_component_p (t))
              {
                /* ??? More checks needed, see the GIMPLE verifier.  */
                if ((TREE_CODE (t) == ARRAY_REF
                     || TREE_CODE (t) == ARRAY_RANGE_REF)
                    && !is_gimple_val (TREE_OPERAND (t, 1)))
                  return false;
                t = TREE_OPERAND (t, 0);
              }
            if (!is_gimple_id (t))
              return false;
          }
          break;

        default:
          if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
            {
              if (!is_gimple_val (TREE_OPERAND (expr, 0))
                  || !is_gimple_val (TREE_OPERAND (expr, 1))
                  || !is_gimple_val (TREE_OPERAND (expr, 2)))
                return false;
              break;
            }
          return false;
        }
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
        {
          unsigned i;
          tree elt;
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
            if (!is_gimple_val (elt))
              return false;
          return true;
        }
      if (code != SSA_NAME)
        return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
        return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
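
/* As an example of the embedded-operand problem mentioned above,
   fold-const.cc may produce an ADDR_EXPR like

     &a[i_1 + 1]

   whose array index i_1 + 1 is not a GIMPLE value; valid_gimple_rhs_p
   rejects it, so the caller keeps the original statement instead.  */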

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            if (TREE_CODE (ref) == MEM_REF
                && integer_zerop (TREE_OPERAND (ref, 1)))
              {
                result = TREE_OPERAND (ref, 0);
                if (!useless_type_conversion_p (TREE_TYPE (rhs),
                                                TREE_TYPE (result)))
                  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
                return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs)
                 && is_gimple_reg_type (TREE_TYPE (rhs)))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
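
/* A small example of the ADDR_EXPR case above: an assignment

     p_2 = &MEM[p_1 + 0];

   folds to the plain copy

     p_2 = p_1;

   with a NOP_EXPR inserted when the pointer types differ.  */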

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
                           gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments; the arguments, already in GIMPLE form, follow NARGS
   in the variable argument list.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
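
/* A hypothetical use, rewriting the call at GSI into a three-argument
   memcpy call (DEST, SRC and LEN assumed to already be GIMPLE values):

     update_gimple_call (&gsi, builtin_decl_implicit (BUILT_IN_MEMCPY),
                         3, dest, src, len);  */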

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
        {
          if (!is_gimple_val (arg))
            return false;
        }
      else
        if (!is_gimple_lvalue (arg))
          return false;
    }

  return true;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
        {
          args.create (nargs);
          args.safe_grow_cleared (nargs, true);

          for (i = 0; i < nargs; i++)
            args[i] = CALL_EXPR_ARG (expr, i);
        }

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
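
/* A hypothetical use, folding a call whose result is known to be 3
   (e.g. strlen of a three-character literal) into an assignment of
   that constant:

     replace_call_with_value (gsi, build_int_cst (size_type_node, 3));

   When the call has no LHS the statement simply becomes a GIMPLE_NOP.  */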

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
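
/* For example, a size computed as

     size_t n = a - b;		// hypothetical; B known to be >= A

   has no valid nonzero value: N is either 0 or, after wraparound, larger
   than SSIZE_MAX, so intersecting its range with the valid object-size
   range [0, SSIZE_MAX] leaves only zero and the function returns true
   even though N is not the literal constant 0.  */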

/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
         order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
         modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
        = POINTER_TYPE_P (TREE_TYPE (src))
          ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
        = POINTER_TYPE_P (TREE_TYPE (dest))
          ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
        = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* FIXME: Don't transform copies from strings with known length.
             Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
             from being handled, and the case was XFAILed for that reason.
             Now that it is handled and the XFAIL removed, as soon as other
             strlenopt tests that rely on it for passing are adjusted, this
             hack can be removed.  */
          && !c_strlen (src, 1)
          && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL)
          && !(srctype
               && AGGREGATE_TYPE_P (srctype)
               && TYPE_REVERSE_STORAGE_ORDER (srctype))
          && !(desttype
               && AGGREGATE_TYPE_P (desttype)
               && TYPE_REVERSE_STORAGE_ORDER (desttype)))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect out-of-bounds accesses without issuing warnings.
                 Avoid folding out-of-bounds copies but to avoid false
                 positives for unreachable code defer warning until after
                 DCE has worked its magic.
                 -Wrestrict is still diagnosed.  */
              if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
                                                         dest, src, len, len,
                                                         false, false))
                if (warning != OPT_Wrestrict)
                  return false;

              scalar_int_mode mode;
              if (int_mode_for_size (ilen * 8, 0).exists (&mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree type = build_nonstandard_integer_type (ilen * 8, 1);
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gimple_set_location (new_stmt, loc);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_move_vops (new_stmt, stmt);
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gimple_set_location (new_stmt, loc);
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (code == BUILT_IN_MEMMOVE)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!srctype
          || (AGGREGATE_TYPE_P (srctype)
              && TYPE_REVERSE_STORAGE_ORDER (srctype)))
        return false;
      if (!desttype
          || (AGGREGATE_TYPE_P (desttype)
              && TYPE_REVERSE_STORAGE_ORDER (desttype)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
         on alignment, whether the access constitutes a register access
         and whether it may actually expose a declaration for SSA rewrite
         or SRA decomposition.  Also try to expose a string constant, we
         might be able to concatenate several of them later into a single
         string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
          && dest_align >= TYPE_ALIGN (desttype)
          && (is_gimple_reg_type (desttype)
              || src_align >= TYPE_ALIGN (desttype)))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
               && var_decl_component_p (TREE_OPERAND (src, 0))
               && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
               && src_align >= TYPE_ALIGN (srctype)
               && (is_gimple_reg_type (srctype)
                   || dest_align >= TYPE_ALIGN (srctype)))
        srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
         As soon as strlenopt tests that rely on it for passing are adjusted,
         this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
               && (srcvar = string_constant (src, &srcoff, NULL, NULL))
               && integer_zerop (srcoff)
               && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
               && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
        srctype = TREE_TYPE (srcvar);
      else
        return false;

      /* Now that we chose an access type express the other side in
         terms of it if the target allows that with respect to alignment
         constraints.  */
      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              enum machine_mode mode = TYPE_MODE (desttype);
              if ((mode == BLKmode && STRICT_ALIGNMENT)
                  || (targetm.slow_unaligned_access (mode, src_align)
                      && (optab_handler (movmisalign_optab, mode)
                          == CODE_FOR_nothing)))
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              enum machine_mode mode = TYPE_MODE (srctype);
              if ((mode == BLKmode && STRICT_ALIGNMENT)
                  || (targetm.slow_unaligned_access (mode, dest_align)
                      && (optab_handler (movmisalign_optab, mode)
                          == CODE_FOR_nothing)))
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Same as above, detect out-of-bounds accesses without issuing
         warnings.  Avoid folding out-of-bounds copies but to avoid
         false positives for unreachable code defer warning until
         after DCE has worked its magic.
         -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
                                                 dest, src, len, len,
                                                 false, false))
        if (warning != OPT_Wrestrict)
          return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gimple_set_location (new_stmt, loc);
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  If the source is a STRING_CST, then
         directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
        desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
         to preserve padding and to avoid any issues with TREE_ADDRESSABLE
         types or float modes behavior on copying.  */
      else
        {
          desttype = build_array_type_nelts (unsigned_char_type_node,
                                             tree_to_uhwi (len));
          srctype = desttype;
          if (src_align > TYPE_ALIGN (srctype))
            srctype = build_aligned_type (srctype, src_align);
          srcvar = fold_build2 (MEM_REF, srctype, src, off0);
        }

      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

 set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
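
/* A sketch of the single-register fast path above, for a hypothetical
   4-byte copy on a typical target where a 32-bit integer mode exists:

     __builtin_memcpy (&d, &s, 4);

   becomes an integer load and store (shown here as C):

     unsigned int tmp = *(unsigned int *) &s;
     *(unsigned int *) &d = tmp;

   subject to the alignment and mode checks performed above.  */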

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
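
/* Note the argument swap: the folding above turns

     bcopy (src, dest, len);

   into

     memmove (dest, src, len);

   matching the POSIX Issue 6 definition cited in the comment.  */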

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
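
/* For example,

     bzero (p, n);

   is rewritten as

     memset (p, 0, n);

   and then refolded, so any further memset simplifications (see
   gimple_fold_builtin_memset below) apply to it as well.  */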

/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if ((!INTEGRAL_TYPE_P (etype)
       && !POINTER_TYPE_P (etype))
      || TREE_CODE (etype) == BITINT_TYPE)
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
          != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
                                            TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
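
/* A worked example of the byte-splat computation above, for a
   hypothetical call

     memset (&x, 0xab, 4);	/* X a suitably aligned 4-byte integer.  */

   CVAL is built as 0xab -> 0xabab -> 0xabababab by the shift-and-or
   steps, and the call becomes the single store

     x = 0xabababab;  */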

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
                       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
        {
          tree aop0 = TREE_OPERAND (op, 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
                                     pdata, eltsize);
        }
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
               && rkind == SRK_LENRANGE)
        {
          /* Fail if an array is the last member of a struct object
             since it could be treated as a (fake) flexible array
             member.  */
          tree idx = TREE_OPERAND (op, 1);

          arg = TREE_OPERAND (op, 0);
          tree optype = TREE_TYPE (arg);
          if (tree dom = TYPE_DOMAIN (optype))
            if (tree bound = TYPE_MAX_VALUE (dom))
              if (TREE_CODE (bound) == INTEGER_CST
                  && TREE_CODE (idx) == INTEGER_CST
                  && tree_int_cst_lt (bound, idx))
                return false;
        }
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
          || tree_int_cst_sgn (val) < 0)
        return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
        {
          /* ARG refers to an unterminated const character array
             DATA.DECL with size DATA.LEN.  */
          val = lendata.minlen;
          pdata->decl = lendata.decl;
        }
    }

  /* Set if VAL represents the maximum length based on array size (set
     when the exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
        return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
                                 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Avoid arrays of pointers.  */
          tree eltype = TREE_TYPE (optype);
          if (TREE_CODE (optype) != ARRAY_TYPE
              || !INTEGRAL_TYPE_P (eltype))
            return false;

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val
              || TREE_CODE (val) != INTEGER_CST
              || integer_zerop (val))
            return false;

          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          tight_bound = true;
        }
      else if (TREE_CODE (arg) == COMPONENT_REF
               && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                   == ARRAY_TYPE))
        {
          /* Use the type of the member array to determine the upper
             bound on the length of the array.  This may be overly
             optimistic if the array itself isn't NUL-terminated and
             the caller relies on the subsequent member to contain
             the NUL but that would only be considered valid if
             the array were the last member of a struct.  */

          tree fld = TREE_OPERAND (arg, 1);

          tree optype = TREE_TYPE (fld);

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val
              || TREE_CODE (val) != INTEGER_CST
              || integer_zerop (val))
            return false;
          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          /* The array size determined above is an optimistic bound
             on the length.  If the array isn't nul-terminated the
             length computed by the library function would be greater.
             Even though using strlen to cross the subobject boundary
             is undefined, avoid drawing conclusions from the member
             type about the length here.  */
          tight_bound = true;
        }
      else if (TREE_CODE (arg) == MEM_REF
               && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
               && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
        {
          /* Handle a MEM_REF into a DECL accessing an array of integers,
             being conservative about references to extern structures with
             flexible array members that can be initialized to arbitrary
             numbers of elements as an extension (static structs are okay).  */
          tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
              && (decl_binds_to_current_def_p (ref)
                  || !array_ref_flexible_size_p (arg)))
            {
              /* Fail if the offset is out of bounds.  Such accesses
                 should be diagnosed at some point.  */
              val = DECL_SIZE_UNIT (ref);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;

              poly_offset_int psiz = wi::to_offset (val);
              poly_offset_int poff = mem_ref_offset (arg);
              if (known_le (psiz, poff))
                return false;

              pdata->minlen = ssize_int (0);

              /* Subtract the offset and one for the terminating nul.  */
              psiz -= poff;
              psiz -= 1;
              val = wide_int_to_tree (TREE_TYPE (val), psiz);
              /* Since VAL reflects the size of a declared object
                 rather than the type of the access it is not a tight
                 bound.  */
            }
        }
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
        {
          /* Avoid handling pointers to arrays.  GCC might misuse
             a pointer to an array of one bound to point to an array
             object of a greater bound.  */
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (argtype) == ARRAY_TYPE)
            {
              val = TYPE_SIZE_UNIT (argtype);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;
              val = wide_int_to_tree (TREE_TYPE (val),
                                      wi::sub (wi::to_wide (val), 1));

              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              pdata->minlen = ssize_int (0);
            }
        }
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
          && TREE_CODE (pdata->minlen) == INTEGER_CST
          && TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
         if necessary and proceed to adjust the more conservative
         bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
        {
          if (tree_int_cst_lt (pdata->maxbound, val))
            pdata->maxbound = val;
        }
      else
        pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
         on the length of the string based on the referenced object's
         or subobject's type.  Determine the conservative upper bound
         based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
        {
          poly_int64 offset;
          tree base = get_addr_base_and_unit_offset (arg, &offset);
          if (!base)
            {
              /* When the call above fails due to a non-constant offset
                 assume the offset is zero and use the size of the whole
                 enclosing object instead.  */
              base = get_base_address (arg);
              offset = 0;
            }
          /* If the base object is a pointer no upper bound on the length
             can be determined.  Otherwise the maximum length is equal to
             the size of the enclosing object minus the offset of
             the referenced subobject minus 1 (for the terminating nul).  */
          tree type = TREE_TYPE (base);
          if (TREE_CODE (type) == POINTER_TYPE
              || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
              || !(val = DECL_SIZE_UNIT (base)))
            val = build_all_ones_cst (size_type_node);
          else
            {
              val = DECL_SIZE_UNIT (base);
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 size_int (offset + 1));
            }
        }
      else
        return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
         and fail otherwise.  */
      if (rkind != SRK_STRLEN)
        {
          if (TREE_CODE (pdata->maxlen) != INTEGER_CST
              || TREE_CODE (val) != INTEGER_CST)
            return false;

          if (tree_int_cst_lt (pdata->maxlen, val))
            pdata->maxlen = val;
          return true;
        }
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
        {
          /* Fail if the length of this ARG is different from that
             previously determined from another ARG.  */
          return false;
        }
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
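
/* An example of the SRK_LENRANGE logic above: for a declared array

     char a[8];

   whose contents are unknown, VAL is the array size minus 1, so the
   computed length range is [0, 7]: MINLEN is set to zero and the upper
   bound allows for a string filling the array up to the terminating
   nul.  */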
1840 /* For an ARG referencing one or more strings, try to obtain the range
1841 of their lengths, or the size of the largest array ARG referes to if
1842 the range of lengths cannot be determined, and store all in *PDATA.
1843 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1844 the maximum constant value.
1845	   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
1846	   SRK_STRLEN, return false if PDATA->MAXLEN is not equal to the
1847	   determined length or if the length cannot be determined.
1848 VISITED is a bitmap of visited variables.
1849 RKIND determines the kind of value or range to obtain (see
1850 strlen_range_kind).
1851 Set PDATA->DECL if ARG refers to an unterminated constant array.
1852 On input, set ELTSIZE to 1 for normal single byte character strings,
1853	   and either 2 or 4 for wide character strings (the size of wchar_t).
1854 Return true if *PDATA was successfully populated and false otherwise. */
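/* Illustrative example (not part of the original sources): for a
   declaration such as
     char buf[8];
   with unknown contents, an SRK_LENRANGE query is expected to set
   PDATA->MINLEN to 0 and PDATA->MAXLEN to 7, i.e. the array size
   minus 1 for the terminating nul, since nothing tighter can be
   proved about the actual string length.  */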
1856 static bool
1857 get_range_strlen (tree arg, bitmap visited,
1858 strlen_range_kind rkind,
1859 c_strlen_data *pdata, unsigned eltsize)
1862 if (TREE_CODE (arg) != SSA_NAME)
1863 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1865 /* If ARG is registered for SSA update we cannot look at its defining
1866 statement. */
1867 if (name_registered_for_update_p (arg))
1868 return false;
1870 /* If we were already here, break the infinite cycle. */
1871 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1872 return true;
1874 tree var = arg;
1875 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1877 switch (gimple_code (def_stmt))
1879 case GIMPLE_ASSIGN:
1880 /* The RHS of the statement defining VAR must either have a
1881 constant length or come from another SSA_NAME with a constant
1882 length. */
1883 if (gimple_assign_single_p (def_stmt)
1884 || gimple_assign_unary_nop_p (def_stmt))
1886 tree rhs = gimple_assign_rhs1 (def_stmt);
1887 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1889 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1891 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1892 gimple_assign_rhs3 (def_stmt) };
1894 for (unsigned int i = 0; i < 2; i++)
1895 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1897 if (rkind != SRK_LENRANGE)
1898 return false;
1899 /* Set the upper bound to the maximum to prevent
1900 it from being adjusted in the next iteration but
1901 leave MINLEN and the more conservative MAXBOUND
1902 determined so far alone (or leave them null if
1903 they haven't been set yet). That the MINLEN is
1904 in fact zero can be determined from MAXLEN being
1905 unbounded but the discovered minimum is used for
1906 diagnostics. */
1907 pdata->maxlen = build_all_ones_cst (size_type_node);
1909 return true;
1911 return false;
1913 case GIMPLE_PHI:
1914 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1915 must have a constant length. */
1916 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1918 tree arg = gimple_phi_arg (def_stmt, i)->def;
1920 /* If this PHI has itself as an argument, we cannot
1921 determine the string length of this argument. However,
1922 if we can find a constant string length for the other
1923 PHI args then we can still be sure that this is a
1924 constant string length. So be optimistic and just
1925 continue with the next argument. */
1926 if (arg == gimple_phi_result (def_stmt))
1927 continue;
1929 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1931 if (rkind != SRK_LENRANGE)
1932 return false;
1933 /* Set the upper bound to the maximum to prevent
1934 it from being adjusted in the next iteration but
1935 leave MINLEN and the more conservative MAXBOUND
1936 determined so far alone (or leave them null if
1937 they haven't been set yet). That the MINLEN is
1938 in fact zero can be determined from MAXLEN being
1939 unbounded but the discovered minimum is used for
1940 diagnostics. */
1941 pdata->maxlen = build_all_ones_cst (size_type_node);
1944 return true;
1946 default:
1947 return false;
1951 /* Try to obtain the range of the lengths of the string(s) referenced
1952 by ARG, or the size of the largest array ARG refers to if the range
1953 of lengths cannot be determined, and store all in *PDATA which must
1954 be zero-initialized on input except PDATA->MAXBOUND may be set to
1955	   a non-null tree node other than INTEGER_CST to request that it be
1956	   set to the length of the longest string in a PHI.  ELTSIZE is
1957 the expected size of the string element in bytes: 1 for char and
1958 some power of 2 for wide characters.
1959 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1960 for optimization. Returning false means that a nonzero PDATA->MINLEN
1961 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1962 is -1 (in that case, the actual range is indeterminate, i.e.,
1963	   [0, PTRDIFF_MAX - 2]).  */
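/* A minimal usage sketch (illustrative only):
     c_strlen_data data = { };
     if (get_range_strlen (arg, &data, 1))
       ... [data.minlen, data.maxlen] bounds strlen (ARG) ...
   Even on failure DATA.MINLEN is set to 0 and DATA.MAXLEN to all
   ones, so both members may be inspected unconditionally
   afterwards.  */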
1965 bool
1966 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1968 auto_bitmap visited;
1969 tree maxbound = pdata->maxbound;
1971 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1973 /* On failure extend the length range to an impossible maximum
1974 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1975 members can stay unchanged regardless. */
1976 pdata->minlen = ssize_int (0);
1977 pdata->maxlen = build_all_ones_cst (size_type_node);
1979 else if (!pdata->minlen)
1980 pdata->minlen = ssize_int (0);
1982	  /* If it's unchanged from its initial non-null value, set the conservative
1983 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1984 if (maxbound && pdata->maxbound == maxbound)
1985 pdata->maxbound = build_all_ones_cst (size_type_node);
1987 return !integer_all_onesp (pdata->maxlen);
1990 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1991 For ARG of pointer types, NONSTR indicates if the caller is prepared
1992 to handle unterminated strings. For integer ARG and when RKIND ==
1993 SRK_INT_VALUE, NONSTR must be null.
1995 If an unterminated array is discovered and our caller handles
1996 unterminated arrays, then bubble up the offending DECL and
1997 return the maximum size. Otherwise return NULL. */
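/* Illustrative example: for ARG pointing to the literal "abc",
   get_maxval_strlen (arg, SRK_STRLEN) is expected to return the
   INTEGER_CST 3.  If ARG instead referred to an unterminated array
   such as
     char d[3] = "abc";
   the result would be NULL_TREE unless NONSTR is non-null, in which
   case *NONSTR would identify D and the size would be returned.  */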
1999 static tree
2000 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2002 /* A non-null NONSTR is meaningless when determining the maximum
2003 value of an integer ARG. */
2004 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2005 /* ARG must have an integral type when RKIND says so. */
2006 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2008 auto_bitmap visited;
2010 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2011 is unbounded. */
2012 c_strlen_data lendata = { };
2013 if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2014 lendata.maxlen = NULL_TREE;
2015 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2016 lendata.maxlen = NULL_TREE;
2018 if (nonstr)
2020 /* For callers prepared to handle unterminated arrays set
2021 *NONSTR to point to the declaration of the array and return
2022 the maximum length/size. */
2023 *nonstr = lendata.decl;
2024 return lendata.maxlen;
2027 /* Fail if the constant array isn't nul-terminated. */
2028 return lendata.decl ? NULL_TREE : lendata.maxlen;
2031 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2032 true, strictly less than) the lower bound of SIZE at compile time and false
2033 otherwise. */
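/* Illustrative example: if range analysis shows LEN in [0, 4] and
   SIZE in [8, 16], known_lower returns true because the largest
   possible LEN (4) is below the smallest possible SIZE (8); with
   STRICT the comparison uses < rather than <=.  */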
2035 static bool
2036 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2038 if (len == NULL_TREE)
2039 return false;
2041 wide_int size_range[2];
2042 wide_int len_range[2];
2043 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2045 if (strict)
2046 return wi::ltu_p (len_range[1], size_range[0]);
2047 else
2048 return wi::leu_p (len_range[1], size_range[0]);
2051 return false;
2054 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2055	   Return true if the call was simplified and false if no
2056	   simplification could be made.  */
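/* For instance (an illustrative sketch): with a known source length
   the fold rewrites
     strcpy (d, "abc");
   into
     memcpy (d, "abc", 4);
   copying the three characters plus the terminating nul.  */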
2058 static bool
2059 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2060 tree dest, tree src)
2062 gimple *stmt = gsi_stmt (*gsi);
2063 location_t loc = gimple_location (stmt);
2064 tree fn;
2066 /* If SRC and DEST are the same (and not volatile), return DEST. */
2067 if (operand_equal_p (src, dest, 0))
2069 /* Issue -Wrestrict unless the pointers are null (those do
2070 not point to objects and so do not indicate an overlap;
2071 such calls could be the result of sanitization and jump
2072 threading). */
2073 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2075 tree func = gimple_call_fndecl (stmt);
2077 warning_at (loc, OPT_Wrestrict,
2078 "%qD source argument is the same as destination",
2079 func);
2082 replace_call_with_value (gsi, dest);
2083 return true;
2086 if (optimize_function_for_size_p (cfun))
2087 return false;
2089 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2090 if (!fn)
2091 return false;
2093 /* Set to non-null if ARG refers to an unterminated array. */
2094 tree nonstr = NULL;
2095 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2097 if (nonstr)
2099 /* Avoid folding calls with unterminated arrays. */
2100 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2101 warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2102 suppress_warning (stmt, OPT_Wstringop_overread);
2103 return false;
2106 if (!len)
2107 return false;
2109 len = fold_convert_loc (loc, size_type_node, len);
2110 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2111 len = force_gimple_operand_gsi (gsi, len, true,
2112 NULL_TREE, true, GSI_SAME_STMT);
2113 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2114 replace_call_with_call_and_fold (gsi, repl);
2115 return true;
2118 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2119	   Return true if the call was simplified and false if no
2120	   simplification could be made.  */
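/* For instance (an illustrative sketch): when the bound does not
   exceed the source size including its nul,
     strncpy (d, "ab", 3);
   is rewritten into
     memcpy (d, "ab", 3);
   whereas strncpy (d, "ab", 8) is left alone since the required
   zero-padding of the remaining bytes is not implemented here.  */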
2122 static bool
2123 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2124 tree dest, tree src, tree len)
2126 gimple *stmt = gsi_stmt (*gsi);
2127 location_t loc = gimple_location (stmt);
2128 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2130 /* If the LEN parameter is zero, return DEST. */
2131 if (integer_zerop (len))
2133 /* Avoid warning if the destination refers to an array/pointer
2134	 decorated with attribute nonstring.  */
2135 if (!nonstring)
2137 tree fndecl = gimple_call_fndecl (stmt);
2139 /* Warn about the lack of nul termination: the result is not
2140 a (nul-terminated) string. */
2141 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2142 if (slen && !integer_zerop (slen))
2143 warning_at (loc, OPT_Wstringop_truncation,
2144 "%qD destination unchanged after copying no bytes "
2145 "from a string of length %E",
2146 fndecl, slen);
2147 else
2148 warning_at (loc, OPT_Wstringop_truncation,
2149 "%qD destination unchanged after copying no bytes",
2150 fndecl);
2153 replace_call_with_value (gsi, dest);
2154 return true;
2157 /* We can't compare slen with len as constants below if len is not a
2158 constant. */
2159 if (TREE_CODE (len) != INTEGER_CST)
2160 return false;
2162 /* Now, we must be passed a constant src ptr parameter. */
2163 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2164 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2165 return false;
2167 /* The size of the source string including the terminating nul. */
2168 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2170 /* We do not support simplification of this case, though we do
2171 support it when expanding trees into RTL. */
2172 /* FIXME: generate a call to __builtin_memset. */
2173 if (tree_int_cst_lt (ssize, len))
2174 return false;
2176 /* Diagnose truncation that leaves the copy unterminated. */
2177 maybe_diag_stxncpy_trunc (*gsi, src, len);
2179	  /* OK, transform into builtin memcpy.  */
2180 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2181 if (!fn)
2182 return false;
2184 len = fold_convert_loc (loc, size_type_node, len);
2185 len = force_gimple_operand_gsi (gsi, len, true,
2186 NULL_TREE, true, GSI_SAME_STMT);
2187 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2188 replace_call_with_call_and_fold (gsi, repl);
2190 return true;
2193 /* Fold function call to builtin strchr or strrchr.
2194 If both arguments are constant, evaluate and fold the result,
2195 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2196 In general strlen is significantly faster than strchr
2197 due to being a simpler operation. */
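/* Illustrative examples: strchr ("hello", 'l') folds to the constant
   "hello" + 2, strchr (s, 0) becomes s + strlen (s), and
   strrchr (s, 0), when optimizing for size, is first reduced to
   strchr (s, 0).  */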
2198 static bool
2199 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2201 gimple *stmt = gsi_stmt (*gsi);
2202 tree str = gimple_call_arg (stmt, 0);
2203 tree c = gimple_call_arg (stmt, 1);
2204 location_t loc = gimple_location (stmt);
2205 const char *p;
2206 char ch;
2208 if (!gimple_call_lhs (stmt))
2209 return false;
2211 /* Avoid folding if the first argument is not a nul-terminated array.
2212 Defer warning until later. */
2213 if (!check_nul_terminated_array (NULL_TREE, str))
2214 return false;
2216 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2218 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2220 if (p1 == NULL)
2222 replace_call_with_value (gsi, integer_zero_node);
2223 return true;
2226 tree len = build_int_cst (size_type_node, p1 - p);
2227 gimple_seq stmts = NULL;
2228 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2229 POINTER_PLUS_EXPR, str, len);
2230 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2231 gsi_replace_with_seq_vops (gsi, stmts);
2232 return true;
2235 if (!integer_zerop (c))
2236 return false;
2238 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2239 if (is_strrchr && optimize_function_for_size_p (cfun))
2241 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2243 if (strchr_fn)
2245 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2246 replace_call_with_call_and_fold (gsi, repl);
2247 return true;
2250 return false;
2253 tree len;
2254 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2256 if (!strlen_fn)
2257 return false;
2259 /* Create newstr = strlen (str). */
2260 gimple_seq stmts = NULL;
2261 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2262 gimple_set_location (new_stmt, loc);
2263 len = create_tmp_reg_or_ssa_name (size_type_node);
2264 gimple_call_set_lhs (new_stmt, len);
2265 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2267 /* Create (str p+ strlen (str)). */
2268 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2269 POINTER_PLUS_EXPR, str, len);
2270 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2271 gsi_replace_with_seq_vops (gsi, stmts);
2272 /* gsi now points at the assignment to the lhs, get a
2273	     stmt iterator to the strlen call.
2274 ??? We can't use gsi_for_stmt as that doesn't work when the
2275 CFG isn't built yet. */
2276 gimple_stmt_iterator gsi2 = *gsi;
2277 gsi_prev (&gsi2);
2278 fold_stmt (&gsi2);
2279 return true;
2282 /* Fold function call to builtin strstr.
2283 If both arguments are constant, evaluate and fold the result,
2284 additionally fold strstr (x, "") into x and strstr (x, "c")
2285 into strchr (x, 'c'). */
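/* For instance (illustrative): strstr ("barfoo", "foo") folds to the
   constant "barfoo" + 3, while strstr ("barfoo", "baz") folds to a
   null pointer.  */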
2286 static bool
2287 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2289 gimple *stmt = gsi_stmt (*gsi);
2290 if (!gimple_call_lhs (stmt))
2291 return false;
2293 tree haystack = gimple_call_arg (stmt, 0);
2294 tree needle = gimple_call_arg (stmt, 1);
2296 /* Avoid folding if either argument is not a nul-terminated array.
2297 Defer warning until later. */
2298 if (!check_nul_terminated_array (NULL_TREE, haystack)
2299 || !check_nul_terminated_array (NULL_TREE, needle))
2300 return false;
2302 const char *q = c_getstr (needle);
2303 if (q == NULL)
2304 return false;
2306 if (const char *p = c_getstr (haystack))
2308 const char *r = strstr (p, q);
2310 if (r == NULL)
2312 replace_call_with_value (gsi, integer_zero_node);
2313 return true;
2316 tree len = build_int_cst (size_type_node, r - p);
2317 gimple_seq stmts = NULL;
2318 gimple *new_stmt
2319 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2320 haystack, len);
2321 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2322 gsi_replace_with_seq_vops (gsi, stmts);
2323 return true;
2326 /* For strstr (x, "") return x. */
2327 if (q[0] == '\0')
2329 replace_call_with_value (gsi, haystack);
2330 return true;
2333 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2334 if (q[1] == '\0')
2336 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2337 if (strchr_fn)
2339 tree c = build_int_cst (integer_type_node, q[0]);
2340 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2341 replace_call_with_call_and_fold (gsi, repl);
2342 return true;
2346 return false;
2349 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2350 to the call.
2352	   Return true if the call was simplified and false if no
2353	   simplification was possible.
2355	   The simplified form may be a constant or another expression that
2356	   computes the same value in a more efficient manner (including
2357	   calls to other builtin functions).  */
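/* For instance (an illustrative sketch): with strlen (s) known to
   be 3,
     strcat (d, s);
   is expanded into
     tmp = strlen (d);
     memcpy (d + tmp, s, 4);
   copying the source including its terminating nul to the end
   of D.  */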
2367 static bool
2368 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2370 gimple *stmt = gsi_stmt (*gsi);
2371 location_t loc = gimple_location (stmt);
2373 const char *p = c_getstr (src);
2375 /* If the string length is zero, return the dst parameter. */
2376 if (p && *p == '\0')
2378 replace_call_with_value (gsi, dst);
2379 return true;
2382 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2383 return false;
2385 /* See if we can store by pieces into (dst + strlen(dst)). */
2386 tree newdst;
2387 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2388 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2390 if (!strlen_fn || !memcpy_fn)
2391 return false;
2393 /* If the length of the source string isn't computable don't
2394 split strcat into strlen and memcpy. */
2395 tree len = get_maxval_strlen (src, SRK_STRLEN);
2396 if (! len)
2397 return false;
2399 /* Create strlen (dst). */
2400 gimple_seq stmts = NULL, stmts2;
2401 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2402 gimple_set_location (repl, loc);
2403 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2404 gimple_call_set_lhs (repl, newdst);
2405 gimple_seq_add_stmt_without_update (&stmts, repl);
2407 /* Create (dst p+ strlen (dst)). */
2408 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2409 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2410 gimple_seq_add_seq_without_update (&stmts, stmts2);
2412 len = fold_convert_loc (loc, size_type_node, len);
2413 len = size_binop_loc (loc, PLUS_EXPR, len,
2414 build_int_cst (size_type_node, 1));
2415 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2416 gimple_seq_add_seq_without_update (&stmts, stmts2);
2418 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2419 gimple_seq_add_stmt_without_update (&stmts, repl);
2420 if (gimple_call_lhs (stmt))
2422 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* gsi now points at the assignment to the lhs, get a
2426 stmt iterator to the memcpy call.
2427 ??? We can't use gsi_for_stmt as that doesn't work when the
2428 CFG isn't built yet. */
2429 gimple_stmt_iterator gsi2 = *gsi;
2430 gsi_prev (&gsi2);
2431 fold_stmt (&gsi2);
2433 else
2435 gsi_replace_with_seq_vops (gsi, stmts);
2436 fold_stmt (gsi);
2438 return true;
2441 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2442 are the arguments to the call. */
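/* Illustrative examples: __strcat_chk (d, "", sz) folds to D, and
   when SIZE carries no object-size information (all ones) the call
   is downgraded to plain strcat (d, s).  */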
2444 static bool
2445 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2447 gimple *stmt = gsi_stmt (*gsi);
2448 tree dest = gimple_call_arg (stmt, 0);
2449 tree src = gimple_call_arg (stmt, 1);
2450 tree size = gimple_call_arg (stmt, 2);
2451 tree fn;
2452 const char *p;
2455 p = c_getstr (src);
2456 /* If the SRC parameter is "", return DEST. */
2457 if (p && *p == '\0')
2459 replace_call_with_value (gsi, dest);
2460 return true;
2463 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2464 return false;
2466 /* If __builtin_strcat_chk is used, assume strcat is available. */
2467 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2468 if (!fn)
2469 return false;
2471 gimple *repl = gimple_build_call (fn, 2, dest, src);
2472 replace_call_with_call_and_fold (gsi, repl);
2473 return true;
2476 /* Simplify a call to the strncat builtin. */
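/* For instance (illustrative): when the bound is known to be greater
   than the source length,
     strncat (d, "ab", 5);
   behaves exactly like strcat and is rewritten into
     strcat (d, "ab");  */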
2478 static bool
2479 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2481 gimple *stmt = gsi_stmt (*gsi);
2482 tree dst = gimple_call_arg (stmt, 0);
2483 tree src = gimple_call_arg (stmt, 1);
2484 tree len = gimple_call_arg (stmt, 2);
2485 tree src_len = c_strlen (src, 1);
2487 /* If the requested length is zero, or the src parameter string
2488 length is zero, return the dst parameter. */
2489 if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2491 replace_call_with_value (gsi, dst);
2492 return true;
2495 /* Return early if the requested len is less than the string length.
2496 Warnings will be issued elsewhere later. */
2497 if (!src_len || known_lower (stmt, len, src_len, true))
2498 return false;
2500 /* Warn on constant LEN. */
2501 if (TREE_CODE (len) == INTEGER_CST)
2503 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2504 tree dstsize;
2506 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2507 && TREE_CODE (dstsize) == INTEGER_CST)
2509 int cmpdst = tree_int_cst_compare (len, dstsize);
2511 if (cmpdst >= 0)
2513 tree fndecl = gimple_call_fndecl (stmt);
2515 /* Strncat copies (at most) LEN bytes and always appends
2516 the terminating NUL so the specified bound should never
2517 be equal to (or greater than) the size of the destination.
2518 If it is, the copy could overflow. */
2519 location_t loc = gimple_location (stmt);
2520 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2521 cmpdst == 0
2522 ? G_("%qD specified bound %E equals "
2523 "destination size")
2524 : G_("%qD specified bound %E exceeds "
2525 "destination size %E"),
2526 fndecl, len, dstsize);
2527 if (nowarn)
2528 suppress_warning (stmt, OPT_Wstringop_overflow_);
2532 if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2533 && tree_int_cst_compare (src_len, len) == 0)
2535 tree fndecl = gimple_call_fndecl (stmt);
2536 location_t loc = gimple_location (stmt);
2538 /* To avoid possible overflow the specified bound should also
2539 not be equal to the length of the source, even when the size
2540	     of the destination is unknown (it's not an uncommon mistake
2541	     to specify the length of the source as the bound to strncat).  */
2542 if (warning_at (loc, OPT_Wstringop_overflow_,
2543 "%qD specified bound %E equals source length",
2544 fndecl, len))
2545 suppress_warning (stmt, OPT_Wstringop_overflow_);
2549 if (!known_lower (stmt, src_len, len))
2550 return false;
2552 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2554 /* If the replacement _DECL isn't initialized, don't do the
2555 transformation. */
2556 if (!fn)
2557 return false;
2559 /* Otherwise, emit a call to strcat. */
2560 gcall *repl = gimple_build_call (fn, 2, dst, src);
2561 replace_call_with_call_and_fold (gsi, repl);
2562 return true;
2565 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2566 LEN, and SIZE. */
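/* Illustrative examples: __strncat_chk (d, s, 0, sz) folds to D;
   with LEN known to be no less than strlen (S) the call is
   strengthened to __strcat_chk (d, s, sz); and with SIZE all ones it
   is downgraded to strncat (d, s, len).  */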
2568 static bool
2569 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2571 gimple *stmt = gsi_stmt (*gsi);
2572 tree dest = gimple_call_arg (stmt, 0);
2573 tree src = gimple_call_arg (stmt, 1);
2574 tree len = gimple_call_arg (stmt, 2);
2575 tree size = gimple_call_arg (stmt, 3);
2576 tree fn;
2577 const char *p;
2579 p = c_getstr (src);
2580 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2581 if ((p && *p == '\0')
2582 || integer_zerop (len))
2584 replace_call_with_value (gsi, dest);
2585 return true;
2588 if (! integer_all_onesp (size))
2590 tree src_len = c_strlen (src, 1);
2591 if (known_lower (stmt, src_len, len))
2593 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2594 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2595 if (!fn)
2596 return false;
2598 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2602 return false;
2605 /* If __builtin_strncat_chk is used, assume strncat is available. */
2606 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2607 if (!fn)
2608 return false;
2610 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2611 replace_call_with_call_and_fold (gsi, repl);
2612 return true;
2615 /* Build and append gimple statements to STMTS that would load a first
2616 character of a memory location identified by STR. LOC is location
2617 of the statement. */
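/* In effect this emits the equivalent of
     var = *(const unsigned char *) str;
   (illustrative), which the string-comparison folds below use to
   read the first byte of an argument.  */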
2619 static tree
2620 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2622 tree var;
2624 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2625 tree cst_uchar_ptr_node
2626 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2627 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2629 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2630 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2631 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2633 gimple_assign_set_lhs (stmt, var);
2634 gimple_seq_add_stmt_without_update (stmts, stmt);
2636 return var;
2639	/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
	   iterator.  */
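/* Illustrative examples: strcmp (s, s) and strncmp (s1, s2, 0) fold
   to 0; strcmp ("abc", "abd") folds to a negative constant;
   strcmp (s, "") becomes *(const unsigned char *) s; and
   strncmp (s1, s2, 1) becomes the difference of the first bytes.  */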
2641 static bool
2642 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2644 gimple *stmt = gsi_stmt (*gsi);
2645 tree callee = gimple_call_fndecl (stmt);
2646 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2648 tree type = integer_type_node;
2649 tree str1 = gimple_call_arg (stmt, 0);
2650 tree str2 = gimple_call_arg (stmt, 1);
2651 tree lhs = gimple_call_lhs (stmt);
2653 tree bound_node = NULL_TREE;
2654 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2656 /* Handle strncmp and strncasecmp functions. */
2657 if (gimple_call_num_args (stmt) == 3)
2659 bound_node = gimple_call_arg (stmt, 2);
2660 if (tree_fits_uhwi_p (bound_node))
2661 bound = tree_to_uhwi (bound_node);
2664 /* If the BOUND parameter is zero, return zero. */
2665 if (bound == 0)
2667 replace_call_with_value (gsi, integer_zero_node);
2668 return true;
2671 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2672 if (operand_equal_p (str1, str2, 0))
2674 replace_call_with_value (gsi, integer_zero_node);
2675 return true;
2678 /* Initially set to the number of characters, including the terminating
2679 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2680 the array Sx is not terminated by a nul.
2681	     For nul-terminated strings LENx is then adjusted to the string
2682	     length so that LENx == NULPOSx holds.  */
2683 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2684 const char *p1 = getbyterep (str1, &len1);
2685 const char *p2 = getbyterep (str2, &len2);
2687 /* The position of the terminating nul character if one exists, otherwise
2688 a value greater than LENx. */
2689 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2691 if (p1)
2693 size_t n = strnlen (p1, len1);
2694 if (n < len1)
2695 len1 = nulpos1 = n;
2698 if (p2)
2700 size_t n = strnlen (p2, len2);
2701 if (n < len2)
2702 len2 = nulpos2 = n;
2705 /* For known strings, return an immediate value. */
2706 if (p1 && p2)
2708 int r = 0;
2709 bool known_result = false;
2711 switch (fcode)
2713 case BUILT_IN_STRCMP:
2714 case BUILT_IN_STRCMP_EQ:
2715 if (len1 != nulpos1 || len2 != nulpos2)
2716 break;
2718 r = strcmp (p1, p2);
2719 known_result = true;
2720 break;
2722 case BUILT_IN_STRNCMP:
2723 case BUILT_IN_STRNCMP_EQ:
2725 if (bound == HOST_WIDE_INT_M1U)
2726 break;
2728 /* Reduce the bound to be no more than the length
2729 of the shorter of the two strings, or the sizes
2730 of the unterminated arrays. */
2731 unsigned HOST_WIDE_INT n = bound;
2733 if (len1 == nulpos1 && len1 < n)
2734 n = len1 + 1;
2735 if (len2 == nulpos2 && len2 < n)
2736 n = len2 + 1;
2738 if (MIN (nulpos1, nulpos2) + 1 < n)
2739 break;
2741 r = strncmp (p1, p2, n);
2742 known_result = true;
2743 break;
2745		/* The only handleable situation is where the strings are equal (result
2746		   0), which is already handled by the operand_equal_p case above.  */
2747 case BUILT_IN_STRCASECMP:
2748 break;
2749 case BUILT_IN_STRNCASECMP:
2751 if (bound == HOST_WIDE_INT_M1U)
2752 break;
2753 r = strncmp (p1, p2, bound);
2754 if (r == 0)
2755 known_result = true;
2756 break;
2758 default:
2759 gcc_unreachable ();
2762 if (known_result)
2764 replace_call_with_value (gsi, build_cmp_result (type, r));
2765 return true;
2769 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2770 || fcode == BUILT_IN_STRCMP
2771 || fcode == BUILT_IN_STRCMP_EQ
2772 || fcode == BUILT_IN_STRCASECMP;
2774 location_t loc = gimple_location (stmt);
2776 /* If the second arg is "", return *(const unsigned char*)arg1. */
2777 if (p2 && *p2 == '\0' && nonzero_bound)
2779 gimple_seq stmts = NULL;
2780 tree var = gimple_load_first_char (loc, str1, &stmts);
2781 if (lhs)
2783 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2784 gimple_seq_add_stmt_without_update (&stmts, stmt);
2787 gsi_replace_with_seq_vops (gsi, stmts);
2788 return true;
2791 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2792 if (p1 && *p1 == '\0' && nonzero_bound)
2794 gimple_seq stmts = NULL;
2795 tree var = gimple_load_first_char (loc, str2, &stmts);
2797 if (lhs)
2799 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2800 stmt = gimple_build_assign (c, NOP_EXPR, var);
2801 gimple_seq_add_stmt_without_update (&stmts, stmt);
2803 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2804 gimple_seq_add_stmt_without_update (&stmts, stmt);
2807 gsi_replace_with_seq_vops (gsi, stmts);
2808 return true;
2811 /* If BOUND is one, return an expression corresponding to
2812	     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2813 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2815 gimple_seq stmts = NULL;
2816 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2817 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2819 if (lhs)
2821 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2822 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2823 gimple_seq_add_stmt_without_update (&stmts, convert1);
2825 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2826 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2827 gimple_seq_add_stmt_without_update (&stmts, convert2);
2829 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2830 gimple_seq_add_stmt_without_update (&stmts, stmt);
2833 gsi_replace_with_seq_vops (gsi, stmts);
2834 return true;
2837 /* If BOUND is greater than the length of one constant string,
2838 and the other argument is also a nul-terminated string, replace
2839 strncmp with strcmp. */
2840 if (fcode == BUILT_IN_STRNCMP
2841 && bound > 0 && bound < HOST_WIDE_INT_M1U
2842 && ((p2 && len2 < bound && len2 == nulpos2)
2843 || (p1 && len1 < bound && len1 == nulpos1)))
2845 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2846 if (!fn)
2847 return false;
2848 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2849 replace_call_with_call_and_fold (gsi, repl);
2850 return true;
2853 return false;
2856	/* Fold a call to the memchr builtin pointed to by the GSI iterator.  */
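/* Illustrative examples: memchr (s, c, 0) folds to a null pointer;
   memchr ("abc", 'b', 3) folds to the constant "abc" + 1; and
   memchr ("abc", 'x', 3) folds to a null pointer because the entire
   searched range is known not to contain 'x'.  */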
2858 static bool
2859 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2861 gimple *stmt = gsi_stmt (*gsi);
2862 tree lhs = gimple_call_lhs (stmt);
2863 tree arg1 = gimple_call_arg (stmt, 0);
2864 tree arg2 = gimple_call_arg (stmt, 1);
2865 tree len = gimple_call_arg (stmt, 2);
2867 /* If the LEN parameter is zero, return zero. */
2868 if (integer_zerop (len))
2870 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2871 return true;
2874 char c;
2875 if (TREE_CODE (arg2) != INTEGER_CST
2876 || !tree_fits_uhwi_p (len)
2877 || !target_char_cst_p (arg2, &c))
2878 return false;
2880 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2881 unsigned HOST_WIDE_INT string_length;
2882 const char *p1 = getbyterep (arg1, &string_length);
2884 if (p1)
2886 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2887 if (r == NULL)
2889 tree mem_size, offset_node;
2890 byte_representation (arg1, &offset_node, &mem_size, NULL);
2891 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2892 ? 0 : tree_to_uhwi (offset_node);
2893 /* MEM_SIZE is the size of the array the string literal
2894 is stored in. */
2895 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2896 gcc_checking_assert (string_length <= string_size);
2897 if (length <= string_size)
2899 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2900 return true;
2903 else
2905 unsigned HOST_WIDE_INT offset = r - p1;
2906 gimple_seq stmts = NULL;
2907 if (lhs != NULL_TREE)
2909 tree offset_cst = build_int_cst (sizetype, offset);
2910 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2911 arg1, offset_cst);
2912 gimple_seq_add_stmt_without_update (&stmts, stmt);
2914 else
2915 gimple_seq_add_stmt_without_update (&stmts,
2916 gimple_build_nop ());
2918 gsi_replace_with_seq_vops (gsi, stmts);
2919 return true;
2923 return false;
2926 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2927	   to the call.  UNLOCKED is true if this is actually a call to
2928	   fputs_unlocked.  Return true if the call was simplified and false
2929	   if no simplification was possible.  */
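/* Illustrative examples: fputs ("", f) is deleted outright;
   fputs ("a", f) becomes fputc ('a', f); and fputs ("ab", f) becomes
   fwrite ("ab", 1, 2, f) unless optimizing for size.  */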
2933 static bool
2934 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2935 tree arg0, tree arg1,
2936 bool unlocked)
2938 gimple *stmt = gsi_stmt (*gsi);
2940 /* If we're using an unlocked function, assume the other unlocked
2941 functions exist explicitly. */
2942 tree const fn_fputc = (unlocked
2943 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2944 : builtin_decl_implicit (BUILT_IN_FPUTC));
2945 tree const fn_fwrite = (unlocked
2946 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2947 : builtin_decl_implicit (BUILT_IN_FWRITE));
2949 /* If the return value is used, don't do the transformation. */
2950 if (gimple_call_lhs (stmt))
2951 return false;
2953 /* Get the length of the string passed to fputs. If the length
2954 can't be determined, punt. */
2955 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2956 if (!len || TREE_CODE (len) != INTEGER_CST)
2957 return false;
2959 switch (compare_tree_int (len, 1))
2961	    case -1: /* length is 0, delete the call entirely.  */
2962 replace_call_with_value (gsi, integer_zero_node);
2963 return true;
2965 case 0: /* length is 1, call fputc. */
2967 const char *p = c_getstr (arg0);
2968 if (p != NULL)
2970 if (!fn_fputc)
2971 return false;
2973 gimple *repl
2974 = gimple_build_call (fn_fputc, 2,
2975 build_int_cst (integer_type_node, p[0]),
2976 arg1);
2977 replace_call_with_call_and_fold (gsi, repl);
2978 return true;
2981 /* FALLTHROUGH */
2982 case 1: /* length is greater than 1, call fwrite. */
2984 /* If optimizing for size keep fputs. */
2985 if (optimize_function_for_size_p (cfun))
2986 return false;
2987 /* New argument list transforming fputs(string, stream) to
2988 fwrite(string, 1, len, stream). */
2989 if (!fn_fwrite)
2990 return false;
2992 gimple *repl
2993 = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
2994 fold_convert (size_type_node, len), arg1);
2995 replace_call_with_call_and_fold (gsi, repl);
2996 return true;
2998 default:
2999 gcc_unreachable ();
3003 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3004 DEST, SRC, LEN, and SIZE are the arguments to the call.
3005	   FCODE is the BUILT_IN_* code of the builtin.  Return true if the
3006	   call was simplified and false if no simplification was possible.  */
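/* For instance (illustrative): when LEN is known not to exceed the
   object size SIZE,
     __memcpy_chk (d, s, n, sz);
   is rewritten into
     memcpy (d, s, n);
   and a __mempcpy_chk whose result is unused may at least be
   weakened to __memcpy_chk.  */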
3009 static bool
3010 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
3011 tree dest, tree src, tree len, tree size,
3012 enum built_in_function fcode)
3014 gimple *stmt = gsi_stmt (*gsi);
3015 location_t loc = gimple_location (stmt);
3016 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3017 tree fn;
3019 /* If SRC and DEST are the same (and not volatile), return DEST
3020 (resp. DEST+LEN for __mempcpy_chk). */
3021 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3023 if (fcode != BUILT_IN_MEMPCPY_CHK)
3025 replace_call_with_value (gsi, dest);
3026 return true;
3028 else
3030 gimple_seq stmts = NULL;
3031 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3032 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3033 TREE_TYPE (dest), dest, len);
3034 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3035 replace_call_with_value (gsi, temp);
3036 return true;
3040 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3041 if (! integer_all_onesp (size)
3042 && !known_lower (stmt, len, size)
3043 && !known_lower (stmt, maxlen, size))
3045	      /* Neither MAXLEN nor LEN can be proved to be less than SIZE; at
3046		 least try to optimize (void) __mempcpy_chk () into
3047		 (void) __memcpy_chk ().  */
3048 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3050 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3051 if (!fn)
3052 return false;
3054 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3055 replace_call_with_call_and_fold (gsi, repl);
3056 return true;
3058 return false;
3061 fn = NULL_TREE;
3062 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3063 mem{cpy,pcpy,move,set} is available. */
3064 switch (fcode)
3066 case BUILT_IN_MEMCPY_CHK:
3067 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3068 break;
3069 case BUILT_IN_MEMPCPY_CHK:
3070 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3071 break;
3072 case BUILT_IN_MEMMOVE_CHK:
3073 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3074 break;
3075 case BUILT_IN_MEMSET_CHK:
3076 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3077 break;
3078 default:
3079 break;
3082 if (!fn)
3083 return false;
3085 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3086 replace_call_with_call_and_fold (gsi, repl);
3087 return true;
3090 /* Print a message in the dump file recording transformation of FROM to TO. */
3092 static void
3093 dump_transformation (gcall *from, gcall *to)
3095 if (dump_enabled_p ())
3096 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3097 gimple_call_fn (from), gimple_call_fn (to));
3100 /* Fold a call to the __st[rp]cpy_chk builtin.
3101 DEST, SRC, and SIZE are the arguments to the call.
3102	   FCODE is the BUILT_IN_* code of the builtin.  Return true if the
3103	   call was simplified and false if no simplification was possible.  */
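/* Illustrative examples: __strcpy_chk (d, s, sz) with strlen (s)
   provably below SZ becomes strcpy (d, s); with a constant but
   unproven length it becomes __memcpy_chk (d, s, strlen (s) + 1, sz);
   and a __stpcpy_chk whose result is unused is treated like
   __strcpy_chk.  */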
3106 static bool
3107 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3108 tree dest,
3109 tree src, tree size,
3110 enum built_in_function fcode)
3112 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3113 location_t loc = gimple_location (stmt);
3114 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3115 tree len, fn;
3117 /* If SRC and DEST are the same (and not volatile), return DEST. */
3118 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3120 /* Issue -Wrestrict unless the pointers are null (those do
3121 not point to objects and so do not indicate an overlap;
3122 such calls could be the result of sanitization and jump
3123 threading). */
3124 if (!integer_zerop (dest)
3125 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3127 tree func = gimple_call_fndecl (stmt);
3129 warning_at (loc, OPT_Wrestrict,
3130 "%qD source argument is the same as destination",
3131 func);
3134 replace_call_with_value (gsi, dest);
3135 return true;
3138 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3139 if (! integer_all_onesp (size))
3141 len = c_strlen (src, 1);
3142 if (!known_lower (stmt, len, size, true)
3143 && !known_lower (stmt, maxlen, size, true))
3145 if (fcode == BUILT_IN_STPCPY_CHK)
3147 if (! ignore)
3148 return false;
3150 /* If return value of __stpcpy_chk is ignored,
3151 optimize into __strcpy_chk. */
3152 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3153 if (!fn)
3154 return false;
3156 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3157 replace_call_with_call_and_fold (gsi, repl);
3158 return true;
3161 if (! len || TREE_SIDE_EFFECTS (len))
3162 return false;
3164 /* If c_strlen returned something, but not provably less than size,
3165 transform __strcpy_chk into __memcpy_chk. */
3166 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3167 if (!fn)
3168 return false;
3170 gimple_seq stmts = NULL;
3171 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3172 len = gimple_convert (&stmts, loc, size_type_node, len);
3173 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3174 build_int_cst (size_type_node, 1));
3175 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3176 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3177 replace_call_with_call_and_fold (gsi, repl);
3178 return true;
3182 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3183 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3184 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3185 if (!fn)
3186 return false;
3188 gcall *repl = gimple_build_call (fn, 2, dest, src);
3189 dump_transformation (stmt, repl);
3190 replace_call_with_call_and_fold (gsi, repl);
3191 return true;
3194 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3195	   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
3196	   builtin.  Return true if the call was simplified and false if no
3197	   simplification was possible.  */
3199 static bool
3200 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3201 tree dest, tree src,
3202 tree len, tree size,
3203 enum built_in_function fcode)
3205 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3206 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3207 tree fn;
3209 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3210 if (! integer_all_onesp (size)
3211 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3213 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3215 /* If return value of __stpncpy_chk is ignored,
3216 optimize into __strncpy_chk. */
3217 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3218 if (fn)
3220 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3221 replace_call_with_call_and_fold (gsi, repl);
3222 return true;
3225 return false;
3228 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3229 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3230 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3231 if (!fn)
3232 return false;
3234 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3235 dump_transformation (stmt, repl);
3236 replace_call_with_call_and_fold (gsi, repl);
3237 return true;
3240 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3241	   Return true if the call was simplified and false otherwise.  */
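/* For instance (an illustrative sketch): with strlen (s) known to
   be 3,
     p = stpcpy (d, s);
   becomes
     memcpy (d, s, 4);
     p = d + 3;
   and when the result is unused the call is simply retargeted at
   strcpy.  */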
3243 static bool
3244 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3246 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3247 location_t loc = gimple_location (stmt);
3248 tree dest = gimple_call_arg (stmt, 0);
3249 tree src = gimple_call_arg (stmt, 1);
3250 tree fn, lenp1;
3252 /* If the result is unused, replace stpcpy with strcpy. */
3253 if (gimple_call_lhs (stmt) == NULL_TREE)
3255 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3256 if (!fn)
3257 return false;
3258 gimple_call_set_fndecl (stmt, fn);
3259 fold_stmt (gsi);
3260 return true;
3263 /* Set to non-null if ARG refers to an unterminated array. */
3264 c_strlen_data data = { };
3265	  /* The size of the unterminated array if SRC refers to one.  */
3266 tree size;
3267 /* True if the size is exact/constant, false if it's the lower bound
3268 of a range. */
3269 bool exact;
3270 tree len = c_strlen (src, 1, &data, 1);
3271 if (!len
3272 || TREE_CODE (len) != INTEGER_CST)
3274 data.decl = unterminated_array (src, &size, &exact);
3275 if (!data.decl)
3276 return false;
3279 if (data.decl)
3281 /* Avoid folding calls with unterminated arrays. */
3282 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3283 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3284 exact);
3285 suppress_warning (stmt, OPT_Wstringop_overread);
3286 return false;
3289 if (optimize_function_for_size_p (cfun)
3290 /* If length is zero it's small enough. */
3291 && !integer_zerop (len))
3292 return false;
3294 /* If the source has a known length replace stpcpy with memcpy. */
3295 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3296 if (!fn)
3297 return false;
3299 gimple_seq stmts = NULL;
3300 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3301 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3302 tem, build_int_cst (size_type_node, 1));
3303 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3304 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3305 gimple_move_vops (repl, stmt);
3306 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3307 /* Replace the result with dest + len. */
3308 stmts = NULL;
3309 tem = gimple_convert (&stmts, loc, sizetype, len);
3310 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3311 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3312 POINTER_PLUS_EXPR, dest, tem);
3313 gsi_replace (gsi, ret, false);
3314 /* Finally fold the memcpy call. */
3315 gimple_stmt_iterator gsi2 = *gsi;
3316 gsi_prev (&gsi2);
3317 fold_stmt (&gsi2);
3318 return true;
3321	/* Fold a call to __{,v}snprintf_chk.  FCODE is either
3322	   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return true if
3323	   the call was simplified into a call to {,v}snprintf and false if
3324	   a normal call should be emitted instead.  */
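/* For instance (illustrative): when LEN is known not to exceed SIZE,
     __snprintf_chk (d, len, 0, size, "%s", s);
   is relaxed into
     snprintf (d, len, "%s", s);  */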
3327 static bool
3328 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3329 enum built_in_function fcode)
3331 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3332 tree dest, size, len, fn, fmt, flag;
3333 const char *fmt_str;
3335 /* Verify the required arguments in the original call. */
3336 if (gimple_call_num_args (stmt) < 5)
3337 return false;
3339 dest = gimple_call_arg (stmt, 0);
3340 len = gimple_call_arg (stmt, 1);
3341 flag = gimple_call_arg (stmt, 2);
3342 size = gimple_call_arg (stmt, 3);
3343 fmt = gimple_call_arg (stmt, 4);
3345 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3346 if (! integer_all_onesp (size)
3347 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3348 return false;
3350 if (!init_target_chars ())
3351 return false;
3353 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3354 or if format doesn't contain % chars or is "%s". */
3355 if (! integer_zerop (flag))
3357 fmt_str = c_getstr (fmt);
3358 if (fmt_str == NULL)
3359 return false;
3360 if (strchr (fmt_str, target_percent) != NULL
3361 && strcmp (fmt_str, target_percent_s))
3362 return false;
3365 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3366 available. */
3367 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3368 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3369 if (!fn)
3370 return false;
3372	  /* Replace the called function and the first 5 arguments by 3,
3373	     retaining the trailing varargs.  */
3374 gimple_call_set_fndecl (stmt, fn);
3375 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3376 gimple_call_set_arg (stmt, 0, dest);
3377 gimple_call_set_arg (stmt, 1, len);
3378 gimple_call_set_arg (stmt, 2, fmt);
3379 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3380 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3381 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3382 fold_stmt (gsi);
3383 return true;
3386	/* Fold a call to __{,v}sprintf_chk.  FCODE is either
3387	   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Return true if
3388	   the call was simplified into a call to {,v}sprintf and false if
3389	   a normal call should be emitted instead.  */
3391 static bool
3392 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3393 enum built_in_function fcode)
3395 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3396 tree dest, size, len, fn, fmt, flag;
3397 const char *fmt_str;
3398 unsigned nargs = gimple_call_num_args (stmt);
3400 /* Verify the required arguments in the original call. */
3401 if (nargs < 4)
3402 return false;
3403 dest = gimple_call_arg (stmt, 0);
3404 flag = gimple_call_arg (stmt, 1);
3405 size = gimple_call_arg (stmt, 2);
3406 fmt = gimple_call_arg (stmt, 3);
3408 len = NULL_TREE;
3410 if (!init_target_chars ())
3411 return false;
3413 /* Check whether the format is a literal string constant. */
3414 fmt_str = c_getstr (fmt);
3415 if (fmt_str != NULL)
3417 /* If the format doesn't contain % args or %%, we know the size. */
3418 if (strchr (fmt_str, target_percent) == 0)
3420 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3421 len = build_int_cstu (size_type_node, strlen (fmt_str));
3423 /* If the format is "%s" and first ... argument is a string literal,
3424 we know the size too. */
3425 else if (fcode == BUILT_IN_SPRINTF_CHK
3426 && strcmp (fmt_str, target_percent_s) == 0)
3428 tree arg;
3430 if (nargs == 5)
3432 arg = gimple_call_arg (stmt, 4);
3433 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3434 len = c_strlen (arg, 1);
3439 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3440 return false;
3442 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3443 or if format doesn't contain % chars or is "%s". */
3444 if (! integer_zerop (flag))
3446 if (fmt_str == NULL)
3447 return false;
3448 if (strchr (fmt_str, target_percent) != NULL
3449 && strcmp (fmt_str, target_percent_s))
3450 return false;
3453 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3454 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3455 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3456 if (!fn)
3457 return false;
3459	  /* Replace the called function and the first 4 arguments by 2,
3460	     retaining the trailing varargs.  */
3461 gimple_call_set_fndecl (stmt, fn);
3462 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3463 gimple_call_set_arg (stmt, 0, dest);
3464 gimple_call_set_arg (stmt, 1, fmt);
3465 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3466 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3467 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3468 fold_stmt (gsi);
3469 return true;
3472 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3473 ORIG may be null if this is a 2-argument call. We don't attempt to
3474 simplify calls with more than 3 arguments.
3476 Return true if simplification was possible, otherwise false. */
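/* Illustrative examples: sprintf (d, "abc") becomes
   strcpy (d, "abc") with a used result replaced by the constant 3,
   and sprintf (d, "%s", s) becomes strcpy (d, s) provided the length
   of S is known whenever the result is used.  */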
3478 bool
3479 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3481 gimple *stmt = gsi_stmt (*gsi);
3483 /* Verify the required arguments in the original call. We deal with two
3484 types of sprintf() calls: 'sprintf (str, fmt)' and
3485 'sprintf (dest, "%s", orig)'. */
3486 if (gimple_call_num_args (stmt) > 3)
3487 return false;
3489 tree orig = NULL_TREE;
3490 if (gimple_call_num_args (stmt) == 3)
3491 orig = gimple_call_arg (stmt, 2);
3493 /* Check whether the format is a literal string constant. */
3494 tree fmt = gimple_call_arg (stmt, 1);
3495 const char *fmt_str = c_getstr (fmt);
3496 if (fmt_str == NULL)
3497 return false;
3499 tree dest = gimple_call_arg (stmt, 0);
3501 if (!init_target_chars ())
3502 return false;
3504 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3505 if (!fn)
3506 return false;
3508 /* If the format doesn't contain % args or %%, use strcpy. */
3509 if (strchr (fmt_str, target_percent) == NULL)
3511 /* Don't optimize sprintf (buf, "abc", ptr++). */
3512 if (orig)
3513 return false;
3515 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3516 'format' is known to contain no % formats. */
3517 gimple_seq stmts = NULL;
3518 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3520 /* Propagate the NO_WARNING bit to avoid issuing the same
3521 warning more than once. */
3522 copy_warning (repl, stmt);
3524 gimple_seq_add_stmt_without_update (&stmts, repl);
3525 if (tree lhs = gimple_call_lhs (stmt))
3527 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3528 strlen (fmt_str)));
3529 gimple_seq_add_stmt_without_update (&stmts, repl);
3530 gsi_replace_with_seq_vops (gsi, stmts);
3531 /* gsi now points at the assignment to the lhs, get a
3532	     stmt iterator to the strcpy call.
3533 ??? We can't use gsi_for_stmt as that doesn't work when the
3534 CFG isn't built yet. */
3535 gimple_stmt_iterator gsi2 = *gsi;
3536 gsi_prev (&gsi2);
3537 fold_stmt (&gsi2);
3539 else
3541 gsi_replace_with_seq_vops (gsi, stmts);
3542 fold_stmt (gsi);
3544 return true;
3547 /* If the format is "%s", use strcpy if the result isn't used. */
3548 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3550 /* Don't crash on sprintf (str1, "%s"). */
3551 if (!orig)
3552 return false;
3554 /* Don't fold calls with source arguments of invalid (nonpointer)
3555 types. */
3556 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3557 return false;
3559 tree orig_len = NULL_TREE;
3560 if (gimple_call_lhs (stmt))
3562 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3563 if (!orig_len)
3564 return false;
3567 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3568 gimple_seq stmts = NULL;
3569 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3571 /* Propagate the NO_WARNING bit to avoid issuing the same
3572 warning more than once. */
3573 copy_warning (repl, stmt);
3575 gimple_seq_add_stmt_without_update (&stmts, repl);
3576 if (tree lhs = gimple_call_lhs (stmt))
3578 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3579 TREE_TYPE (orig_len)))
3580 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3581 repl = gimple_build_assign (lhs, orig_len);
3582 gimple_seq_add_stmt_without_update (&stmts, repl);
3583 gsi_replace_with_seq_vops (gsi, stmts);
3584 /* gsi now points at the assignment to the lhs, get a
3585	     stmt iterator to the strcpy call.
3586 ??? We can't use gsi_for_stmt as that doesn't work when the
3587 CFG isn't built yet. */
3588 gimple_stmt_iterator gsi2 = *gsi;
3589 gsi_prev (&gsi2);
3590 fold_stmt (&gsi2);
3592 else
3594 gsi_replace_with_seq_vops (gsi, stmts);
3595 fold_stmt (gsi);
3597 return true;
3599 return false;
3602 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3603 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3604 attempt to simplify calls with more than 4 arguments.
3606 Return true if simplification was possible, otherwise false. */
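/* For instance (illustrative): with a destination known to be large
   enough,
     snprintf (d, 8, "abc");
   becomes
     strcpy (d, "abc");
   with a used result replaced by the constant 3.  */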
3608 bool
3609 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3611 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3612 tree dest = gimple_call_arg (stmt, 0);
3613 tree destsize = gimple_call_arg (stmt, 1);
3614 tree fmt = gimple_call_arg (stmt, 2);
3615 tree orig = NULL_TREE;
3616 const char *fmt_str = NULL;
3618 if (gimple_call_num_args (stmt) > 4)
3619 return false;
3621 if (gimple_call_num_args (stmt) == 4)
3622 orig = gimple_call_arg (stmt, 3);
3624 /* Check whether the format is a literal string constant. */
3625 fmt_str = c_getstr (fmt);
3626 if (fmt_str == NULL)
3627 return false;
3629 if (!init_target_chars ())
3630 return false;
3632 /* If the format doesn't contain % args or %%, use strcpy. */
3633 if (strchr (fmt_str, target_percent) == NULL)
3635 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3636 if (!fn)
3637 return false;
3639 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3640 if (orig)
3641 return false;
3643 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3645 /* We could expand this as
3646 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3647 or to
3648 memcpy (str, fmt_with_nul_at_cstm1, cst);
3649 but in the former case that might increase code size
3650 and in the latter case grow .rodata section too much.
3651 So punt for now. */
3652 if (!known_lower (stmt, len, destsize, true))
3653 return false;
3655 gimple_seq stmts = NULL;
3656 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3657 gimple_seq_add_stmt_without_update (&stmts, repl);
3658 if (tree lhs = gimple_call_lhs (stmt))
3660 repl = gimple_build_assign (lhs,
3661 fold_convert (TREE_TYPE (lhs), len));
3662 gimple_seq_add_stmt_without_update (&stmts, repl);
3663 gsi_replace_with_seq_vops (gsi, stmts);
3664 /* gsi now points at the assignment to the lhs, get a
3665 stmt iterator to the strcpy call.
3666 ??? We can't use gsi_for_stmt as that doesn't work when the
3667 CFG isn't built yet. */
3668 gimple_stmt_iterator gsi2 = *gsi;
3669 gsi_prev (&gsi2);
3670 fold_stmt (&gsi2);
3672 else
3674 gsi_replace_with_seq_vops (gsi, stmts);
3675 fold_stmt (gsi);
3677 return true;
3680 /* If the format is "%s", use strcpy. */
3681 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3683 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3684 if (!fn)
3685 return false;
3687 /* Don't crash on snprintf (str1, cst, "%s"). */
3688 if (!orig)
3689 return false;
3691 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3693 /* We could expand this as
3694 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3695 or to
3696 memcpy (str1, str2_with_nul_at_cstm1, cst);
3697 but in the former case that might increase code size
3698 and in the latter case grow .rodata section too much.
3699 So punt for now. */
3700 if (!known_lower (stmt, orig_len, destsize, true))
3701 return false;
3703 /* Convert snprintf (str1, cst, "%s", str2) into
3704 strcpy (str1, str2) if strlen (str2) < cst. */
3705 gimple_seq stmts = NULL;
3706 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3707 gimple_seq_add_stmt_without_update (&stmts, repl);
3708 if (tree lhs = gimple_call_lhs (stmt))
3710 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3711 TREE_TYPE (orig_len)))
3712 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3713 repl = gimple_build_assign (lhs, orig_len);
3714 gimple_seq_add_stmt_without_update (&stmts, repl);
3715 gsi_replace_with_seq_vops (gsi, stmts);
3716 /* gsi now points at the assignment to the lhs, get a
3717 stmt iterator to the strcpy call.
3718 ??? We can't use gsi_for_stmt as that doesn't work when the
3719 CFG isn't built yet. */
3720 gimple_stmt_iterator gsi2 = *gsi;
3721 gsi_prev (&gsi2);
3722 fold_stmt (&gsi2);
3724 else
3726 gsi_replace_with_seq_vops (gsi, stmts);
3727 fold_stmt (gsi);
3729 return true;
3731 return false;
3734 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3735 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3736 more than 3 arguments, and ARG may be null in the 2-argument case.
3738 Return true if simplification was made, otherwise false. FCODE is
3739 the BUILT_IN_* code of the function to be simplified. */
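/* E.g., a sketch of the foldings below (fp is any FILE *; the call's
   return value must be unused):

     fprintf (fp, "hello");    -> fputs ("hello", fp)
     fprintf (fp, "%s", str);  -> fputs (str, fp)
     fprintf (fp, "%c", c);    -> fputc (c, fp)  */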
3742 static bool
3743 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3744 tree fp, tree fmt, tree arg,
3745 enum built_in_function fcode)
3747 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3748 tree fn_fputc, fn_fputs;
3749 const char *fmt_str = NULL;
3751 /* If the return value is used, don't do the transformation. */
3752 if (gimple_call_lhs (stmt) != NULL_TREE)
3753 return false;
3755 /* Check whether the format is a literal string constant. */
3756 fmt_str = c_getstr (fmt);
3757 if (fmt_str == NULL)
3758 return false;
3760 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3762 /* If we're using an unlocked function, assume the other
3763 unlocked functions exist explicitly. */
3764 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3765 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3767 else
3769 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3770 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3773 if (!init_target_chars ())
3774 return false;
3776 /* If the format doesn't contain % args or %%, use fputs. */
3777 if (strchr (fmt_str, target_percent) == NULL)
3779 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3780 && arg)
3781 return false;
3783 /* If the format string was "", fprintf does nothing. */
3784 if (fmt_str[0] == '\0')
3786 replace_call_with_value (gsi, NULL_TREE);
3787 return true;
3790 /* When "string" doesn't contain %, replace all cases of
3791 fprintf (fp, string) with fputs (string, fp). The fputs
3792 builtin will take care of special cases like length == 1. */
3793 if (fn_fputs)
3795 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3796 replace_call_with_call_and_fold (gsi, repl);
3797 return true;
3801 /* The other optimizations can be done only on the non-va_list variants. */
3802 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3803 return false;
3805 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3806 else if (strcmp (fmt_str, target_percent_s) == 0)
3808 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3809 return false;
3810 if (fn_fputs)
3812 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3813 replace_call_with_call_and_fold (gsi, repl);
3814 return true;
3818 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3819 else if (strcmp (fmt_str, target_percent_c) == 0)
3821 if (!arg
3822 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3823 return false;
3824 if (fn_fputc)
3826 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3827 replace_call_with_call_and_fold (gsi, repl);
3828 return true;
3832 return false;
3835 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3836 FMT and ARG are the arguments to the call; we don't fold cases with
3837 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3839 Return true if simplification was made, otherwise false. FCODE is
3840 the BUILT_IN_* code of the function to be simplified. */
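/* E.g., a sketch of the foldings below (the call's return value must
   be unused):

     printf ("x");        -> putchar ('x')
     printf ("abc\n");    -> puts ("abc")
     printf ("%s\n", s);  -> puts (s)
     printf ("%c", c);    -> putchar (c)  */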
3843 static bool
3844 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3845 tree arg, enum built_in_function fcode)
3847 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3848 tree fn_putchar, fn_puts, newarg;
3849 const char *fmt_str = NULL;
3851 /* If the return value is used, don't do the transformation. */
3852 if (gimple_call_lhs (stmt) != NULL_TREE)
3853 return false;
3855 /* Check whether the format is a literal string constant. */
3856 fmt_str = c_getstr (fmt);
3857 if (fmt_str == NULL)
3858 return false;
3860 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3862 /* If we're using an unlocked function, assume the other
3863 unlocked functions exist explicitly. */
3864 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3865 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3867 else
3869 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3870 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3873 if (!init_target_chars ())
3874 return false;
3876 if (strcmp (fmt_str, target_percent_s) == 0
3877 || strchr (fmt_str, target_percent) == NULL)
3879 const char *str;
3881 if (strcmp (fmt_str, target_percent_s) == 0)
3883 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3884 return false;
3886 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3887 return false;
3889 str = c_getstr (arg);
3890 if (str == NULL)
3891 return false;
3893 else
3895 /* The format specifier doesn't contain any '%' characters. */
3896 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3897 && arg)
3898 return false;
3899 str = fmt_str;
3902 /* If the string was "", printf does nothing. */
3903 if (str[0] == '\0')
3905 replace_call_with_value (gsi, NULL_TREE);
3906 return true;
3909 /* If the string has length of 1, call putchar. */
3910 if (str[1] == '\0')
3912 /* Given printf ("c") (where c is any one character),
3913 convert "c"[0] to an int and pass that to the replacement
3914 function. */
3915 newarg = build_int_cst (integer_type_node, str[0]);
3916 if (fn_putchar)
3918 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3919 replace_call_with_call_and_fold (gsi, repl);
3920 return true;
3923 else
3925 /* If the string was "string\n", call puts("string"). */
3926 size_t len = strlen (str);
3927 if ((unsigned char)str[len - 1] == target_newline
3928 && (size_t) (int) len == len
3929 && (int) len > 0)
3931 char *newstr;
3933 /* Create a NUL-terminated string that's one char shorter
3934 than the original, stripping off the trailing '\n'. */
3935 newstr = xstrdup (str);
3936 newstr[len - 1] = '\0';
3937 newarg = build_string_literal (len, newstr);
3938 free (newstr);
3939 if (fn_puts)
3941 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3942 replace_call_with_call_and_fold (gsi, repl);
3943 return true;
3946 else
3947 /* We'd like to arrange to call fputs(string,stdout) here,
3948 but we need stdout and don't have a way to get it yet. */
3949 return false;
3953 /* The other optimizations can be done only on the non-va_list variants. */
3954 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3955 return false;
3957 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3958 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3960 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3961 return false;
3962 if (fn_puts)
3964 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3965 replace_call_with_call_and_fold (gsi, repl);
3966 return true;
3970 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3971 else if (strcmp (fmt_str, target_percent_c) == 0)
3973 if (!arg || ! useless_type_conversion_p (integer_type_node,
3974 TREE_TYPE (arg)))
3975 return false;
3976 if (fn_putchar)
3978 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3979 replace_call_with_call_and_fold (gsi, repl);
3980 return true;
3984 return false;
3989 /* Fold a call to __builtin_strlen, computing the constant length or a length range. */
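/* E.g. strlen ("abc") folds to the constant 3, while for a sketch like

     char buf[8];
     ... n = strlen (buf); ...

   no constant is known, but the result range [0, 7] can still be
   recorded for later passes.  */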
3991 static bool
3992 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3994 gimple *stmt = gsi_stmt (*gsi);
3995 tree arg = gimple_call_arg (stmt, 0);
3997 wide_int minlen;
3998 wide_int maxlen;
4000 c_strlen_data lendata = { };
4001 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4002 && !lendata.decl
4003 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4004 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4006 /* The range of lengths refers to either a single constant
4007 string or to the longest and shortest constant string
4008 referenced by the argument of the strlen() call, or to
4009 the strings that can possibly be stored in the arrays
4010 the argument refers to. */
4011 minlen = wi::to_wide (lendata.minlen);
4012 maxlen = wi::to_wide (lendata.maxlen);
4014 else
4016 unsigned prec = TYPE_PRECISION (sizetype);
4018 minlen = wi::shwi (0, prec);
4019 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4022 if (minlen == maxlen)
4024 /* Fold the strlen call to a constant. */
4025 tree type = TREE_TYPE (lendata.minlen);
4026 tree len = force_gimple_operand_gsi (gsi,
4027 wide_int_to_tree (type, minlen),
4028 true, NULL, true, GSI_SAME_STMT);
4029 replace_call_with_value (gsi, len);
4030 return true;
4033 /* Set the strlen() range to [0, MAXLEN]. */
4034 if (tree lhs = gimple_call_lhs (stmt))
4035 set_strlen_range (lhs, minlen, maxlen);
4037 return false;
4040 /* Fold a call to __builtin_acc_on_device. */
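/* The call is lowered to the equivalent of

     host_eq = arg0 == val_host;
     dev_eq = arg0 == val_dev;
     result = host_eq | dev_eq;

   where val_host and val_dev depend on whether this is the host or
   the accelerator compiler.  */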
4042 static bool
4043 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4045 /* Defer folding until we know which compiler we're in. */
4046 if (symtab->state != EXPANSION)
4047 return false;
4049 unsigned val_host = GOMP_DEVICE_HOST;
4050 unsigned val_dev = GOMP_DEVICE_NONE;
4052 #ifdef ACCEL_COMPILER
4053 val_host = GOMP_DEVICE_NOT_HOST;
4054 val_dev = ACCEL_COMPILER_acc_device;
4055 #endif
4057 location_t loc = gimple_location (gsi_stmt (*gsi));
4059 tree host_eq = make_ssa_name (boolean_type_node);
4060 gimple *host_ass = gimple_build_assign
4061 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4062 gimple_set_location (host_ass, loc);
4063 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4065 tree dev_eq = make_ssa_name (boolean_type_node);
4066 gimple *dev_ass = gimple_build_assign
4067 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4068 gimple_set_location (dev_ass, loc);
4069 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4071 tree result = make_ssa_name (boolean_type_node);
4072 gimple *result_ass = gimple_build_assign
4073 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4074 gimple_set_location (result_ass, loc);
4075 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4077 replace_call_with_value (gsi, result);
4079 return true;
4082 /* Fold realloc (0, n) -> malloc (n). */
4084 static bool
4085 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4087 gimple *stmt = gsi_stmt (*gsi);
4088 tree arg = gimple_call_arg (stmt, 0);
4089 tree size = gimple_call_arg (stmt, 1);
4091 if (operand_equal_p (arg, null_pointer_node, 0))
4093 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4094 if (fn_malloc)
4096 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4097 replace_call_with_call_and_fold (gsi, repl);
4098 return true;
4101 return false;
4104 /* Number of bytes into which any type other than an aggregate,
4105 vector or _BitInt type should fit. */
4106 static constexpr size_t clear_padding_unit
4107 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4108 /* Buffer size on which __builtin_clear_padding folding code works. */
4109 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4111 /* Data passed through __builtin_clear_padding folding. */
4112 struct clear_padding_struct {
4113 location_t loc;
4114 /* 0 during __builtin_clear_padding folding, nonzero during
4115 clear_type_padding_in_mask. In that case, instead of clearing the
4116 non-padding bits in the union_ptr array, clear the padding bits in there. */
4117 bool clear_in_mask;
4118 tree base;
4119 tree alias_type;
4120 gimple_stmt_iterator *gsi;
4121 /* Alignment of buf->base + 0. */
4122 unsigned align;
4123 /* Offset from buf->base. Should always be a multiple of UNITS_PER_WORD. */
4124 HOST_WIDE_INT off;
4125 /* Number of padding bytes before buf->off that don't have padding clear
4126 code emitted yet. */
4127 HOST_WIDE_INT padding_bytes;
4128 /* The size of the whole object. Never emit code to touch
4129 buf->base + buf->sz or following bytes. */
4130 HOST_WIDE_INT sz;
4131 /* Number of bytes recorded in buf->buf. */
4132 size_t size;
4133 /* When inside a union, instead of emitting code we AND the bits into
4134 the union_ptr array. */
4135 unsigned char *union_ptr;
4136 /* Set bits mean padding bits that need to be cleared by the builtin. */
4137 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4140 /* Emit code to clear the padding requested in BUF->buf; set bits
4141 in there stand for padding that should be cleared. FULL is true
4142 if everything from the buffer should be flushed, otherwise
4143 it can leave up to 2 * clear_padding_unit bytes for further
4144 processing. */
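/* E.g. if one word of buf->buf holds 00 ff ff 00 (a sketch assuming
   4-byte words), the two middle bytes are padding; either a two-byte
   zero store or a masked read-modify-write of the containing word is
   emitted for them, depending on whether bitfields are involved.  */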
4146 static void
4147 clear_padding_flush (clear_padding_struct *buf, bool full)
4149 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4150 if (!full && buf->size < 2 * clear_padding_unit)
4151 return;
4152 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4153 size_t end = buf->size;
4154 if (!full)
4155 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4156 * clear_padding_unit);
4157 size_t padding_bytes = buf->padding_bytes;
4158 if (buf->union_ptr)
4160 if (buf->clear_in_mask)
4162 /* During clear_type_padding_in_mask, clear the padding
4163 bits set in buf->buf in the buf->union_ptr mask. */
4164 for (size_t i = 0; i < end; i++)
4166 if (buf->buf[i] == (unsigned char) ~0)
4167 padding_bytes++;
4168 else
4170 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4171 0, padding_bytes);
4172 padding_bytes = 0;
4173 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4176 if (full)
4178 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4179 0, padding_bytes);
4180 buf->off = 0;
4181 buf->size = 0;
4182 buf->padding_bytes = 0;
4184 else
4186 memmove (buf->buf, buf->buf + end, buf->size - end);
4187 buf->off += end;
4188 buf->size -= end;
4189 buf->padding_bytes = padding_bytes;
4191 return;
4193 /* Inside of a union, instead of emitting any code,
4194 clear all bits in the union_ptr buffer that are clear
4195 in buf. Whole padding bytes don't clear anything. */
4196 for (size_t i = 0; i < end; i++)
4198 if (buf->buf[i] == (unsigned char) ~0)
4199 padding_bytes++;
4200 else
4202 padding_bytes = 0;
4203 buf->union_ptr[buf->off + i] &= buf->buf[i];
4206 if (full)
4208 buf->off = 0;
4209 buf->size = 0;
4210 buf->padding_bytes = 0;
4212 else
4214 memmove (buf->buf, buf->buf + end, buf->size - end);
4215 buf->off += end;
4216 buf->size -= end;
4217 buf->padding_bytes = padding_bytes;
4219 return;
4221 size_t wordsize = UNITS_PER_WORD;
4222 for (size_t i = 0; i < end; i += wordsize)
4224 size_t nonzero_first = wordsize;
4225 size_t nonzero_last = 0;
4226 size_t zero_first = wordsize;
4227 size_t zero_last = 0;
4228 bool all_ones = true, bytes_only = true;
4229 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4230 > (unsigned HOST_WIDE_INT) buf->sz)
4232 gcc_assert (wordsize > 1);
4233 wordsize /= 2;
4234 i -= wordsize;
4235 continue;
4237 for (size_t j = i; j < i + wordsize && j < end; j++)
4239 if (buf->buf[j])
4241 if (nonzero_first == wordsize)
4243 nonzero_first = j - i;
4244 nonzero_last = j - i;
4246 if (nonzero_last != j - i)
4247 all_ones = false;
4248 nonzero_last = j + 1 - i;
4250 else
4252 if (zero_first == wordsize)
4253 zero_first = j - i;
4254 zero_last = j + 1 - i;
4256 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4258 all_ones = false;
4259 bytes_only = false;
4262 size_t padding_end = i;
4263 if (padding_bytes)
4265 if (nonzero_first == 0
4266 && nonzero_last == wordsize
4267 && all_ones)
4269 /* All bits are padding and we had some padding
4270 before too. Just extend it. */
4271 padding_bytes += wordsize;
4272 continue;
4274 if (all_ones && nonzero_first == 0)
4276 padding_bytes += nonzero_last;
4277 padding_end += nonzero_last;
4278 nonzero_first = wordsize;
4279 nonzero_last = 0;
4281 else if (bytes_only && nonzero_first == 0)
4283 gcc_assert (zero_first && zero_first != wordsize);
4284 padding_bytes += zero_first;
4285 padding_end += zero_first;
4287 tree atype, src;
4288 if (padding_bytes == 1)
4290 atype = char_type_node;
4291 src = build_zero_cst (char_type_node);
4293 else
4295 atype = build_array_type_nelts (char_type_node, padding_bytes);
4296 src = build_constructor (atype, NULL);
4298 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4299 build_int_cst (buf->alias_type,
4300 buf->off + padding_end
4301 - padding_bytes));
4302 gimple *g = gimple_build_assign (dst, src);
4303 gimple_set_location (g, buf->loc);
4304 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4305 padding_bytes = 0;
4306 buf->padding_bytes = 0;
4308 if (nonzero_first == wordsize)
4309 /* All bits in a word are 0, there are no padding bits. */
4310 continue;
4311 if (all_ones && nonzero_last == wordsize)
4313 /* All bits between nonzero_first and end of word are padding
4314 bits, start counting padding_bytes. */
4315 padding_bytes = nonzero_last - nonzero_first;
4316 continue;
4318 if (bytes_only)
4320 /* If bitfields aren't involved in this word, prefer storing
4321 individual bytes or groups of them over performing an RMW
4322 operation on the whole word. */
4323 gcc_assert (i + zero_last <= end);
4324 for (size_t j = padding_end; j < i + zero_last; j++)
4326 if (buf->buf[j])
4328 size_t k;
4329 for (k = j; k < i + zero_last; k++)
4330 if (buf->buf[k] == 0)
4331 break;
4332 HOST_WIDE_INT off = buf->off + j;
4333 tree atype, src;
4334 if (k - j == 1)
4336 atype = char_type_node;
4337 src = build_zero_cst (char_type_node);
4339 else
4341 atype = build_array_type_nelts (char_type_node, k - j);
4342 src = build_constructor (atype, NULL);
4344 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4345 buf->base,
4346 build_int_cst (buf->alias_type, off));
4347 gimple *g = gimple_build_assign (dst, src);
4348 gimple_set_location (g, buf->loc);
4349 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4350 j = k;
4353 if (nonzero_last == wordsize)
4354 padding_bytes = nonzero_last - zero_last;
4355 continue;
4357 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4359 if (nonzero_last - nonzero_first <= eltsz
4360 && ((nonzero_first & ~(eltsz - 1))
4361 == ((nonzero_last - 1) & ~(eltsz - 1))))
4363 tree type;
4364 if (eltsz == 1)
4365 type = char_type_node;
4366 else
4367 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4368 0);
4369 size_t start = nonzero_first & ~(eltsz - 1);
4370 HOST_WIDE_INT off = buf->off + i + start;
4371 tree atype = type;
4372 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4373 atype = build_aligned_type (type, buf->align);
4374 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4375 build_int_cst (buf->alias_type, off));
4376 tree src;
4377 gimple *g;
4378 if (all_ones
4379 && nonzero_first == start
4380 && nonzero_last == start + eltsz)
4381 src = build_zero_cst (type);
4382 else
4384 src = make_ssa_name (type);
4385 tree tmp_dst = unshare_expr (dst);
4386 /* The folding introduces a read from tmp_dst; we should
4387 prevent the uninitialized warning analysis from issuing a warning
4388 for such a fake read. In order to suppress the warning only for
4389 this expr, we set the location of tmp_dst to
4390 UNKNOWN_LOCATION first; then suppress_warning will call
4391 set_no_warning_bit to set the no_warning flag only for
4392 tmp_dst. */
4393 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4394 suppress_warning (tmp_dst, OPT_Wuninitialized);
4395 g = gimple_build_assign (src, tmp_dst);
4396 gimple_set_location (g, buf->loc);
4397 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4398 tree mask = native_interpret_expr (type,
4399 buf->buf + i + start,
4400 eltsz);
4401 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4402 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4403 tree src_masked = make_ssa_name (type);
4404 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4405 src, mask);
4406 gimple_set_location (g, buf->loc);
4407 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4408 src = src_masked;
4410 g = gimple_build_assign (dst, src);
4411 gimple_set_location (g, buf->loc);
4412 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4413 break;
4417 if (full)
4419 if (padding_bytes)
4421 tree atype, src;
4422 if (padding_bytes == 1)
4424 atype = char_type_node;
4425 src = build_zero_cst (char_type_node);
4427 else
4429 atype = build_array_type_nelts (char_type_node, padding_bytes);
4430 src = build_constructor (atype, NULL);
4432 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4433 build_int_cst (buf->alias_type,
4434 buf->off + end
4435 - padding_bytes));
4436 gimple *g = gimple_build_assign (dst, src);
4437 gimple_set_location (g, buf->loc);
4438 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4440 size_t end_rem = end % UNITS_PER_WORD;
4441 buf->off += end - end_rem;
4442 buf->size = end_rem;
4443 memset (buf->buf, 0, buf->size);
4444 buf->padding_bytes = 0;
4446 else
4448 memmove (buf->buf, buf->buf + end, buf->size - end);
4449 buf->off += end;
4450 buf->size -= end;
4451 buf->padding_bytes = padding_bytes;
4455 /* Append PADDING_BYTES padding bytes. */
4457 static void
4458 clear_padding_add_padding (clear_padding_struct *buf,
4459 HOST_WIDE_INT padding_bytes)
4461 if (padding_bytes == 0)
4462 return;
4463 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4464 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4465 clear_padding_flush (buf, false);
4466 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4467 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4469 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4470 padding_bytes -= clear_padding_buf_size - buf->size;
4471 buf->size = clear_padding_buf_size;
4472 clear_padding_flush (buf, false);
4473 gcc_assert (buf->padding_bytes);
4474 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4475 is guaranteed to be all ones. */
4476 padding_bytes += buf->size;
4477 buf->size = padding_bytes % UNITS_PER_WORD;
4478 memset (buf->buf, ~0, buf->size);
4479 buf->off += padding_bytes - buf->size;
4480 buf->padding_bytes += padding_bytes - buf->size;
4482 else
4484 memset (buf->buf + buf->size, ~0, padding_bytes);
4485 buf->size += padding_bytes;
4489 static void clear_padding_type (clear_padding_struct *, tree,
4490 HOST_WIDE_INT, bool);
4492 /* Clear padding bits of union type TYPE. */
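/* Only bits that are padding in every member count as padding of the
   whole union. E.g. (a sketch assuming 4-byte int and alignment):

     union U { char c; int i; };                        // no padding at all
     union V { struct { char c; int i; } s; char d; };  // bytes 1-3 are padding  */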
4494 static void
4495 clear_padding_union (clear_padding_struct *buf, tree type,
4496 HOST_WIDE_INT sz, bool for_auto_init)
4498 clear_padding_struct *union_buf;
4499 HOST_WIDE_INT start_off = 0, next_off = 0;
4500 size_t start_size = 0;
4501 if (buf->union_ptr)
4503 start_off = buf->off + buf->size;
4504 next_off = start_off + sz;
4505 start_size = start_off % UNITS_PER_WORD;
4506 start_off -= start_size;
4507 clear_padding_flush (buf, true);
4508 union_buf = buf;
4510 else
4512 if (sz + buf->size > clear_padding_buf_size)
4513 clear_padding_flush (buf, false);
4514 union_buf = XALLOCA (clear_padding_struct);
4515 union_buf->loc = buf->loc;
4516 union_buf->clear_in_mask = buf->clear_in_mask;
4517 union_buf->base = NULL_TREE;
4518 union_buf->alias_type = NULL_TREE;
4519 union_buf->gsi = NULL;
4520 union_buf->align = 0;
4521 union_buf->off = 0;
4522 union_buf->padding_bytes = 0;
4523 union_buf->sz = sz;
4524 union_buf->size = 0;
4525 if (sz + buf->size <= clear_padding_buf_size)
4526 union_buf->union_ptr = buf->buf + buf->size;
4527 else
4528 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4529 memset (union_buf->union_ptr, ~0, sz);
4532 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4533 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4535 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4537 if (TREE_TYPE (field) == error_mark_node)
4538 continue;
4539 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4540 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4541 if (!buf->clear_in_mask && !for_auto_init)
4542 error_at (buf->loc, "flexible array member %qD does not have "
4543 "well defined padding bits for %qs",
4544 field, "__builtin_clear_padding");
4545 continue;
4547 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4548 gcc_assert (union_buf->size == 0);
4549 union_buf->off = start_off;
4550 union_buf->size = start_size;
4551 memset (union_buf->buf, ~0, start_size);
4552 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4553 clear_padding_add_padding (union_buf, sz - fldsz);
4554 clear_padding_flush (union_buf, true);
4557 if (buf == union_buf)
4559 buf->off = next_off;
4560 buf->size = next_off % UNITS_PER_WORD;
4561 buf->off -= buf->size;
4562 memset (buf->buf, ~0, buf->size);
4564 else if (sz + buf->size <= clear_padding_buf_size)
4565 buf->size += sz;
4566 else
4568 unsigned char *union_ptr = union_buf->union_ptr;
4569 while (sz)
4571 clear_padding_flush (buf, false);
4572 HOST_WIDE_INT this_sz
4573 = MIN ((unsigned HOST_WIDE_INT) sz,
4574 clear_padding_buf_size - buf->size);
4575 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4576 buf->size += this_sz;
4577 union_ptr += this_sz;
4578 sz -= this_sz;
4580 XDELETE (union_buf->union_ptr);
4584 /* The only known floating point formats with padding bits are the
4585 IEEE extended ones. */
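/* E.g. the x86 80-bit extended format stores 10 bytes of data in a
   12- or 16-byte long double; the remaining bytes are padding.  */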
4587 static bool
4588 clear_padding_real_needs_padding_p (tree type)
4590 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4591 return (fmt->b == 2
4592 && fmt->signbit_ro == fmt->signbit_rw
4593 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4596 /* _BitInt has padding bits if it isn't extended in the ABI and its
4597 precision is smaller than the bits in a limb or in the corresponding number of limbs. */
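/* E.g. with 64-bit limbs (a sketch; the limb size is target-specific),
   _BitInt(37) leaves 27 padding bits in its single limb, and
   _BitInt(135) leaves 57 padding bits in its most significant limb,
   unless the ABI extends the value to full limbs.  */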
4599 static bool
4600 clear_padding_bitint_needs_padding_p (tree type)
4602 struct bitint_info info;
4603 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4604 gcc_assert (ok);
4605 if (info.extended)
4606 return false;
4607 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.abi_limb_mode);
4608 if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4609 return true;
4610 else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
4611 return false;
4612 else
4613 return (((unsigned) TYPE_PRECISION (type))
4614 % GET_MODE_PRECISION (limb_mode)) != 0;
4617 /* Return true if TYPE might contain any padding bits. */
4619 bool
4620 clear_padding_type_may_have_padding_p (tree type)
4622 switch (TREE_CODE (type))
4624 case RECORD_TYPE:
4625 case UNION_TYPE:
4626 return true;
4627 case ARRAY_TYPE:
4628 case COMPLEX_TYPE:
4629 case VECTOR_TYPE:
4630 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4631 case REAL_TYPE:
4632 return clear_padding_real_needs_padding_p (type);
4633 case BITINT_TYPE:
4634 return clear_padding_bitint_needs_padding_p (type);
4635 default:
4636 return false;
4640 /* Emit a runtime loop:
4641 for (; buf.base != end; buf.base += sz)
4642 __builtin_clear_padding (buf.base); */
4644 static void
4645 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4646 tree end, bool for_auto_init)
4648 tree l1 = create_artificial_label (buf->loc);
4649 tree l2 = create_artificial_label (buf->loc);
4650 tree l3 = create_artificial_label (buf->loc);
4651 gimple *g = gimple_build_goto (l2);
4652 gimple_set_location (g, buf->loc);
4653 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4654 g = gimple_build_label (l1);
4655 gimple_set_location (g, buf->loc);
4656 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4657 clear_padding_type (buf, type, buf->sz, for_auto_init);
4658 clear_padding_flush (buf, true);
4659 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4660 size_int (buf->sz));
4661 gimple_set_location (g, buf->loc);
4662 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4663 g = gimple_build_label (l2);
4664 gimple_set_location (g, buf->loc);
4665 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4666 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4667 gimple_set_location (g, buf->loc);
4668 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4669 g = gimple_build_label (l3);
4670 gimple_set_location (g, buf->loc);
4671 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4674 /* Clear padding bits for TYPE. Called recursively from
4675 gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4676 the __builtin_clear_padding was not called by the end user;
4677 instead, it was inserted by the compiler to initialize the
4678 padding of an automatic variable. Therefore, we should not
4679 emit error messages for flexible array members, which would
4680 only confuse the end user. */
4682 static void
4683 clear_padding_type (clear_padding_struct *buf, tree type,
4684 HOST_WIDE_INT sz, bool for_auto_init)
4686 switch (TREE_CODE (type))
4688 case RECORD_TYPE:
4689 HOST_WIDE_INT cur_pos;
4690 cur_pos = 0;
4691 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4692 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4694 tree ftype = TREE_TYPE (field);
4695 if (DECL_BIT_FIELD (field))
4697 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4698 if (fldsz == 0)
4699 continue;
4700 HOST_WIDE_INT pos = int_byte_position (field);
4701 if (pos >= sz)
4702 continue;
4703 HOST_WIDE_INT bpos
4704 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4705 bpos %= BITS_PER_UNIT;
4706 HOST_WIDE_INT end
4707 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4708 if (pos + end > cur_pos)
4710 clear_padding_add_padding (buf, pos + end - cur_pos);
4711 cur_pos = pos + end;
4713 gcc_assert (cur_pos > pos
4714 && ((unsigned HOST_WIDE_INT) buf->size
4715 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4716 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4717 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4718 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4719 " in %qs", "__builtin_clear_padding");
4720 else if (BYTES_BIG_ENDIAN)
4722 /* Big endian. */
4723 if (bpos + fldsz <= BITS_PER_UNIT)
4724 *p &= ~(((1 << fldsz) - 1)
4725 << (BITS_PER_UNIT - bpos - fldsz));
4726 else
4728 if (bpos)
4730 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4731 p++;
4732 fldsz -= BITS_PER_UNIT - bpos;
4734 memset (p, 0, fldsz / BITS_PER_UNIT);
4735 p += fldsz / BITS_PER_UNIT;
4736 fldsz %= BITS_PER_UNIT;
4737 if (fldsz)
4738 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4741 else
4743 /* Little endian. */
4744 if (bpos + fldsz <= BITS_PER_UNIT)
4745 *p &= ~(((1 << fldsz) - 1) << bpos);
4746 else
4748 if (bpos)
4750 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4751 p++;
4752 fldsz -= BITS_PER_UNIT - bpos;
4754 memset (p, 0, fldsz / BITS_PER_UNIT);
4755 p += fldsz / BITS_PER_UNIT;
4756 fldsz %= BITS_PER_UNIT;
4757 if (fldsz)
4758 *p &= ~((1 << fldsz) - 1);
4762 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4764 if (ftype == error_mark_node)
4765 continue;
4766 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4767 && !COMPLETE_TYPE_P (ftype));
4768 if (!buf->clear_in_mask && !for_auto_init)
4769 error_at (buf->loc, "flexible array member %qD does not "
4770 "have well defined padding bits for %qs",
4771 field, "__builtin_clear_padding");
4773 else if (is_empty_type (ftype))
4774 continue;
4775 else
4777 HOST_WIDE_INT pos = int_byte_position (field);
4778 if (pos >= sz)
4779 continue;
4780 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4781 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4782 clear_padding_add_padding (buf, pos - cur_pos);
4783 cur_pos = pos;
4784 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4785 ftype = asbase;
4786 clear_padding_type (buf, ftype, fldsz, for_auto_init);
4787 cur_pos += fldsz;
4790 gcc_assert (sz >= cur_pos);
4791 clear_padding_add_padding (buf, sz - cur_pos);
4792 break;
4793 case ARRAY_TYPE:
4794 HOST_WIDE_INT nelts, fldsz;
4795 fldsz = int_size_in_bytes (TREE_TYPE (type));
4796 if (fldsz == 0)
4797 break;
4798 nelts = sz / fldsz;
4799 if (nelts > 1
4800 && sz > 8 * UNITS_PER_WORD
4801 && buf->union_ptr == NULL
4802 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4804 /* For a sufficiently large array of more than one element,
4805 emit a runtime loop to keep code size manageable. */
4806 tree base = buf->base;
4807 unsigned int prev_align = buf->align;
4808 HOST_WIDE_INT off = buf->off + buf->size;
4809 HOST_WIDE_INT prev_sz = buf->sz;
4810 clear_padding_flush (buf, true);
4811 tree elttype = TREE_TYPE (type);
4812 buf->base = create_tmp_var (build_pointer_type (elttype));
4813 tree end = make_ssa_name (TREE_TYPE (buf->base));
4814 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4815 base, size_int (off));
4816 gimple_set_location (g, buf->loc);
4817 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4818 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4819 size_int (sz));
4820 gimple_set_location (g, buf->loc);
4821 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4822 buf->sz = fldsz;
4823 buf->align = TYPE_ALIGN (elttype);
4824 buf->off = 0;
4825 buf->size = 0;
4826 clear_padding_emit_loop (buf, elttype, end, for_auto_init);
4827 buf->base = base;
4828 buf->sz = prev_sz;
4829 buf->align = prev_align;
4830 buf->size = off % UNITS_PER_WORD;
4831 buf->off = off - buf->size;
4832 memset (buf->buf, 0, buf->size);
4833 break;
4835 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4836 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4837 break;
4838 case UNION_TYPE:
4839 clear_padding_union (buf, type, sz, for_auto_init);
4840 break;
4841 case REAL_TYPE:
4842 gcc_assert ((size_t) sz <= clear_padding_unit);
4843 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4844 clear_padding_flush (buf, false);
4845 if (clear_padding_real_needs_padding_p (type))
4847 /* Use native_interpret_real + native_encode_expr to figure out
4848 which bits are padding. */
4849 memset (buf->buf + buf->size, ~0, sz);
4850 tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4851 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4852 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4853 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4854 for (size_t i = 0; i < (size_t) sz; i++)
4855 buf->buf[buf->size + i] ^= ~0;
4857 else
4858 memset (buf->buf + buf->size, 0, sz);
4859 buf->size += sz;
4860 break;
4861 case COMPLEX_TYPE:
4862 fldsz = int_size_in_bytes (TREE_TYPE (type));
4863 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4864 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4865 break;
4866 case VECTOR_TYPE:
4867 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4868 fldsz = int_size_in_bytes (TREE_TYPE (type));
4869 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4870 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4871 break;
4872 case NULLPTR_TYPE:
4873 gcc_assert ((size_t) sz <= clear_padding_unit);
4874 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4875 clear_padding_flush (buf, false);
4876 memset (buf->buf + buf->size, ~0, sz);
4877 buf->size += sz;
4878 break;
4879 case BITINT_TYPE:
4881 struct bitint_info info;
4882 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4883 gcc_assert (ok);
4884 scalar_int_mode limb_mode
4885 = as_a <scalar_int_mode> (info.abi_limb_mode);
4886 if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
4888 gcc_assert ((size_t) sz <= clear_padding_unit);
4889 if ((unsigned HOST_WIDE_INT) sz + buf->size
4890 > clear_padding_buf_size)
4891 clear_padding_flush (buf, false);
4892 if (!info.extended
4893 && TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4895 int tprec = GET_MODE_PRECISION (limb_mode);
4896 int prec = TYPE_PRECISION (type);
4897 tree t = build_nonstandard_integer_type (tprec, 1);
4898 tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
4899 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4900 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4902 else
4903 memset (buf->buf + buf->size, 0, sz);
4904 buf->size += sz;
4905 break;
4907 tree limbtype
4908 = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
4909 fldsz = int_size_in_bytes (limbtype);
4910 nelts = int_size_in_bytes (type) / fldsz;
4911 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4913 if (!info.extended
4914 && i == (info.big_endian ? 0 : nelts - 1)
4915 && (((unsigned) TYPE_PRECISION (type))
4916 % TYPE_PRECISION (limbtype)) != 0)
4918 int tprec = GET_MODE_PRECISION (limb_mode);
4919 int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
4920 tree cst = wide_int_to_tree (limbtype,
4921 wi::mask (prec, true, tprec));
4922 int len = native_encode_expr (cst, buf->buf + buf->size,
4923 fldsz);
4924 gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
4925 buf->size += fldsz;
4927 else
4928 clear_padding_type (buf, limbtype, fldsz, for_auto_init);
4930 break;
4932 default:
4933 gcc_assert ((size_t) sz <= clear_padding_unit);
4934 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4935 clear_padding_flush (buf, false);
4936 memset (buf->buf + buf->size, 0, sz);
4937 buf->size += sz;
4938 break;
4942 /* Clear padding bits of TYPE in MASK. */
4944 void
4945 clear_type_padding_in_mask (tree type, unsigned char *mask)
4947 clear_padding_struct buf;
4948 buf.loc = UNKNOWN_LOCATION;
4949 buf.clear_in_mask = true;
4950 buf.base = NULL_TREE;
4951 buf.alias_type = NULL_TREE;
4952 buf.gsi = NULL;
4953 buf.align = 0;
4954 buf.off = 0;
4955 buf.padding_bytes = 0;
4956 buf.sz = int_size_in_bytes (type);
4957 buf.size = 0;
4958 buf.union_ptr = mask;
4959 clear_padding_type (&buf, type, buf.sz, false);
4960 clear_padding_flush (&buf, true);
4963 /* Fold __builtin_clear_padding builtin. */
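/* E.g. (a sketch assuming 4-byte int size and alignment):

     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);   // zeroes bytes 1 to 3 of s  */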
4965 static bool
4966 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4968 gimple *stmt = gsi_stmt (*gsi);
4969 gcc_assert (gimple_call_num_args (stmt) == 2);
4970 tree ptr = gimple_call_arg (stmt, 0);
4971 tree typearg = gimple_call_arg (stmt, 1);
4972 /* The value of the 2nd argument of __builtin_clear_padding is used to
4973 distinguish whether this call is made by the user or by the compiler
4974 for automatic variable initialization. */
4975 bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4976 tree type = TREE_TYPE (TREE_TYPE (typearg));
4977 location_t loc = gimple_location (stmt);
4978 clear_padding_struct buf;
4979 gimple_stmt_iterator gsiprev = *gsi;
4980 /* This should be folded during the lower pass. */
4981 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4982 gcc_assert (COMPLETE_TYPE_P (type));
4983 gsi_prev (&gsiprev);
4985 buf.loc = loc;
4986 buf.clear_in_mask = false;
4987 buf.base = ptr;
4988 buf.alias_type = NULL_TREE;
4989 buf.gsi = gsi;
4990 buf.align = get_pointer_alignment (ptr);
4991 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4992 buf.align = MAX (buf.align, talign);
4993 buf.off = 0;
4994 buf.padding_bytes = 0;
4995 buf.size = 0;
4996 buf.sz = int_size_in_bytes (type);
4997 buf.union_ptr = NULL;
4998 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4999 sorry_at (loc, "%s not supported for variable length aggregates",
5000 "__builtin_clear_padding");
5001 /* The implementation currently assumes 8-bit host and target
5002 chars, which is the case for all currently supported targets
5003 and hosts and is required e.g. for the native_{encode,interpret}* APIs. */
5004 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
5005 sorry_at (loc, "%s not supported on this target",
5006 "__builtin_clear_padding");
5007 else if (!clear_padding_type_may_have_padding_p (type))
5009 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
5011 tree sz = TYPE_SIZE_UNIT (type);
5012 tree elttype = type;
5013 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
5014 while (TREE_CODE (elttype) == ARRAY_TYPE
5015 && int_size_in_bytes (elttype) < 0)
5016 elttype = TREE_TYPE (elttype);
5017 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
5018 gcc_assert (eltsz >= 0);
5019 if (eltsz)
5021 buf.base = create_tmp_var (build_pointer_type (elttype));
5022 tree end = make_ssa_name (TREE_TYPE (buf.base));
5023 gimple *g = gimple_build_assign (buf.base, ptr);
5024 gimple_set_location (g, loc);
5025 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5026 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
5027 gimple_set_location (g, loc);
5028 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5029 buf.sz = eltsz;
5030 buf.align = TYPE_ALIGN (elttype);
5031 buf.alias_type = build_pointer_type (elttype);
5032 clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
5035 else
5037 if (!is_gimple_mem_ref_addr (buf.base))
5039 buf.base = make_ssa_name (TREE_TYPE (ptr));
5040 gimple *g = gimple_build_assign (buf.base, ptr);
5041 gimple_set_location (g, loc);
5042 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5044 buf.alias_type = build_pointer_type (type);
5045 clear_padding_type (&buf, type, buf.sz, for_auto_init);
5046 clear_padding_flush (&buf, true);
5049 gimple_stmt_iterator gsiprev2 = *gsi;
5050 gsi_prev (&gsiprev2);
5051 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
5052 gsi_replace (gsi, gimple_build_nop (), true);
5053 else
5055 gsi_remove (gsi, true);
5056 *gsi = gsiprev2;
5058 return true;
5061 /* Fold the non-target builtin at *GSI and return whether any simplification
5062 was made. */
5064 static bool
5065 gimple_fold_builtin (gimple_stmt_iterator *gsi)
5067 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
5068 tree callee = gimple_call_fndecl (stmt);
5070 /* Give up for always_inline inline builtins until they are
5071 inlined. */
5072 if (avoid_folding_inline_builtin (callee))
5073 return false;
5075 unsigned n = gimple_call_num_args (stmt);
5076 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5077 switch (fcode)
5079 case BUILT_IN_BCMP:
5080 return gimple_fold_builtin_bcmp (gsi);
5081 case BUILT_IN_BCOPY:
5082 return gimple_fold_builtin_bcopy (gsi);
5083 case BUILT_IN_BZERO:
5084 return gimple_fold_builtin_bzero (gsi);
5086 case BUILT_IN_MEMSET:
5087 return gimple_fold_builtin_memset (gsi,
5088 gimple_call_arg (stmt, 1),
5089 gimple_call_arg (stmt, 2));
5090 case BUILT_IN_MEMCPY:
5091 case BUILT_IN_MEMPCPY:
5092 case BUILT_IN_MEMMOVE:
5093 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5094 gimple_call_arg (stmt, 1), fcode);
5095 case BUILT_IN_SPRINTF_CHK:
5096 case BUILT_IN_VSPRINTF_CHK:
5097 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5098 case BUILT_IN_STRCAT_CHK:
5099 return gimple_fold_builtin_strcat_chk (gsi);
5100 case BUILT_IN_STRNCAT_CHK:
5101 return gimple_fold_builtin_strncat_chk (gsi);
5102 case BUILT_IN_STRLEN:
5103 return gimple_fold_builtin_strlen (gsi);
5104 case BUILT_IN_STRCPY:
5105 return gimple_fold_builtin_strcpy (gsi,
5106 gimple_call_arg (stmt, 0),
5107 gimple_call_arg (stmt, 1));
5108 case BUILT_IN_STRNCPY:
5109 return gimple_fold_builtin_strncpy (gsi,
5110 gimple_call_arg (stmt, 0),
5111 gimple_call_arg (stmt, 1),
5112 gimple_call_arg (stmt, 2));
5113 case BUILT_IN_STRCAT:
5114 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5115 gimple_call_arg (stmt, 1));
5116 case BUILT_IN_STRNCAT:
5117 return gimple_fold_builtin_strncat (gsi);
5118 case BUILT_IN_INDEX:
5119 case BUILT_IN_STRCHR:
5120 return gimple_fold_builtin_strchr (gsi, false);
5121 case BUILT_IN_RINDEX:
5122 case BUILT_IN_STRRCHR:
5123 return gimple_fold_builtin_strchr (gsi, true);
5124 case BUILT_IN_STRSTR:
5125 return gimple_fold_builtin_strstr (gsi);
5126 case BUILT_IN_STRCMP:
5127 case BUILT_IN_STRCMP_EQ:
5128 case BUILT_IN_STRCASECMP:
5129 case BUILT_IN_STRNCMP:
5130 case BUILT_IN_STRNCMP_EQ:
5131 case BUILT_IN_STRNCASECMP:
5132 return gimple_fold_builtin_string_compare (gsi);
5133 case BUILT_IN_MEMCHR:
5134 return gimple_fold_builtin_memchr (gsi);
5135 case BUILT_IN_FPUTS:
5136 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5137 gimple_call_arg (stmt, 1), false);
5138 case BUILT_IN_FPUTS_UNLOCKED:
5139 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5140 gimple_call_arg (stmt, 1), true);
5141 case BUILT_IN_MEMCPY_CHK:
5142 case BUILT_IN_MEMPCPY_CHK:
5143 case BUILT_IN_MEMMOVE_CHK:
5144 case BUILT_IN_MEMSET_CHK:
5145 return gimple_fold_builtin_memory_chk (gsi,
5146 gimple_call_arg (stmt, 0),
5147 gimple_call_arg (stmt, 1),
5148 gimple_call_arg (stmt, 2),
5149 gimple_call_arg (stmt, 3),
5150 fcode);
5151 case BUILT_IN_STPCPY:
5152 return gimple_fold_builtin_stpcpy (gsi);
5153 case BUILT_IN_STRCPY_CHK:
5154 case BUILT_IN_STPCPY_CHK:
5155 return gimple_fold_builtin_stxcpy_chk (gsi,
5156 gimple_call_arg (stmt, 0),
5157 gimple_call_arg (stmt, 1),
5158 gimple_call_arg (stmt, 2),
5159 fcode);
5160 case BUILT_IN_STRNCPY_CHK:
5161 case BUILT_IN_STPNCPY_CHK:
5162 return gimple_fold_builtin_stxncpy_chk (gsi,
5163 gimple_call_arg (stmt, 0),
5164 gimple_call_arg (stmt, 1),
5165 gimple_call_arg (stmt, 2),
5166 gimple_call_arg (stmt, 3),
5167 fcode);
5168 case BUILT_IN_SNPRINTF_CHK:
5169 case BUILT_IN_VSNPRINTF_CHK:
5170 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5172 case BUILT_IN_FPRINTF:
5173 case BUILT_IN_FPRINTF_UNLOCKED:
5174 case BUILT_IN_VFPRINTF:
5175 if (n == 2 || n == 3)
5176 return gimple_fold_builtin_fprintf (gsi,
5177 gimple_call_arg (stmt, 0),
5178 gimple_call_arg (stmt, 1),
5179 n == 3
5180 ? gimple_call_arg (stmt, 2)
5181 : NULL_TREE,
5182 fcode);
5183 break;
5184 case BUILT_IN_FPRINTF_CHK:
5185 case BUILT_IN_VFPRINTF_CHK:
5186 if (n == 3 || n == 4)
5187 return gimple_fold_builtin_fprintf (gsi,
5188 gimple_call_arg (stmt, 0),
5189 gimple_call_arg (stmt, 2),
5190 n == 4
5191 ? gimple_call_arg (stmt, 3)
5192 : NULL_TREE,
5193 fcode);
5194 break;
5195 case BUILT_IN_PRINTF:
5196 case BUILT_IN_PRINTF_UNLOCKED:
5197 case BUILT_IN_VPRINTF:
5198 if (n == 1 || n == 2)
5199 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5200 n == 2
5201 ? gimple_call_arg (stmt, 1)
5202 : NULL_TREE, fcode);
5203 break;
5204 case BUILT_IN_PRINTF_CHK:
5205 case BUILT_IN_VPRINTF_CHK:
5206 if (n == 2 || n == 3)
5207 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5208 n == 3
5209 ? gimple_call_arg (stmt, 2)
5210 : NULL_TREE, fcode);
5211 break;
5212 case BUILT_IN_ACC_ON_DEVICE:
5213 return gimple_fold_builtin_acc_on_device (gsi,
5214 gimple_call_arg (stmt, 0));
5215 case BUILT_IN_REALLOC:
5216 return gimple_fold_builtin_realloc (gsi);
5218 case BUILT_IN_CLEAR_PADDING:
5219 return gimple_fold_builtin_clear_padding (gsi);
5221 default:;
5224 /* Try the generic builtin folder. */
5225 bool ignore = (gimple_call_lhs (stmt) == NULL);
5226 tree result = fold_call_stmt (stmt, ignore);
5227 if (result)
5229 if (ignore)
5230 STRIP_NOPS (result);
5231 else
5232 result = fold_convert (gimple_call_return_type (stmt), result);
5233 gimplify_and_update_call_from_tree (gsi, result);
5234 return true;
5237 return false;
5240 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5241 function calls to constants, where possible. */
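/* E.g. if the queried axis is known to have size 1, GOACC_DIM_POS
   folds to the constant 0; if the size is known at all, GOACC_DIM_SIZE
   folds to that constant.  */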
5243 static tree
5244 fold_internal_goacc_dim (const gimple *call)
5246 int axis = oacc_get_ifn_dim_arg (call);
5247 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5248 tree result = NULL_TREE;
5249 tree type = TREE_TYPE (gimple_call_lhs (call));
5251 switch (gimple_call_internal_fn (call))
5253 case IFN_GOACC_DIM_POS:
5254 /* If the size is 1, we know the answer. */
5255 if (size == 1)
5256 result = build_int_cst (type, 0);
5257 break;
5258 case IFN_GOACC_DIM_SIZE:
5259 /* If the size is not dynamic, we know the answer. */
5260 if (size)
5261 result = build_int_cst (type, size);
5262 break;
5263 default:
5264 break;
5267 return result;
5270 /* Return true if STMT is an __atomic_compare_exchange_N call which is
5271 suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument
5272 is &var where var is only addressable because of such calls. */
5274 bool
5275 optimize_atomic_compare_exchange_p (gimple *stmt)
5277 if (gimple_call_num_args (stmt) != 6
5278 || !flag_inline_atomics
5279 || !optimize
5280 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5281 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5282 || !gimple_vdef (stmt)
5283 || !gimple_vuse (stmt))
5284 return false;
5286 tree fndecl = gimple_call_fndecl (stmt);
5287 switch (DECL_FUNCTION_CODE (fndecl))
5289 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5290 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5291 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5292 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5293 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5294 break;
5295 default:
5296 return false;
5299 tree expected = gimple_call_arg (stmt, 1);
5300 if (TREE_CODE (expected) != ADDR_EXPR
5301 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5302 return false;
5304 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5305 if (!is_gimple_reg_type (etype)
5306 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5307 || TREE_THIS_VOLATILE (etype)
5308 || VECTOR_TYPE_P (etype)
5309 || TREE_CODE (etype) == COMPLEX_TYPE
5310 /* Don't optimize floating point expected vars; VIEW_CONVERT_EXPRs
5311 might not preserve all the bits. See PR71716. */
5312 || SCALAR_FLOAT_TYPE_P (etype)
5313 || maybe_ne (TYPE_PRECISION (etype),
5314 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5315 return false;
5317 tree weak = gimple_call_arg (stmt, 3);
5318 if (!integer_zerop (weak) && !integer_onep (weak))
5319 return false;
5321 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5322 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5323 machine_mode mode = TYPE_MODE (itype);
5325 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5326 == CODE_FOR_nothing
5327 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5328 return false;
5330 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5331 return false;
5333 return true;
5336 /* Fold
5337 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5338 into
5339 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5340 i = IMAGPART_EXPR <t>;
5341 r = (_Bool) i;
5342 e = REALPART_EXPR <t>; */
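/* E.g. for a strong 4-byte __atomic_compare_exchange_4 the flag
   argument of the internal call is 4; for a weak one it is
   256 + 4 == 260, following the w * 256 + N encoding above.  */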
5344 void
5345 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5347 gimple *stmt = gsi_stmt (*gsi);
5348 tree fndecl = gimple_call_fndecl (stmt);
5349 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5350 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5351 tree ctype = build_complex_type (itype);
5352 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5353 bool throws = false;
5354 edge e = NULL;
5355 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5356 expected);
5357 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5358 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5359 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5361 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5362 build1 (VIEW_CONVERT_EXPR, itype,
5363 gimple_assign_lhs (g)));
5364 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5366 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5367 + int_size_in_bytes (itype);
5368 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5369 gimple_call_arg (stmt, 0),
5370 gimple_assign_lhs (g),
5371 gimple_call_arg (stmt, 2),
5372 build_int_cst (integer_type_node, flag),
5373 gimple_call_arg (stmt, 4),
5374 gimple_call_arg (stmt, 5));
5375 tree lhs = make_ssa_name (ctype);
5376 gimple_call_set_lhs (g, lhs);
5377 gimple_move_vops (g, stmt);
5378 tree oldlhs = gimple_call_lhs (stmt);
5379 if (stmt_can_throw_internal (cfun, stmt))
5381 throws = true;
5382 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5384 gimple_call_set_nothrow (as_a <gcall *> (g),
5385 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5386 gimple_call_set_lhs (stmt, NULL_TREE);
5387 gsi_replace (gsi, g, true);
5388 if (oldlhs)
5390 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5391 build1 (IMAGPART_EXPR, itype, lhs));
5392 if (throws)
5394 gsi_insert_on_edge_immediate (e, g);
5395 *gsi = gsi_for_stmt (g);
5397 else
5398 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5399 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5400 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5402 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5403 build1 (REALPART_EXPR, itype, lhs));
5404 if (throws && oldlhs == NULL_TREE)
5406 gsi_insert_on_edge_immediate (e, g);
5407 *gsi = gsi_for_stmt (g);
5409 else
5410 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5411 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5413 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5414 VIEW_CONVERT_EXPR,
5415 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5416 gimple_assign_lhs (g)));
5417 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5419 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5420 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5421 *gsi = gsiret;
5424 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5425 doesn't fit into TYPE. The test for overflow is made regardless of
5426 -fwrapv, and even for unsigned types. */
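/* E.g. in an 8-bit unsigned type, 200 + 100 == 300 needs 9 bits and
   thus overflows, and 100 - 200 == -100 is negative and therefore
   also counts as overflowing.  */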
5428 bool
5429 arith_overflowed_p (enum tree_code code, const_tree type,
5430 const_tree arg0, const_tree arg1)
5432 widest2_int warg0 = widest2_int_cst (arg0);
5433 widest2_int warg1 = widest2_int_cst (arg1);
5434 widest2_int wres;
5435 switch (code)
5437 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5438 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5439 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5440 default: gcc_unreachable ();
5442 signop sign = TYPE_SIGN (type);
5443 if (sign == UNSIGNED && wi::neg_p (wres))
5444 return true;
5445 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5448 /* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
5449 return a MEM_REF for the memory it references, otherwise return null.
5450 VECTYPE is the type of the memory vector. MASK_P indicates it's for
5451 MASK if true, otherwise it's for LEN. */
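/* E.g. a masked 4-lane vector load with an all-ones mask (a sketch)

     lhs = .MASK_LOAD (ptr, align, { -1, -1, -1, -1 });

   is unconditional, so the memory it references is just the aligned
   vector MEM_REF *(vectype *) ptr.  */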
5453 static tree
5454 gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
5456 tree ptr = gimple_call_arg (call, 0);
5457 tree alias_align = gimple_call_arg (call, 1);
5458 if (!tree_fits_uhwi_p (alias_align))
5459 return NULL_TREE;
5461 if (mask_p)
5463 tree mask = gimple_call_arg (call, 2);
5464 if (!integer_all_onesp (mask))
5465 return NULL_TREE;
5467 else
5469 internal_fn ifn = gimple_call_internal_fn (call);
5470 int len_index = internal_fn_len_index (ifn);
5471 tree basic_len = gimple_call_arg (call, len_index);
5472 if (!poly_int_tree_p (basic_len))
5473 return NULL_TREE;
5474 tree bias = gimple_call_arg (call, len_index + 1);
5475 gcc_assert (TREE_CODE (bias) == INTEGER_CST);
5476 /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
5477 we don't fold when (bias + len) != VF. */
5478 if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
5479 GET_MODE_NUNITS (TYPE_MODE (vectype))))
5480 return NULL_TREE;
5482 /* For MASK_LEN_{LOAD,STORE}, we should also check whether
5483 the mask is an all-ones mask. */
5484 if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
5486 tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
5487 if (!integer_all_onesp (mask))
5488 return NULL_TREE;
5492 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5493 if (TYPE_ALIGN (vectype) != align)
5494 vectype = build_aligned_type (vectype, align);
5495 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5496 return fold_build2 (MEM_REF, vectype, ptr, offset);
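/* E.g. (illustrative): for .MASK_LOAD (ptr_1, align_cst, { -1, -1, -1, -1 })
   with vector(4) int VECTYPE, the all-ones mask makes the access
   unconditional, so this returns a zero-offset MEM_REF of the vector type
   (re-aligned to the alias alignment encoded in align_cst) based on ptr_1.  */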
5499 /* Try to fold IFN_{MASK,LEN,MASK_LEN}_LOAD call CALL. Return true on success.
5500 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5502 static bool
5503 gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
5505 tree lhs = gimple_call_lhs (call);
5506 if (!lhs)
5507 return false;
5509 if (tree rhs
5510 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs), mask_p))
5512 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5513 gimple_set_location (new_stmt, gimple_location (call));
5514 gimple_move_vops (new_stmt, call);
5515 gsi_replace (gsi, new_stmt, false);
5516 return true;
5518 return false;
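/* A successful fold rewrites, e.g. (illustrative),
     lhs_1 = .MASK_LOAD (p_2, align_cst, { -1, -1, -1, -1 });
   into the ordinary vector load
     lhs_1 = MEM <vector(4) int> [(vector(4) int *)p_2];
   preserving the call's location and virtual operands.  */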
5521 /* Try to fold IFN_{MASK,LEN,MASK_LEN}_STORE call CALL. Return true on success.
5522 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5524 static bool
5525 gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
5526 bool mask_p)
5528 internal_fn ifn = gimple_call_internal_fn (call);
5529 tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
5530 if (tree lhs
5531 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
5533 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5534 gimple_set_location (new_stmt, gimple_location (call));
5535 gimple_move_vops (new_stmt, call);
5536 gsi_replace (gsi, new_stmt, false);
5537 return true;
5539 return false;
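/* Dually (illustrative), an unconditional .MASK_STORE or .LEN_STORE becomes
   a plain vector store:  MEM <vector(4) int> [(vector(4) int *)p_2] = val_3;  */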
5542 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5543 The statement may be replaced by another statement, e.g., if the call
5544 simplifies to a constant value. Return true if any changes were made.
5545 It is assumed that the operands have been previously folded. */
5547 static bool
5548 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5550 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5551 tree callee;
5552 bool changed = false;
5554 /* Check for virtual calls that became direct calls. */
5555 callee = gimple_call_fn (stmt);
5556 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5558 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5560 if (dump_file && virtual_method_call_p (callee)
5561 && !possible_polymorphic_call_target_p
5562 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5563 (OBJ_TYPE_REF_EXPR (callee)))))
5565 fprintf (dump_file,
5566 "Type inheritance inconsistent devirtualization of ");
5567 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5568 fprintf (dump_file, " to ");
5569 print_generic_expr (dump_file, callee, TDF_SLIM);
5570 fprintf (dump_file, "\n");
5573 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5574 changed = true;
5576 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5578 bool final;
5579 vec <cgraph_node *>targets
5580 = possible_polymorphic_call_targets (callee, stmt, &final);
5581 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5583 tree lhs = gimple_call_lhs (stmt);
5584 if (dump_enabled_p ())
5586 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5587 "folding virtual function call to %s\n",
5588 targets.length () == 1
5589 ? targets[0]->name ()
5590 : "__builtin_unreachable");
5592 if (targets.length () == 1)
5594 tree fndecl = targets[0]->decl;
5595 gimple_call_set_fndecl (stmt, fndecl);
5596 changed = true;
5597 /* If changing the call to __cxa_pure_virtual
5598 or similar noreturn function, adjust gimple_call_fntype
5599 too. */
5600 if (gimple_call_noreturn_p (stmt)
5601 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5602 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5603 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5604 == void_type_node))
5605 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5606 /* If the call becomes noreturn, remove the lhs. */
5607 if (lhs
5608 && gimple_call_noreturn_p (stmt)
5609 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5610 || should_remove_lhs_p (lhs)))
5612 if (TREE_CODE (lhs) == SSA_NAME)
5614 tree var = create_tmp_var (TREE_TYPE (lhs));
5615 tree def = get_or_create_ssa_default_def (cfun, var);
5616 gimple *new_stmt = gimple_build_assign (lhs, def);
5617 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5619 gimple_call_set_lhs (stmt, NULL_TREE);
5621 maybe_remove_unused_call_args (cfun, stmt);
5623 else
5625 location_t loc = gimple_location (stmt);
5626 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
5627 gimple_call_set_ctrl_altering (new_stmt, false);
5628 /* If the call had an SSA name as lhs, morph that into
5629 an uninitialized value. */
5630 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5632 tree var = create_tmp_var (TREE_TYPE (lhs));
5633 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5634 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5635 set_ssa_default_def (cfun, var, lhs);
5637 gimple_move_vops (new_stmt, stmt);
5638 gsi_replace (gsi, new_stmt, false);
5639 return true;
5645 /* Check for indirect calls that became direct calls, and then
5646 no longer require a static chain. */
5647 if (gimple_call_chain (stmt))
5649 tree fn = gimple_call_fndecl (stmt);
5650 if (fn && !DECL_STATIC_CHAIN (fn))
5652 gimple_call_set_chain (stmt, NULL);
5653 changed = true;
5657 if (inplace)
5658 return changed;
5660 /* Check for builtins that CCP can handle using information not
5661 available in the generic fold routines. */
5662 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5664 if (gimple_fold_builtin (gsi))
5665 changed = true;
5667 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5669 changed |= targetm.gimple_fold_builtin (gsi);
5671 else if (gimple_call_internal_p (stmt))
5673 enum tree_code subcode = ERROR_MARK;
5674 tree result = NULL_TREE;
5675 bool cplx_result = false;
5676 bool uaddc_usubc = false;
5677 tree overflow = NULL_TREE;
5678 switch (gimple_call_internal_fn (stmt))
5680 case IFN_BUILTIN_EXPECT:
5681 result = fold_builtin_expect (gimple_location (stmt),
5682 gimple_call_arg (stmt, 0),
5683 gimple_call_arg (stmt, 1),
5684 gimple_call_arg (stmt, 2),
5685 NULL_TREE);
5686 break;
5687 case IFN_UBSAN_OBJECT_SIZE:
5689 tree offset = gimple_call_arg (stmt, 1);
5690 tree objsize = gimple_call_arg (stmt, 2);
5691 if (integer_all_onesp (objsize)
5692 || (TREE_CODE (offset) == INTEGER_CST
5693 && TREE_CODE (objsize) == INTEGER_CST
5694 && tree_int_cst_le (offset, objsize)))
5696 replace_call_with_value (gsi, NULL_TREE);
5697 return true;
5700 break;
5701 case IFN_UBSAN_PTR:
5702 if (integer_zerop (gimple_call_arg (stmt, 1)))
5704 replace_call_with_value (gsi, NULL_TREE);
5705 return true;
5707 break;
5708 case IFN_UBSAN_BOUNDS:
5710 tree index = gimple_call_arg (stmt, 1);
5711 tree bound = gimple_call_arg (stmt, 2);
5712 if (TREE_CODE (index) == INTEGER_CST
5713 && TREE_CODE (bound) == INTEGER_CST)
5715 index = fold_convert (TREE_TYPE (bound), index);
5716 if (TREE_CODE (index) == INTEGER_CST
5717 && tree_int_cst_lt (index, bound))
5719 replace_call_with_value (gsi, NULL_TREE);
5720 return true;
5724 break;
5725 case IFN_GOACC_DIM_SIZE:
5726 case IFN_GOACC_DIM_POS:
5727 result = fold_internal_goacc_dim (stmt);
5728 break;
5729 case IFN_UBSAN_CHECK_ADD:
5730 subcode = PLUS_EXPR;
5731 break;
5732 case IFN_UBSAN_CHECK_SUB:
5733 subcode = MINUS_EXPR;
5734 break;
5735 case IFN_UBSAN_CHECK_MUL:
5736 subcode = MULT_EXPR;
5737 break;
5738 case IFN_ADD_OVERFLOW:
5739 subcode = PLUS_EXPR;
5740 cplx_result = true;
5741 break;
5742 case IFN_SUB_OVERFLOW:
5743 subcode = MINUS_EXPR;
5744 cplx_result = true;
5745 break;
5746 case IFN_MUL_OVERFLOW:
5747 subcode = MULT_EXPR;
5748 cplx_result = true;
5749 break;
5750 case IFN_UADDC:
5751 subcode = PLUS_EXPR;
5752 cplx_result = true;
5753 uaddc_usubc = true;
5754 break;
5755 case IFN_USUBC:
5756 subcode = MINUS_EXPR;
5757 cplx_result = true;
5758 uaddc_usubc = true;
5759 break;
5760 case IFN_MASK_LOAD:
5761 changed |= gimple_fold_partial_load (gsi, stmt, true);
5762 break;
5763 case IFN_MASK_STORE:
5764 changed |= gimple_fold_partial_store (gsi, stmt, true);
5765 break;
5766 case IFN_LEN_LOAD:
5767 case IFN_MASK_LEN_LOAD:
5768 changed |= gimple_fold_partial_load (gsi, stmt, false);
5769 break;
5770 case IFN_LEN_STORE:
5771 case IFN_MASK_LEN_STORE:
5772 changed |= gimple_fold_partial_store (gsi, stmt, false);
5773 break;
5774 default:
5775 break;
5777 if (subcode != ERROR_MARK)
5779 tree arg0 = gimple_call_arg (stmt, 0);
5780 tree arg1 = gimple_call_arg (stmt, 1);
5781 tree arg2 = NULL_TREE;
5782 tree type = TREE_TYPE (arg0);
5783 if (cplx_result)
5785 tree lhs = gimple_call_lhs (stmt);
5786 if (lhs == NULL_TREE)
5787 type = NULL_TREE;
5788 else
5789 type = TREE_TYPE (TREE_TYPE (lhs));
5790 if (uaddc_usubc)
5791 arg2 = gimple_call_arg (stmt, 2);
5793 if (type == NULL_TREE)
5795 else if (uaddc_usubc)
5797 if (!integer_zerop (arg2))
5799 /* x = y + 0 + 0; x = y - 0 - 0; */
5800 else if (integer_zerop (arg1))
5801 result = arg0;
5802 /* x = 0 + y + 0; */
5803 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5804 result = arg1;
5805 /* x = y - y - 0; */
5806 else if (subcode == MINUS_EXPR
5807 && operand_equal_p (arg0, arg1, 0))
5808 result = integer_zero_node;
5810 /* x = y + 0; x = y - 0; x = y * 0; */
5811 else if (integer_zerop (arg1))
5812 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5813 /* x = 0 + y; x = 0 * y; */
5814 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5815 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5816 /* x = y - y; */
5817 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5818 result = integer_zero_node;
5819 /* x = y * 1; x = 1 * y; */
5820 else if (subcode == MULT_EXPR && integer_onep (arg1))
5821 result = arg0;
5822 else if (subcode == MULT_EXPR && integer_onep (arg0))
5823 result = arg1;
5824 if (result)
5826 if (result == integer_zero_node)
5827 result = build_zero_cst (type);
5828 else if (cplx_result && TREE_TYPE (result) != type)
5830 if (TREE_CODE (result) == INTEGER_CST)
5832 if (arith_overflowed_p (PLUS_EXPR, type, result,
5833 integer_zero_node))
5834 overflow = build_one_cst (type);
5836 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5837 && TYPE_UNSIGNED (type))
5838 || (TYPE_PRECISION (type)
5839 < (TYPE_PRECISION (TREE_TYPE (result))
5840 + (TYPE_UNSIGNED (TREE_TYPE (result))
5841 && !TYPE_UNSIGNED (type)))))
5842 result = NULL_TREE;
5843 if (result)
5844 result = fold_convert (type, result);
5849 if (result)
5851 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5852 result = drop_tree_overflow (result);
5853 if (cplx_result)
5855 if (overflow == NULL_TREE)
5856 overflow = build_zero_cst (TREE_TYPE (result));
5857 tree ctype = build_complex_type (TREE_TYPE (result));
5858 if (TREE_CODE (result) == INTEGER_CST
5859 && TREE_CODE (overflow) == INTEGER_CST)
5860 result = build_complex (ctype, result, overflow);
5861 else
5862 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5863 ctype, result, overflow);
5865 gimplify_and_update_call_from_tree (gsi, result);
5866 changed = true;
5870 return changed;
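/* Illustrative example of the internal-fn handling above: for
     res_1 = .ADD_OVERFLOW (x_2, 0);
   ARG1 is zero, so RESULT becomes x_2 and the call is replaced by
     res_1 = COMPLEX_EXPR <x_2, 0>;
   i.e. the value part is x_2 and the overflow flag is known to be zero.  */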
5874 /* Return true if NAME has a use on STMT. Note this can return
5875 false even though there's a use on STMT if SSA operands are not
5876 up-to-date. */
5878 static bool
5879 has_use_on_stmt (tree name, gimple *stmt)
5881 ssa_op_iter iter;
5882 tree op;
5883 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5884 if (op == name)
5885 return true;
5886 return false;
5889 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5890 gimple_simplify.
5892 Replaces *GSI with the simplification result in RES_OP
5893 and the associated statements in *SEQ. Does the replacement
5894 according to INPLACE and returns true if the operation succeeded. */
5896 static bool
5897 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5898 gimple_match_op *res_op,
5899 gimple_seq *seq, bool inplace)
5901 gimple *stmt = gsi_stmt (*gsi);
5902 tree *ops = res_op->ops;
5903 unsigned int num_ops = res_op->num_ops;
5905 /* Play safe and do not allow abnormals to be mentioned in
5906 newly created statements. See also maybe_push_res_to_seq.
5907 As an exception allow such uses if there was a use of the
5908 same SSA name on the old stmt. */
5909 for (unsigned int i = 0; i < num_ops; ++i)
5910 if (TREE_CODE (ops[i]) == SSA_NAME
5911 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5912 && !has_use_on_stmt (ops[i], stmt))
5913 return false;
5915 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5916 for (unsigned int i = 0; i < 2; ++i)
5917 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5918 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5919 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5920 return false;
5922 /* Don't insert new statements when INPLACE is true, even if we could
5923 reuse STMT for the final statement. */
5924 if (inplace && !gimple_seq_empty_p (*seq))
5925 return false;
5927 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5929 gcc_assert (res_op->code.is_tree_code ());
5930 auto code = tree_code (res_op->code);
5931 if (TREE_CODE_CLASS (code) == tcc_comparison
5932 /* GIMPLE_CONDs condition may not throw. */
5933 && (!flag_exceptions
5934 || !cfun->can_throw_non_call_exceptions
5935 || !operation_could_trap_p (code,
5936 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5937 false, NULL_TREE)))
5938 gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5939 else if (code == SSA_NAME)
5940 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5941 build_zero_cst (TREE_TYPE (ops[0])));
5942 else if (code == INTEGER_CST)
5944 if (integer_zerop (ops[0]))
5945 gimple_cond_make_false (cond_stmt);
5946 else
5947 gimple_cond_make_true (cond_stmt);
5949 else if (!inplace)
5951 tree res = maybe_push_res_to_seq (res_op, seq);
5952 if (!res)
5953 return false;
5954 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5955 build_zero_cst (TREE_TYPE (res)));
5957 else
5958 return false;
5959 if (dump_file && (dump_flags & TDF_DETAILS))
5961 fprintf (dump_file, "gimple_simplified to ");
5962 if (!gimple_seq_empty_p (*seq))
5963 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5964 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5965 0, TDF_SLIM);
5967 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5968 return true;
5970 else if (is_gimple_assign (stmt)
5971 && res_op->code.is_tree_code ())
5973 auto code = tree_code (res_op->code);
5974 if (!inplace
5975 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5977 maybe_build_generic_op (res_op);
5978 gimple_assign_set_rhs_with_ops (gsi, code,
5979 res_op->op_or_null (0),
5980 res_op->op_or_null (1),
5981 res_op->op_or_null (2));
5982 if (dump_file && (dump_flags & TDF_DETAILS))
5984 fprintf (dump_file, "gimple_simplified to ");
5985 if (!gimple_seq_empty_p (*seq))
5986 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5987 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5988 0, TDF_SLIM);
5990 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5991 return true;
5994 else if (res_op->code.is_fn_code ()
5995 && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
5997 gcc_assert (num_ops == gimple_call_num_args (stmt));
5998 for (unsigned int i = 0; i < num_ops; ++i)
5999 gimple_call_set_arg (stmt, i, ops[i]);
6000 if (dump_file && (dump_flags & TDF_DETAILS))
6002 fprintf (dump_file, "gimple_simplified to ");
6003 if (!gimple_seq_empty_p (*seq))
6004 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
6005 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
6007 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
6008 return true;
6010 else if (!inplace)
6012 if (gimple_has_lhs (stmt))
6014 tree lhs = gimple_get_lhs (stmt);
6015 if (!maybe_push_res_to_seq (res_op, seq, lhs))
6016 return false;
6017 if (dump_file && (dump_flags & TDF_DETAILS))
6019 fprintf (dump_file, "gimple_simplified to ");
6020 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
6022 gsi_replace_with_seq_vops (gsi, *seq);
6023 return true;
6025 else
6026 gcc_unreachable ();
6029 return false;
6032 /* Canonicalize MEM_REFs invariant address operand after propagation. */
6034 static bool
6035 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
6037 bool res = false;
6038 tree *orig_t = t;
6040 if (TREE_CODE (*t) == ADDR_EXPR)
6041 t = &TREE_OPERAND (*t, 0);
6043 /* The C and C++ frontends use an ARRAY_REF for indexing with their
6044 generic vector extension. The actual vector referenced is
6045 view-converted to an array type for this purpose. If the index
6046 is constant the canonical representation in the middle-end is a
6047 BIT_FIELD_REF so re-write the former to the latter here. */
6048 if (TREE_CODE (*t) == ARRAY_REF
6049 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
6050 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
6051 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
6053 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
6054 if (VECTOR_TYPE_P (vtype))
6056 tree low = array_ref_low_bound (*t);
6057 if (TREE_CODE (low) == INTEGER_CST)
6059 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
6061 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
6062 wi::to_widest (low));
6063 idx = wi::mul (idx, wi::to_widest
6064 (TYPE_SIZE (TREE_TYPE (*t))));
6065 widest_int ext
6066 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
6067 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
6069 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
6070 TREE_TYPE (*t),
6071 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
6072 TYPE_SIZE (TREE_TYPE (*t)),
6073 wide_int_to_tree (bitsizetype, idx));
6074 res = true;
6081 while (handled_component_p (*t))
6082 t = &TREE_OPERAND (*t, 0);
6084 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
6085 of invariant addresses into a SSA name MEM_REF address. */
6086 if (TREE_CODE (*t) == MEM_REF
6087 || TREE_CODE (*t) == TARGET_MEM_REF)
6089 tree addr = TREE_OPERAND (*t, 0);
6090 if (TREE_CODE (addr) == ADDR_EXPR
6091 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
6092 || handled_component_p (TREE_OPERAND (addr, 0))))
6094 tree base;
6095 poly_int64 coffset;
6096 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
6097 &coffset);
6098 if (!base)
6100 if (is_debug)
6101 return false;
6102 gcc_unreachable ();
6105 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
6106 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
6107 TREE_OPERAND (*t, 1),
6108 size_int (coffset));
6109 res = true;
6111 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
6112 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
6115 /* Canonicalize back MEM_REFs to plain reference trees if the object
6116 accessed is a decl that has the same access semantics as the MEM_REF. */
6117 if (TREE_CODE (*t) == MEM_REF
6118 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
6119 && integer_zerop (TREE_OPERAND (*t, 1))
6120 && MR_DEPENDENCE_CLIQUE (*t) == 0)
6122 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6123 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6124 if (/* Same volatile qualification. */
6125 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6126 /* Same TBAA behavior with -fstrict-aliasing. */
6127 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6128 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6129 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6130 /* Same alignment. */
6131 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6132 /* We have to look out here to not drop a required conversion
6133 from the rhs to the lhs if *t appears on the lhs or vice-versa
6134 if it appears on the rhs. Thus require strict type
6135 compatibility. */
6136 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6138 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6139 res = true;
6143 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6144 && TREE_CODE (*t) == MEM_REF
6145 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6147 tree base;
6148 poly_int64 coffset;
6149 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6150 &coffset);
6151 if (base)
6153 gcc_assert (TREE_CODE (base) == MEM_REF);
6154 poly_int64 moffset;
6155 if (mem_ref_offset (base).to_shwi (&moffset))
6157 coffset += moffset;
6158 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6160 coffset += moffset;
6161 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6162 return true;
6168 /* Canonicalize TARGET_MEM_REF in particular with respect to
6169 the indexes becoming constant. */
6170 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6172 tree tem = maybe_fold_tmr (*t);
6173 if (tem)
6175 *t = tem;
6176 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6177 recompute_tree_invariant_for_addr_expr (*orig_t);
6178 res = true;
6182 return res;
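/* Two illustrative cases of the above: a propagated address MEM[&a.b, 4]
   is rebased via get_addr_base_and_unit_offset so the offset of b is folded
   into the constant operand, while MEM[&decl, 0] with matching volatility,
   TBAA main variant and alignment is canonicalized back to plain "decl".  */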
6185 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6186 distinguishes both cases. */
6188 static bool
6189 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6191 bool changed = false;
6192 gimple *stmt = gsi_stmt (*gsi);
6193 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6194 unsigned i;
6195 fold_defer_overflow_warnings ();
6197 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6198 after propagation.
6199 ??? This shouldn't be done in generic folding but in the
6200 propagation helpers which also know whether an address was
6201 propagated.
6202 Also canonicalize operand order. */
6203 switch (gimple_code (stmt))
6205 case GIMPLE_ASSIGN:
6206 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6208 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6209 if ((REFERENCE_CLASS_P (*rhs)
6210 || TREE_CODE (*rhs) == ADDR_EXPR)
6211 && maybe_canonicalize_mem_ref_addr (rhs))
6212 changed = true;
6213 tree *lhs = gimple_assign_lhs_ptr (stmt);
6214 if (REFERENCE_CLASS_P (*lhs)
6215 && maybe_canonicalize_mem_ref_addr (lhs))
6216 changed = true;
6217 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6218 This cannot be done in maybe_canonicalize_mem_ref_addr
6219 as the gimple would then have two operands rather than one;
6220 for the same reason it cannot be done inplace. */
6223 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6225 tree inner = TREE_OPERAND (*rhs, 0);
6226 if (TREE_CODE (inner) == MEM_REF
6227 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6228 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6230 tree ptr = TREE_OPERAND (inner, 0);
6231 tree addon = TREE_OPERAND (inner, 1);
6232 addon = fold_convert (sizetype, addon);
6233 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6234 ptr, addon);
6235 changed = true;
6236 stmt = gsi_stmt (*gsi);
6240 else
6242 /* Canonicalize operand order. */
6243 enum tree_code code = gimple_assign_rhs_code (stmt);
6244 if (TREE_CODE_CLASS (code) == tcc_comparison
6245 || commutative_tree_code (code)
6246 || commutative_ternary_tree_code (code))
6248 tree rhs1 = gimple_assign_rhs1 (stmt);
6249 tree rhs2 = gimple_assign_rhs2 (stmt);
6250 if (tree_swap_operands_p (rhs1, rhs2))
6252 gimple_assign_set_rhs1 (stmt, rhs2);
6253 gimple_assign_set_rhs2 (stmt, rhs1);
6254 if (TREE_CODE_CLASS (code) == tcc_comparison)
6255 gimple_assign_set_rhs_code (stmt,
6256 swap_tree_comparison (code));
6257 changed = true;
6261 break;
6262 case GIMPLE_CALL:
6264 gcall *call = as_a<gcall *> (stmt);
6265 for (i = 0; i < gimple_call_num_args (call); ++i)
6267 tree *arg = gimple_call_arg_ptr (call, i);
6268 if (REFERENCE_CLASS_P (*arg)
6269 && maybe_canonicalize_mem_ref_addr (arg))
6270 changed = true;
6272 tree *lhs = gimple_call_lhs_ptr (call);
6273 if (*lhs
6274 && REFERENCE_CLASS_P (*lhs)
6275 && maybe_canonicalize_mem_ref_addr (lhs))
6276 changed = true;
6277 if (*lhs)
6279 combined_fn cfn = gimple_call_combined_fn (call);
6280 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6281 int opno = first_commutative_argument (ifn);
6282 if (opno >= 0)
6284 tree arg1 = gimple_call_arg (call, opno);
6285 tree arg2 = gimple_call_arg (call, opno + 1);
6286 if (tree_swap_operands_p (arg1, arg2))
6288 gimple_call_set_arg (call, opno, arg2);
6289 gimple_call_set_arg (call, opno + 1, arg1);
6290 changed = true;
6294 break;
6296 case GIMPLE_ASM:
6298 gasm *asm_stmt = as_a <gasm *> (stmt);
6299 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6301 tree link = gimple_asm_output_op (asm_stmt, i);
6302 tree op = TREE_VALUE (link);
6303 if (REFERENCE_CLASS_P (op)
6304 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6305 changed = true;
6307 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6309 tree link = gimple_asm_input_op (asm_stmt, i);
6310 tree op = TREE_VALUE (link);
6311 if ((REFERENCE_CLASS_P (op)
6312 || TREE_CODE (op) == ADDR_EXPR)
6313 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6314 changed = true;
6317 break;
6318 case GIMPLE_DEBUG:
6319 if (gimple_debug_bind_p (stmt))
6321 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6322 if (*val
6323 && (REFERENCE_CLASS_P (*val)
6324 || TREE_CODE (*val) == ADDR_EXPR)
6325 && maybe_canonicalize_mem_ref_addr (val, true))
6326 changed = true;
6328 break;
6329 case GIMPLE_COND:
6331 /* Canonicalize operand order. */
6332 tree lhs = gimple_cond_lhs (stmt);
6333 tree rhs = gimple_cond_rhs (stmt);
6334 if (tree_swap_operands_p (lhs, rhs))
6336 gcond *gc = as_a <gcond *> (stmt);
6337 gimple_cond_set_lhs (gc, rhs);
6338 gimple_cond_set_rhs (gc, lhs);
6339 gimple_cond_set_code (gc,
6340 swap_tree_comparison (gimple_cond_code (gc)));
6341 changed = true;
6344 default:;
6347 /* Dispatch to pattern-based folding. */
6348 if (!inplace
6349 || is_gimple_assign (stmt)
6350 || gimple_code (stmt) == GIMPLE_COND)
6352 gimple_seq seq = NULL;
6353 gimple_match_op res_op;
6354 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6355 valueize, valueize))
6357 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6358 changed = true;
6359 else
6360 gimple_seq_discard (seq);
6364 stmt = gsi_stmt (*gsi);
6366 /* Fold the main computation performed by the statement. */
6367 switch (gimple_code (stmt))
6369 case GIMPLE_ASSIGN:
6371 /* Try to canonicalize for boolean-typed X the comparisons
6372 X == 0, X == 1, X != 0, and X != 1. */
6373 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6374 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6376 tree lhs = gimple_assign_lhs (stmt);
6377 tree op1 = gimple_assign_rhs1 (stmt);
6378 tree op2 = gimple_assign_rhs2 (stmt);
6379 tree type = TREE_TYPE (op1);
6381 /* Check whether the comparison operands are of the same boolean
6382 type as the result type.
6383 Check that the second operand is an integer constant with value
6384 one or zero. */
6385 if (TREE_CODE (op2) == INTEGER_CST
6386 && (integer_zerop (op2) || integer_onep (op2))
6387 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6389 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6390 bool is_logical_not = false;
6392 /* X == 0 and X != 1 is a logical-not of X;
6393 X == 1 and X != 0 is X itself. */
6394 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6395 || (cmp_code == NE_EXPR && integer_onep (op2)))
6396 is_logical_not = true;
6398 if (is_logical_not == false)
6399 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6400 /* Only for one-bit precision typed X is the transformation
6401 !X -> ~X valid. */
6402 else if (TYPE_PRECISION (type) == 1)
6403 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6404 /* Otherwise we use !X -> X ^ 1. */
6405 else
6406 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6407 build_int_cst (type, 1));
6408 changed = true;
6409 break;
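/* For example (illustrative), with boolean-typed X: X == 1 becomes plain X,
   while X == 0 becomes ~X when TYPE has one-bit precision and X ^ 1
   otherwise.  */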
6413 unsigned old_num_ops = gimple_num_ops (stmt);
6414 tree lhs = gimple_assign_lhs (stmt);
6415 tree new_rhs = fold_gimple_assign (gsi);
6416 if (new_rhs
6417 && !useless_type_conversion_p (TREE_TYPE (lhs),
6418 TREE_TYPE (new_rhs)))
6419 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6420 if (new_rhs
6421 && (!inplace
6422 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6424 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6425 changed = true;
6427 break;
6430 case GIMPLE_CALL:
6431 changed |= gimple_fold_call (gsi, inplace);
6432 break;
6434 case GIMPLE_DEBUG:
6435 if (gimple_debug_bind_p (stmt))
6437 tree val = gimple_debug_bind_get_value (stmt);
6438 if (val && REFERENCE_CLASS_P (val))
6440 tree tem = maybe_fold_reference (val);
6441 if (tem)
6443 gimple_debug_bind_set_value (stmt, tem);
6444 changed = true;
6448 break;
6450 case GIMPLE_RETURN:
6452 greturn *ret_stmt = as_a <greturn *> (stmt);
6453 tree ret = gimple_return_retval (ret_stmt);
6455 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6457 tree val = valueize (ret);
6458 if (val && val != ret
6459 && may_propagate_copy (ret, val))
6461 gimple_return_set_retval (ret_stmt, val);
6462 changed = true;
6466 break;
6468 default:;
6471 stmt = gsi_stmt (*gsi);
6473 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6474 return changed;
6477 /* Valueization callback that ends up not following SSA edges. */
6479 tree
6480 no_follow_ssa_edges (tree)
6482 return NULL_TREE;
6485 /* Valueization callback that ends up following single-use SSA edges only. */
6487 tree
6488 follow_single_use_edges (tree val)
6490 if (TREE_CODE (val) == SSA_NAME
6491 && !has_single_use (val))
6492 return NULL_TREE;
6493 return val;
6496 /* Valueization callback that follows all SSA edges. */
6498 tree
6499 follow_all_ssa_edges (tree val)
6501 return val;
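/* The three valueization callbacks above are meant for the fold_stmt
   overload below: e.g. fold_stmt (gsi, follow_single_use_edges) lets
   gimple_simplify look through an SSA definition only when the name
   has a single use.  */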
6504 /* Fold the statement pointed to by GSI. In some cases, this function may
6505 replace the whole statement with a new one. Returns true iff folding
6506 makes any changes.
6507 The statement pointed to by GSI should be in valid gimple form but may
6508 be in unfolded state as resulting from for example constant propagation
6509 which can produce *&x = 0. */
6511 bool
6512 fold_stmt (gimple_stmt_iterator *gsi)
6514 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6517 bool
6518 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6520 return fold_stmt_1 (gsi, false, valueize);
6523 /* Perform the minimal folding on statement *GSI. Only operations like
6524 *&x created by constant propagation are handled. The statement cannot
6525 be replaced with a new one. Return true if the statement was
6526 changed, false otherwise.
6527 The statement *GSI should be in valid gimple form but may
6528 be in unfolded state as resulting from for example constant propagation
6529 which can produce *&x = 0. */
6531 bool
6532 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6534 gimple *stmt = gsi_stmt (*gsi);
6535 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6536 gcc_assert (gsi_stmt (*gsi) == stmt);
6537 return changed;
6540 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6541 if EXPR is null or we don't know how.
6542 If non-null, the result always has boolean type. */
6544 static tree
6545 canonicalize_bool (tree expr, bool invert)
6547 if (!expr)
6548 return NULL_TREE;
6549 else if (invert)
6551 if (integer_nonzerop (expr))
6552 return boolean_false_node;
6553 else if (integer_zerop (expr))
6554 return boolean_true_node;
6555 else if (TREE_CODE (expr) == SSA_NAME)
6556 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6557 build_int_cst (TREE_TYPE (expr), 0));
6558 else if (COMPARISON_CLASS_P (expr))
6559 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6560 boolean_type_node,
6561 TREE_OPERAND (expr, 0),
6562 TREE_OPERAND (expr, 1));
6563 else
6564 return NULL_TREE;
6566 else
6568 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6569 return expr;
6570 if (integer_nonzerop (expr))
6571 return boolean_true_node;
6572 else if (integer_zerop (expr))
6573 return boolean_false_node;
6574 else if (TREE_CODE (expr) == SSA_NAME)
6575 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6576 build_int_cst (TREE_TYPE (expr), 0));
6577 else if (COMPARISON_CLASS_P (expr))
6578 return fold_build2 (TREE_CODE (expr),
6579 boolean_type_node,
6580 TREE_OPERAND (expr, 0),
6581 TREE_OPERAND (expr, 1));
6582 else
6583 return NULL_TREE;
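/* Illustrative: canonicalize_bool (a_1 < b_2, true) builds a_1 >= b_2 in
   boolean_type_node, while canonicalize_bool (name_3, false) for a
   non-boolean integer name_3 builds name_3 != 0.  */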
6587 /* Check to see if a boolean expression EXPR is logically equivalent to the
6588 comparison (OP1 CODE OP2). Check for various identities involving
6589 SSA_NAMEs. */
6591 static bool
6592 same_bool_comparison_p (const_tree expr, enum tree_code code,
6593 const_tree op1, const_tree op2)
6595 gimple *s;
6597 /* The obvious case. */
6598 if (TREE_CODE (expr) == code
6599 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6600 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6601 return true;
6603 /* Check for comparing (name, name != 0) and the case where expr
6604 is an SSA_NAME with a definition matching the comparison. */
6605 if (TREE_CODE (expr) == SSA_NAME
6606 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6608 if (operand_equal_p (expr, op1, 0))
6609 return ((code == NE_EXPR && integer_zerop (op2))
6610 || (code == EQ_EXPR && integer_nonzerop (op2)));
6611 s = SSA_NAME_DEF_STMT (expr);
6612 if (is_gimple_assign (s)
6613 && gimple_assign_rhs_code (s) == code
6614 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6615 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6616 return true;
6619 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6620 of name is a comparison, recurse. */
6621 if (TREE_CODE (op1) == SSA_NAME
6622 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6624 s = SSA_NAME_DEF_STMT (op1);
6625 if (is_gimple_assign (s)
6626 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6628 enum tree_code c = gimple_assign_rhs_code (s);
6629 if ((c == NE_EXPR && integer_zerop (op2))
6630 || (c == EQ_EXPR && integer_nonzerop (op2)))
6631 return same_bool_comparison_p (expr, c,
6632 gimple_assign_rhs1 (s),
6633 gimple_assign_rhs2 (s));
6634 if ((c == EQ_EXPR && integer_zerop (op2))
6635 || (c == NE_EXPR && integer_nonzerop (op2)))
6636 return same_bool_comparison_p (expr,
6637 invert_tree_comparison (c, false),
6638 gimple_assign_rhs1 (s),
6639 gimple_assign_rhs2 (s));
6642 return false;
6645 /* Check to see if two boolean expressions OP1 and OP2 are logically
6646 equivalent. */
6648 static bool
6649 same_bool_result_p (const_tree op1, const_tree op2)
6651 /* Simple cases first. */
6652 if (operand_equal_p (op1, op2, 0))
6653 return true;
6655 /* Check the cases where at least one of the operands is a comparison.
6656 These are a bit smarter than operand_equal_p in that they apply some
6657 identities on SSA_NAMEs. */
6658 if (COMPARISON_CLASS_P (op2)
6659 && same_bool_comparison_p (op1, TREE_CODE (op2),
6660 TREE_OPERAND (op2, 0),
6661 TREE_OPERAND (op2, 1)))
6662 return true;
6663 if (COMPARISON_CLASS_P (op1)
6664 && same_bool_comparison_p (op2, TREE_CODE (op1),
6665 TREE_OPERAND (op1, 0),
6666 TREE_OPERAND (op1, 1)))
6667 return true;
6669 /* Default case. */
6670 return false;
6673 /* Forward declarations for some mutually recursive functions. */
6675 static tree
6676 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6677 enum tree_code code2, tree op2a, tree op2b, basic_block);
6678 static tree
6679 and_var_with_comparison (tree type, tree var, bool invert,
6680 enum tree_code code2, tree op2a, tree op2b,
6681 basic_block);
6682 static tree
6683 and_var_with_comparison_1 (tree type, gimple *stmt,
6684 enum tree_code code2, tree op2a, tree op2b,
6685 basic_block);
6686 static tree
6687 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6688 enum tree_code code2, tree op2a, tree op2b,
6689 basic_block);
6690 static tree
6691 or_var_with_comparison (tree, tree var, bool invert,
6692 enum tree_code code2, tree op2a, tree op2b,
6693 basic_block);
6694 static tree
6695 or_var_with_comparison_1 (tree, gimple *stmt,
6696 enum tree_code code2, tree op2a, tree op2b,
6697 basic_block);
6699 /* Helper function for and_comparisons_1: try to simplify the AND of the
6700 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6701 If INVERT is true, invert the value of VAR before doing the AND.
6702 Return NULL_TREE if we can't simplify this to a single expression. */
6704 static tree
6705 and_var_with_comparison (tree type, tree var, bool invert,
6706 enum tree_code code2, tree op2a, tree op2b,
6707 basic_block outer_cond_bb)
6709 tree t;
6710 gimple *stmt = SSA_NAME_DEF_STMT (var);
6712 /* We can only deal with variables whose definitions are assignments. */
6713 if (!is_gimple_assign (stmt))
6714 return NULL_TREE;
6716 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6717 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6718 Then we only have to consider the simpler non-inverted cases. */
6719 if (invert)
6720 t = or_var_with_comparison_1 (type, stmt,
6721 invert_tree_comparison (code2, false),
6722 op2a, op2b, outer_cond_bb);
6723 else
6724 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6725 outer_cond_bb);
6726 return canonicalize_bool (t, invert);
6729 /* Try to simplify the AND of the ssa variable defined by the assignment
6730 STMT with the comparison specified by (OP2A CODE2 OP2B).
6731 Return NULL_TREE if we can't simplify this to a single expression. */
6733 static tree
6734 and_var_with_comparison_1 (tree type, gimple *stmt,
6735 enum tree_code code2, tree op2a, tree op2b,
6736 basic_block outer_cond_bb)
6738 tree var = gimple_assign_lhs (stmt);
6739 tree true_test_var = NULL_TREE;
6740 tree false_test_var = NULL_TREE;
6741 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6743 /* Check for identities like (var AND (var == 0)) => false. */
6744 if (TREE_CODE (op2a) == SSA_NAME
6745 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6747 if ((code2 == NE_EXPR && integer_zerop (op2b))
6748 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6750 true_test_var = op2a;
6751 if (var == true_test_var)
6752 return var;
6754 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6755 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6757 false_test_var = op2a;
6758 if (var == false_test_var)
6759 return boolean_false_node;
6763 /* If the definition is a comparison, recurse on it. */
6764 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6766 tree t = and_comparisons_1 (type, innercode,
6767 gimple_assign_rhs1 (stmt),
6768 gimple_assign_rhs2 (stmt),
6769 code2,
6770 op2a,
6771 op2b, outer_cond_bb);
6772 if (t)
6773 return t;
6776 /* If the definition is an AND or OR expression, we may be able to
6777 simplify by reassociating. */
6778 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6779 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6781 tree inner1 = gimple_assign_rhs1 (stmt);
6782 tree inner2 = gimple_assign_rhs2 (stmt);
6783 gimple *s;
6784 tree t;
6785 tree partial = NULL_TREE;
6786 bool is_and = (innercode == BIT_AND_EXPR);
6788 /* Check for boolean identities that don't require recursive examination
6789 of inner1/inner2:
6790 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6791 inner1 AND (inner1 OR inner2) => inner1
6792 !inner1 AND (inner1 AND inner2) => false
6793 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6794 Likewise for similar cases involving inner2. */
6795 if (inner1 == true_test_var)
6796 return (is_and ? var : inner1);
6797 else if (inner2 == true_test_var)
6798 return (is_and ? var : inner2);
6799 else if (inner1 == false_test_var)
6800 return (is_and
6801 ? boolean_false_node
6802 : and_var_with_comparison (type, inner2, false, code2, op2a,
6803 op2b, outer_cond_bb));
6804 else if (inner2 == false_test_var)
6805 return (is_and
6806 ? boolean_false_node
6807 : and_var_with_comparison (type, inner1, false, code2, op2a,
6808 op2b, outer_cond_bb));
6810 /* Next, redistribute/reassociate the AND across the inner tests.
6811 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6812 if (TREE_CODE (inner1) == SSA_NAME
6813 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6814 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6815 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6816 gimple_assign_rhs1 (s),
6817 gimple_assign_rhs2 (s),
6818 code2, op2a, op2b,
6819 outer_cond_bb)))
6821 /* Handle the AND case, where we are reassociating:
6822 (inner1 AND inner2) AND (op2a code2 op2b)
6823 => (t AND inner2)
6824 If the partial result t is a constant, we win. Otherwise
6825 continue on to try reassociating with the other inner test. */
6826 if (is_and)
6828 if (integer_onep (t))
6829 return inner2;
6830 else if (integer_zerop (t))
6831 return boolean_false_node;
6834 /* Handle the OR case, where we are redistributing:
6835 (inner1 OR inner2) AND (op2a code2 op2b)
6836 => (t OR (inner2 AND (op2a code2 op2b))) */
6837 else if (integer_onep (t))
6838 return boolean_true_node;
6840 /* Save partial result for later. */
6841 partial = t;
6844 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6845 if (TREE_CODE (inner2) == SSA_NAME
6846 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6847 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6848 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6849 gimple_assign_rhs1 (s),
6850 gimple_assign_rhs2 (s),
6851 code2, op2a, op2b,
6852 outer_cond_bb)))
6854 /* Handle the AND case, where we are reassociating:
6855 (inner1 AND inner2) AND (op2a code2 op2b)
6856 => (inner1 AND t) */
6857 if (is_and)
6859 if (integer_onep (t))
6860 return inner1;
6861 else if (integer_zerop (t))
6862 return boolean_false_node;
6863 /* If both are the same, we can apply the identity
6864 (x AND x) == x. */
6865 else if (partial && same_bool_result_p (t, partial))
6866 return t;
6869 /* Handle the OR case, where we are redistributing:
6870 (inner1 OR inner2) AND (op2a code2 op2b)
6871 => (t OR (inner1 AND (op2a code2 op2b)))
6872 => (t OR partial) */
6873 else
6875 if (integer_onep (t))
6876 return boolean_true_node;
6877 else if (partial)
6879 /* We already got a simplification for the other
6880 operand to the redistributed OR expression. The
6881 interesting case is when at least one is false.
6882 Or, if both are the same, we can apply the identity
6883 (x OR x) == x. */
6884 if (integer_zerop (partial))
6885 return t;
6886 else if (integer_zerop (t))
6887 return partial;
6888 else if (same_bool_result_p (t, partial))
6889 return t;
6894 return NULL_TREE;
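/* Illustrative instance of the reassociation above: given boolean
     var_1 = a_2 & b_3   with a_2 defined as x_4 < 0,
   ANDing var_1 with (x_4 >= 0) folds the inner1 partial result to false,
   so the whole expression simplifies to boolean_false_node.  */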
6897 /* Try to simplify the AND of two comparisons defined by
6898 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6899 If this can be done without constructing an intermediate value,
6900 return the resulting tree; otherwise NULL_TREE is returned.
6901 This function is deliberately asymmetric as it recurses on SSA_DEFs
6902 in the first comparison but not the second. */
6904 static tree
6905 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6906 enum tree_code code2, tree op2a, tree op2b,
6907 basic_block outer_cond_bb)
6909 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6911 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6912 if (operand_equal_p (op1a, op2a, 0)
6913 && operand_equal_p (op1b, op2b, 0))
6915 /* Result will be either NULL_TREE, or a combined comparison. */
6916 tree t = combine_comparisons (UNKNOWN_LOCATION,
6917 TRUTH_ANDIF_EXPR, code1, code2,
6918 truth_type, op1a, op1b);
6919 if (t)
6920 return t;
6923 /* Likewise the swapped case of the above. */
6924 if (operand_equal_p (op1a, op2b, 0)
6925 && operand_equal_p (op1b, op2a, 0))
6927 /* Result will be either NULL_TREE, or a combined comparison. */
6928 tree t = combine_comparisons (UNKNOWN_LOCATION,
6929 TRUTH_ANDIF_EXPR, code1,
6930 swap_tree_comparison (code2),
6931 truth_type, op1a, op1b);
6932 if (t)
6933 return t;
6936 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6937 NAME's definition is a truth value. See if there are any simplifications
6938 that can be done against the NAME's definition. */
6939 if (TREE_CODE (op1a) == SSA_NAME
6940 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6941 && (integer_zerop (op1b) || integer_onep (op1b)))
6943 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6944 || (code1 == NE_EXPR && integer_onep (op1b)));
6945 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6946 switch (gimple_code (stmt))
6948 case GIMPLE_ASSIGN:
6949 /* Try to simplify by copy-propagating the definition. */
6950 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6951 op2b, outer_cond_bb);
6953 case GIMPLE_PHI:
6954 /* If every argument to the PHI produces the same result when
6955 ANDed with the second comparison, we win.
6956 Do not do this unless the type is bool since we need a bool
6957 result here anyway. */
6958 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6960 tree result = NULL_TREE;
6961 unsigned i;
6962 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6964 tree arg = gimple_phi_arg_def (stmt, i);
6966 /* If this PHI has itself as an argument, ignore it.
6967 If all the other args produce the same result,
6968 we're still OK. */
6969 if (arg == gimple_phi_result (stmt))
6970 continue;
6971 else if (TREE_CODE (arg) == INTEGER_CST)
6973 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6975 if (!result)
6976 result = boolean_false_node;
6977 else if (!integer_zerop (result))
6978 return NULL_TREE;
6980 else if (!result)
6981 result = fold_build2 (code2, boolean_type_node,
6982 op2a, op2b);
6983 else if (!same_bool_comparison_p (result,
6984 code2, op2a, op2b))
6985 return NULL_TREE;
6987 else if (TREE_CODE (arg) == SSA_NAME
6988 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6990 tree temp;
6991 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6992 /* In simple cases we can look through PHI nodes,
6993 but we have to be careful with loops.
6994 See PR49073. */
6995 if (! dom_info_available_p (CDI_DOMINATORS)
6996 || gimple_bb (def_stmt) == gimple_bb (stmt)
6997 || dominated_by_p (CDI_DOMINATORS,
6998 gimple_bb (def_stmt),
6999 gimple_bb (stmt)))
7000 return NULL_TREE;
7001 temp = and_var_with_comparison (type, arg, invert, code2,
7002 op2a, op2b,
7003 outer_cond_bb);
7004 if (!temp)
7005 return NULL_TREE;
7006 else if (!result)
7007 result = temp;
7008 else if (!same_bool_result_p (result, temp))
7009 return NULL_TREE;
7011 else
7012 return NULL_TREE;
7014 return result;
7017 default:
7018 break;
7021 return NULL_TREE;
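/* For example (illustrative, integral operands): and_comparisons_1 folds
   (x_1 <= 10) AND (x_1 >= 10) via combine_comparisons into x_1 == 10, and
   (x_1 < 5) AND (x_1 > 7) into constant false.  */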
7024 static basic_block fosa_bb;
7025 static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
7026 static tree
7027 follow_outer_ssa_edges (tree val)
7029 if (TREE_CODE (val) == SSA_NAME
7030 && !SSA_NAME_IS_DEFAULT_DEF (val))
7032 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
7033 if (!def_bb
7034 || def_bb == fosa_bb
7035 || (dom_info_available_p (CDI_DOMINATORS)
7036 && (def_bb == fosa_bb
7037 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
7038 return val;
7039 /* We cannot temporarily rewrite stmts with undefined overflow
7040 behavior, so avoid expanding them. */
7041 if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
7042 || POINTER_TYPE_P (TREE_TYPE (val)))
7043 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
7044 return NULL_TREE;
7045 flow_sensitive_info_storage storage;
7046 storage.save_and_clear (val);
7047 /* If the definition does not dominate fosa_bb, temporarily reset
7048 flow-sensitive info. */
7049 fosa_unwind->safe_push (std::make_pair (val, storage));
7050 return val;
7052 return val;
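/* Whoever follows SSA edges through this callback must restore the
   flow-sensitive info saved on *FOSA_UNWIND afterwards;
   maybe_fold_comparisons_from_match_pd below pops and restores every
   entry once resimplification has finished.  */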
7055 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
7056 try to simplify the CODE (BIT_AND_EXPR or BIT_IOR_EXPR) of the two
7057 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
7058 Return NULL_TREE if we can't simplify this to a single expression.
7059 To significantly lower the cost of building SSA names / gimple stmts,
7060 we allocate them on the stack; this makes the code a bit ugly. */
7062 static tree
7063 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
7064 enum tree_code code1,
7065 tree op1a, tree op1b,
7066 enum tree_code code2, tree op2a,
7067 tree op2b,
7068 basic_block outer_cond_bb)
7070 /* Allocate gimple stmt1 on the stack. */
7071 gassign *stmt1
7072 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7073 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
7074 gimple_assign_set_rhs_code (stmt1, code1);
7075 gimple_assign_set_rhs1 (stmt1, op1a);
7076 gimple_assign_set_rhs2 (stmt1, op1b);
7077 gimple_set_bb (stmt1, NULL);
7079 /* Allocate gimple stmt2 on the stack. */
7080 gassign *stmt2
7081 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7082 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
7083 gimple_assign_set_rhs_code (stmt2, code2);
7084 gimple_assign_set_rhs1 (stmt2, op2a);
7085 gimple_assign_set_rhs2 (stmt2, op2b);
7086 gimple_set_bb (stmt2, NULL);
7088 /* Allocate SSA names(lhs1) on the stack. */
7089 alignas (tree_node) unsigned char lhs1buf[sizeof (tree_ssa_name)];
7090 tree lhs1 = (tree) &lhs1buf[0];
7091 memset (lhs1, 0, sizeof (tree_ssa_name));
7092 TREE_SET_CODE (lhs1, SSA_NAME);
7093 TREE_TYPE (lhs1) = type;
7094 init_ssa_name_imm_use (lhs1);
7096 /* Allocate SSA names(lhs2) on the stack. */
7097 alignas (tree_node) unsigned char lhs2buf[sizeof (tree_ssa_name)];
7098 tree lhs2 = (tree) &lhs2buf[0];
7099 memset (lhs2, 0, sizeof (tree_ssa_name));
7100 TREE_SET_CODE (lhs2, SSA_NAME);
7101 TREE_TYPE (lhs2) = type;
7102 init_ssa_name_imm_use (lhs2);
7104 gimple_assign_set_lhs (stmt1, lhs1);
7105 gimple_assign_set_lhs (stmt2, lhs2);
7107 gimple_match_op op (gimple_match_cond::UNCOND, code,
7108 type, gimple_assign_lhs (stmt1),
7109 gimple_assign_lhs (stmt2));
7110 fosa_bb = outer_cond_bb;
7111 auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
7112 fosa_unwind = &unwind_stack;
7113 if (op.resimplify (NULL, (!outer_cond_bb
7114 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
7116 fosa_unwind = NULL;
7117 for (auto p : unwind_stack)
7118 p.second.restore (p.first);
7119 if (gimple_simplified_result_is_gimple_val (&op))
7121 tree res = op.ops[0];
7122 if (res == lhs1)
7123 return build2 (code1, type, op1a, op1b);
7124 else if (res == lhs2)
7125 return build2 (code2, type, op2a, op2b);
7126 else
7127 return res;
7129 else if (op.code.is_tree_code ()
7130 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
7132 tree op0 = op.ops[0];
7133 tree op1 = op.ops[1];
7134 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
7135 return NULL_TREE; /* not simple */
7137 return build2 ((enum tree_code)op.code, op.type, op0, op1);
7140 fosa_unwind = NULL;
7141 for (auto p : unwind_stack)
7142 p.second.restore (p.first);
7144 return NULL_TREE;
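/* A note on the stack allocation above: the two statements and their
   SSA name lhs objects live only for the duration of the resimplify call,
   so building them with XALLOCAVEC avoids creating (and then discarding)
   real SSA names in the function being compiled.  */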
7147 /* Try to simplify the AND of two comparisons, specified by
7148 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7149 If this can be simplified to a single expression (without requiring
7150 introducing more SSA variables to hold intermediate values),
7151 return the resulting tree. Otherwise return NULL_TREE.
7152 If the result expression is non-null, it has boolean type. */
7154 tree
7155 maybe_fold_and_comparisons (tree type,
7156 enum tree_code code1, tree op1a, tree op1b,
7157 enum tree_code code2, tree op2a, tree op2b,
7158 basic_block outer_cond_bb)
7160 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7161 outer_cond_bb))
7162 return t;
7164 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7165 outer_cond_bb))
7166 return t;
7168 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7169 op1a, op1b, code2, op2a,
7170 op2b, outer_cond_bb))
7171 return t;
7173 return NULL_TREE;
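/* The three attempts above run in order: the hand-written recursion with
   the comparisons as given, the same recursion with the two comparisons
   swapped, and finally the match.pd based route, which may catch cases
   such as (x_1 < 3) AND (x_1 < 5) => x_1 < 3 (illustrative).  */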
7176 /* Helper function for or_comparisons_1: try to simplify the OR of the
7177 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7178 If INVERT is true, invert the value of VAR before doing the OR.
7179 Return NULL_TREE if we can't simplify this to a single expression. */
7181 static tree
7182 or_var_with_comparison (tree type, tree var, bool invert,
7183 enum tree_code code2, tree op2a, tree op2b,
7184 basic_block outer_cond_bb)
7186 tree t;
7187 gimple *stmt = SSA_NAME_DEF_STMT (var);
7189 /* We can only deal with variables whose definitions are assignments. */
7190 if (!is_gimple_assign (stmt))
7191 return NULL_TREE;
7193 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7194 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7195 Then we only have to consider the simpler non-inverted cases. */
7196 if (invert)
7197 t = and_var_with_comparison_1 (type, stmt,
7198 invert_tree_comparison (code2, false),
7199 op2a, op2b, outer_cond_bb);
7200 else
7201 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7202 outer_cond_bb);
7203 return canonicalize_bool (t, invert);
7206 /* Try to simplify the OR of the ssa variable defined by the assignment
7207 STMT with the comparison specified by (OP2A CODE2 OP2B).
7208 Return NULL_TREE if we can't simplify this to a single expression. */
7210 static tree
7211 or_var_with_comparison_1 (tree type, gimple *stmt,
7212 enum tree_code code2, tree op2a, tree op2b,
7213 basic_block outer_cond_bb)
7215 tree var = gimple_assign_lhs (stmt);
7216 tree true_test_var = NULL_TREE;
7217 tree false_test_var = NULL_TREE;
7218 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7220 /* Check for identities like (var OR (var == 0)) => true. */
7221 if (TREE_CODE (op2a) == SSA_NAME
7222 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7224 if ((code2 == NE_EXPR && integer_zerop (op2b))
7225 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7227 true_test_var = op2a;
7228 if (var == true_test_var)
7229 return var;
7231 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7232 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7234 false_test_var = op2a;
7235 if (var == false_test_var)
7236 return boolean_true_node;
7240 /* If the definition is a comparison, recurse on it. */
7241 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7243 tree t = or_comparisons_1 (type, innercode,
7244 gimple_assign_rhs1 (stmt),
7245 gimple_assign_rhs2 (stmt),
7246 code2, op2a, op2b, outer_cond_bb);
7247 if (t)
7248 return t;
7251 /* If the definition is an AND or OR expression, we may be able to
7252 simplify by reassociating. */
7253 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7254 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7256 tree inner1 = gimple_assign_rhs1 (stmt);
7257 tree inner2 = gimple_assign_rhs2 (stmt);
7258 gimple *s;
7259 tree t;
7260 tree partial = NULL_TREE;
7261 bool is_or = (innercode == BIT_IOR_EXPR);
7263 /* Check for boolean identities that don't require recursive examination
7264 of inner1/inner2:
7265 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7266 inner1 OR (inner1 AND inner2) => inner1
7267 !inner1 OR (inner1 OR inner2) => true
7268 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
7270 if (inner1 == true_test_var)
7271 return (is_or ? var : inner1);
7272 else if (inner2 == true_test_var)
7273 return (is_or ? var : inner2);
7274 else if (inner1 == false_test_var)
7275 return (is_or
7276 ? boolean_true_node
7277 : or_var_with_comparison (type, inner2, false, code2, op2a,
7278 op2b, outer_cond_bb));
7279 else if (inner2 == false_test_var)
7280 return (is_or
7281 ? boolean_true_node
7282 : or_var_with_comparison (type, inner1, false, code2, op2a,
7283 op2b, outer_cond_bb));
7285 /* Next, redistribute/reassociate the OR across the inner tests.
7286 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7287 if (TREE_CODE (inner1) == SSA_NAME
7288 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7289 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7290 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7291 gimple_assign_rhs1 (s),
7292 gimple_assign_rhs2 (s),
7293 code2, op2a, op2b,
7294 outer_cond_bb)))
7296 /* Handle the OR case, where we are reassociating:
7297 (inner1 OR inner2) OR (op2a code2 op2b)
7298 => (t OR inner2)
7299 If the partial result t is a constant, we win. Otherwise
7300 continue on to try reassociating with the other inner test. */
7301 if (is_or)
7303 if (integer_onep (t))
7304 return boolean_true_node;
7305 else if (integer_zerop (t))
7306 return inner2;
7309 /* Handle the AND case, where we are redistributing:
7310 (inner1 AND inner2) OR (op2a code2 op2b)
7311 => (t AND (inner2 OR (op2a code2 op2b))) */
7312 else if (integer_zerop (t))
7313 return boolean_false_node;
7315 /* Save partial result for later. */
7316 partial = t;
7319 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
7320 if (TREE_CODE (inner2) == SSA_NAME
7321 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7322 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7323 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7324 gimple_assign_rhs1 (s),
7325 gimple_assign_rhs2 (s),
7326 code2, op2a, op2b,
7327 outer_cond_bb)))
7329 /* Handle the OR case, where we are reassociating:
7330 (inner1 OR inner2) OR (op2a code2 op2b)
7331 => (inner1 OR t)
7332 => (t OR partial) */
7333 if (is_or)
7335 if (integer_zerop (t))
7336 return inner1;
7337 else if (integer_onep (t))
7338 return boolean_true_node;
7339 /* If both are the same, we can apply the identity
7340 (x OR x) == x. */
7341 else if (partial && same_bool_result_p (t, partial))
7342 return t;
7345 /* Handle the AND case, where we are redistributing:
7346 (inner1 AND inner2) OR (op2a code2 op2b)
7347 => (t AND (inner1 OR (op2a code2 op2b)))
7348 => (t AND partial) */
7349 else
7351 if (integer_zerop (t))
7352 return boolean_false_node;
7353 else if (partial)
7355 /* We already got a simplification for the other
7356 operand to the redistributed AND expression. The
7357 interesting case is when at least one is true.
7358 Or, if both are the same, we can apply the identity
7359 (x AND x) == x. */
7360 if (integer_onep (partial))
7361 return t;
7362 else if (integer_onep (t))
7363 return partial;
7364 else if (same_bool_result_p (t, partial))
7365 return t;
7370 return NULL_TREE;
7373 /* Try to simplify the OR of two comparisons defined by
7374 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7375 If this can be done without constructing an intermediate value,
7376 return the resulting tree; otherwise NULL_TREE is returned.
7377 This function is deliberately asymmetric as it recurses on SSA_DEFs
7378 in the first comparison but not the second. */
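/* For instance (a sketch, assuming integer operands), the pair
     (x < y) OR (x == y)
   is combined via combine_comparisons into the single comparison
     x <= y
   without materializing any intermediate value.  */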
7380 static tree
7381 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7382 enum tree_code code2, tree op2a, tree op2b,
7383 basic_block outer_cond_bb)
7385 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7387 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7388 if (operand_equal_p (op1a, op2a, 0)
7389 && operand_equal_p (op1b, op2b, 0))
7391 /* Result will be either NULL_TREE, or a combined comparison. */
7392 tree t = combine_comparisons (UNKNOWN_LOCATION,
7393 TRUTH_ORIF_EXPR, code1, code2,
7394 truth_type, op1a, op1b);
7395 if (t)
7396 return t;
7399 /* Likewise the swapped case of the above. */
7400 if (operand_equal_p (op1a, op2b, 0)
7401 && operand_equal_p (op1b, op2a, 0))
7403 /* Result will be either NULL_TREE, or a combined comparison. */
7404 tree t = combine_comparisons (UNKNOWN_LOCATION,
7405 TRUTH_ORIF_EXPR, code1,
7406 swap_tree_comparison (code2),
7407 truth_type, op1a, op1b);
7408 if (t)
7409 return t;
7412 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7413 NAME's definition is a truth value. See if there are any simplifications
7414 that can be done against the NAME's definition. */
7415 if (TREE_CODE (op1a) == SSA_NAME
7416 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7417 && (integer_zerop (op1b) || integer_onep (op1b)))
7419 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7420 || (code1 == NE_EXPR && integer_onep (op1b)));
7421 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7422 switch (gimple_code (stmt))
7424 case GIMPLE_ASSIGN:
7425 /* Try to simplify by copy-propagating the definition. */
7426 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7427 op2b, outer_cond_bb);
7429 case GIMPLE_PHI:
7430 /* If every argument to the PHI produces the same result when
7431 ORed with the second comparison, we win.
7432 Do not do this unless the type is bool since we need a bool
7433 result here anyway. */
7434 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7436 tree result = NULL_TREE;
7437 unsigned i;
7438 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7440 tree arg = gimple_phi_arg_def (stmt, i);
7442 /* If this PHI has itself as an argument, ignore it.
7443 If all the other args produce the same result,
7444 we're still OK. */
7445 if (arg == gimple_phi_result (stmt))
7446 continue;
7447 else if (TREE_CODE (arg) == INTEGER_CST)
7449 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7451 if (!result)
7452 result = boolean_true_node;
7453 else if (!integer_onep (result))
7454 return NULL_TREE;
7456 else if (!result)
7457 result = fold_build2 (code2, boolean_type_node,
7458 op2a, op2b);
7459 else if (!same_bool_comparison_p (result,
7460 code2, op2a, op2b))
7461 return NULL_TREE;
7463 else if (TREE_CODE (arg) == SSA_NAME
7464 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7466 tree temp;
7467 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7468 /* In simple cases we can look through PHI nodes,
7469 but we have to be careful with loops.
7470 See PR49073. */
7471 if (! dom_info_available_p (CDI_DOMINATORS)
7472 || gimple_bb (def_stmt) == gimple_bb (stmt)
7473 || dominated_by_p (CDI_DOMINATORS,
7474 gimple_bb (def_stmt),
7475 gimple_bb (stmt)))
7476 return NULL_TREE;
7477 temp = or_var_with_comparison (type, arg, invert, code2,
7478 op2a, op2b, outer_cond_bb);
7479 if (!temp)
7480 return NULL_TREE;
7481 else if (!result)
7482 result = temp;
7483 else if (!same_bool_result_p (result, temp))
7484 return NULL_TREE;
7486 else
7487 return NULL_TREE;
7489 return result;
7492 default:
7493 break;
7496 return NULL_TREE;
7499 /* Try to simplify the OR of two comparisons, specified by
7500 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7501 If this can be simplified to a single expression (without requiring
7502 introducing more SSA variables to hold intermediate values),
7503 return the resulting tree. Otherwise return NULL_TREE.
7504 If the result expression is non-null, it has boolean type. */
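/* A sketch of a typical use from a caller folding two adjacent
   conditions (variable names are hypothetical):

     tree t = maybe_fold_or_comparisons (boolean_type_node,
					 LT_EXPR, a, b,
					 EQ_EXPR, a, b, NULL);
     if (t)
       ... use T, e.g. A <= B for integer A and B ...  */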
7506 tree
7507 maybe_fold_or_comparisons (tree type,
7508 enum tree_code code1, tree op1a, tree op1b,
7509 enum tree_code code2, tree op2a, tree op2b,
7510 basic_block outer_cond_bb)
7512 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7513 outer_cond_bb))
7514 return t;
7516 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7517 outer_cond_bb))
7518 return t;
7520 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7521 op1a, op1b, code2, op2a,
7522 op2b, outer_cond_bb))
7523 return t;
7525 return NULL_TREE;
7528 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7530 Either NULL_TREE, a simplified but non-constant or a constant
7531 is returned.
7533 ??? This should go into a gimple-fold-inline.h file to be eventually
7534 privatized with the single valueize function used in the various TUs
7535 to avoid the indirect function call overhead. */
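/* A minimal valueization callback (a sketch, not taken from any
   particular pass) that reports no known values simply returns its
   argument unchanged:

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

   CCP-style callers instead map each SSA name to its lattice value
   and hand back the constant when one is known.  */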
7537 tree
7538 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7539 tree (*gvalueize) (tree))
7541 gimple_match_op res_op;
7542 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7543 edges if there are intermediate VARYING defs. For this reason
7544 do not follow SSA edges here even though SCCVN can technically
7545 deal just fine with that. */
7546 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7548 tree res = NULL_TREE;
7549 if (gimple_simplified_result_is_gimple_val (&res_op))
7550 res = res_op.ops[0];
7551 else if (mprts_hook)
7552 res = mprts_hook (&res_op);
7553 if (res)
7555 if (dump_file && dump_flags & TDF_DETAILS)
7557 fprintf (dump_file, "Match-and-simplified ");
7558 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7559 fprintf (dump_file, " to ");
7560 print_generic_expr (dump_file, res);
7561 fprintf (dump_file, "\n");
7563 return res;
7567 location_t loc = gimple_location (stmt);
7568 switch (gimple_code (stmt))
7570 case GIMPLE_ASSIGN:
7572 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7574 switch (get_gimple_rhs_class (subcode))
7576 case GIMPLE_SINGLE_RHS:
7578 tree rhs = gimple_assign_rhs1 (stmt);
7579 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7581 if (TREE_CODE (rhs) == SSA_NAME)
7583 /* If the RHS is an SSA_NAME, return its known constant value,
7584 if any. */
7585 return (*valueize) (rhs);
7587 /* Handle propagating invariant addresses into address
7588 operations. */
7589 else if (TREE_CODE (rhs) == ADDR_EXPR
7590 && !is_gimple_min_invariant (rhs))
7592 poly_int64 offset = 0;
7593 tree base;
7594 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7595 &offset,
7596 valueize);
7597 if (base
7598 && (CONSTANT_CLASS_P (base)
7599 || decl_address_invariant_p (base)))
7600 return build_invariant_address (TREE_TYPE (rhs),
7601 base, offset);
7603 else if (TREE_CODE (rhs) == CONSTRUCTOR
7604 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7605 && known_eq (CONSTRUCTOR_NELTS (rhs),
7606 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7608 unsigned i, nelts;
7609 tree val;
7611 nelts = CONSTRUCTOR_NELTS (rhs);
7612 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7613 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7615 val = (*valueize) (val);
7616 if (TREE_CODE (val) == INTEGER_CST
7617 || TREE_CODE (val) == REAL_CST
7618 || TREE_CODE (val) == FIXED_CST)
7619 vec.quick_push (val);
7620 else
7621 return NULL_TREE;
7624 return vec.build ();
7626 if (subcode == OBJ_TYPE_REF)
7628 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7629 /* If callee is constant, we can fold away the wrapper. */
7630 if (is_gimple_min_invariant (val))
7631 return val;
7634 if (kind == tcc_reference)
7636 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7637 || TREE_CODE (rhs) == REALPART_EXPR
7638 || TREE_CODE (rhs) == IMAGPART_EXPR)
7639 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7641 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7642 return fold_unary_loc (EXPR_LOCATION (rhs),
7643 TREE_CODE (rhs),
7644 TREE_TYPE (rhs), val);
7646 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7647 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7649 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7650 return fold_ternary_loc (EXPR_LOCATION (rhs),
7651 TREE_CODE (rhs),
7652 TREE_TYPE (rhs), val,
7653 TREE_OPERAND (rhs, 1),
7654 TREE_OPERAND (rhs, 2));
7656 else if (TREE_CODE (rhs) == MEM_REF
7657 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7659 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7660 if (TREE_CODE (val) == ADDR_EXPR
7661 && is_gimple_min_invariant (val))
7663 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7664 unshare_expr (val),
7665 TREE_OPERAND (rhs, 1));
7666 if (tem)
7667 rhs = tem;
7670 return fold_const_aggregate_ref_1 (rhs, valueize);
7672 else if (kind == tcc_declaration)
7673 return get_symbol_constant_value (rhs);
7674 return rhs;
7677 case GIMPLE_UNARY_RHS:
7678 return NULL_TREE;
7680 case GIMPLE_BINARY_RHS:
7681 /* Translate &x + CST into an invariant form suitable for
7682 further propagation. */
7683 if (subcode == POINTER_PLUS_EXPR)
7685 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7686 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7687 if (TREE_CODE (op0) == ADDR_EXPR
7688 && TREE_CODE (op1) == INTEGER_CST)
7690 tree off = fold_convert (ptr_type_node, op1);
7691 return build1_loc
7692 (loc, ADDR_EXPR, TREE_TYPE (op0),
7693 fold_build2 (MEM_REF,
7694 TREE_TYPE (TREE_TYPE (op0)),
7695 unshare_expr (op0), off));
7698 /* Canonicalize bool != 0 and bool == 0 appearing after
7699 valueization. While gimple_simplify handles this,
7700 it can get confused by the ~X == 1 -> X == 0 transform,
7701 which we can't reduce to an SSA name or a constant
7702 (and we have no way to tell gimple_simplify not to
7703 consider those transforms in the first place). */
7704 else if (subcode == EQ_EXPR
7705 || subcode == NE_EXPR)
7707 tree lhs = gimple_assign_lhs (stmt);
7708 tree op0 = gimple_assign_rhs1 (stmt);
7709 if (useless_type_conversion_p (TREE_TYPE (lhs),
7710 TREE_TYPE (op0)))
7712 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7713 op0 = (*valueize) (op0);
7714 if (TREE_CODE (op0) == INTEGER_CST)
7715 std::swap (op0, op1);
7716 if (TREE_CODE (op1) == INTEGER_CST
7717 && ((subcode == NE_EXPR && integer_zerop (op1))
7718 || (subcode == EQ_EXPR && integer_onep (op1))))
7719 return op0;
7722 return NULL_TREE;
7724 case GIMPLE_TERNARY_RHS:
7726 /* Handle ternary operators that can appear in GIMPLE form. */
7727 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7728 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7729 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7730 return fold_ternary_loc (loc, subcode,
7731 TREE_TYPE (gimple_assign_lhs (stmt)),
7732 op0, op1, op2);
7735 default:
7736 gcc_unreachable ();
7740 case GIMPLE_CALL:
7742 tree fn;
7743 gcall *call_stmt = as_a <gcall *> (stmt);
7745 if (gimple_call_internal_p (stmt))
7747 enum tree_code subcode = ERROR_MARK;
7748 switch (gimple_call_internal_fn (stmt))
7750 case IFN_UBSAN_CHECK_ADD:
7751 subcode = PLUS_EXPR;
7752 break;
7753 case IFN_UBSAN_CHECK_SUB:
7754 subcode = MINUS_EXPR;
7755 break;
7756 case IFN_UBSAN_CHECK_MUL:
7757 subcode = MULT_EXPR;
7758 break;
7759 case IFN_BUILTIN_EXPECT:
7761 tree arg0 = gimple_call_arg (stmt, 0);
7762 tree op0 = (*valueize) (arg0);
7763 if (TREE_CODE (op0) == INTEGER_CST)
7764 return op0;
7765 return NULL_TREE;
7767 default:
7768 return NULL_TREE;
7770 tree arg0 = gimple_call_arg (stmt, 0);
7771 tree arg1 = gimple_call_arg (stmt, 1);
7772 tree op0 = (*valueize) (arg0);
7773 tree op1 = (*valueize) (arg1);
7775 if (TREE_CODE (op0) != INTEGER_CST
7776 || TREE_CODE (op1) != INTEGER_CST)
7778 switch (subcode)
7780 case MULT_EXPR:
7781 /* x * 0 = 0 * x = 0 without overflow. */
7782 if (integer_zerop (op0) || integer_zerop (op1))
7783 return build_zero_cst (TREE_TYPE (arg0));
7784 break;
7785 case MINUS_EXPR:
7786 /* y - y = 0 without overflow. */
7787 if (operand_equal_p (op0, op1, 0))
7788 return build_zero_cst (TREE_TYPE (arg0));
7789 break;
7790 default:
7791 break;
7794 tree res
7795 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7796 if (res
7797 && TREE_CODE (res) == INTEGER_CST
7798 && !TREE_OVERFLOW (res))
7799 return res;
7800 return NULL_TREE;
7803 fn = (*valueize) (gimple_call_fn (stmt));
7804 if (TREE_CODE (fn) == ADDR_EXPR
7805 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7806 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7807 && gimple_builtin_call_types_compatible_p (stmt,
7808 TREE_OPERAND (fn, 0)))
7810 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7811 tree retval;
7812 unsigned i;
7813 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7814 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7815 retval = fold_builtin_call_array (loc,
7816 gimple_call_return_type (call_stmt),
7817 fn, gimple_call_num_args (stmt), args);
7818 if (retval)
7820 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7821 STRIP_NOPS (retval);
7822 retval = fold_convert (gimple_call_return_type (call_stmt),
7823 retval);
7825 return retval;
7827 return NULL_TREE;
7830 default:
7831 return NULL_TREE;
7835 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7836 Returns NULL_TREE if folding to a constant is not possible, otherwise
7837 returns a constant according to is_gimple_min_invariant. */
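/* E.g. for the statement _1 = _2 * 4, a VALUEIZE that maps _2 to 3
   yields the INTEGER_CST 12; if _2 has no known value, the result of
   gimple_fold_stmt_to_constant_1 is not min-invariant and NULL_TREE
   is returned instead.  */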
7839 tree
7840 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7842 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7843 if (res && is_gimple_min_invariant (res))
7844 return res;
7845 return NULL_TREE;
7849 /* The following set of functions folds references using their
7850 constant initializers. */
7852 /* See if we can find the constructor defining the value of BASE.
7853 When the constructor is reached at a constant offset (such as
7854 when BASE is array[40] and we know the constructor of ARRAY),
7855 BIT_OFFSET is adjusted accordingly.
7857 As a special case, return error_mark_node when the constructor
7858 is not explicitly available but is known to be zero,
7859 such as for 'static const int a;'. */
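/* For instance (a hypothetical input), with
     static const int a[4] = { 1, 2, 3, 4 };
   a BASE of MEM_REF (&a, 8) yields A's CONSTRUCTOR and increases
   *BIT_OFFSET by 64 on a target with 8-bit units.  */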
7860 static tree
7861 get_base_constructor (tree base, poly_int64 *bit_offset,
7862 tree (*valueize)(tree))
7864 poly_int64 bit_offset2, size, max_size;
7865 bool reverse;
7867 if (TREE_CODE (base) == MEM_REF)
7869 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7870 if (!boff.to_shwi (bit_offset))
7871 return NULL_TREE;
7873 if (valueize
7874 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7875 base = valueize (TREE_OPERAND (base, 0));
7876 if (!base || TREE_CODE (base) != ADDR_EXPR)
7877 return NULL_TREE;
7878 base = TREE_OPERAND (base, 0);
7880 else if (valueize
7881 && TREE_CODE (base) == SSA_NAME)
7882 base = valueize (base);
7884 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7885 DECL_INITIAL. If BASE is a nested reference into another
7886 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7887 the inner reference. */
7888 switch (TREE_CODE (base))
7890 case VAR_DECL:
7891 case CONST_DECL:
7893 tree init = ctor_for_folding (base);
7895 /* Our semantics are the exact opposite of ctor_for_folding's:
7896 NULL means unknown, while error_mark_node means zero. */
7897 if (init == error_mark_node)
7898 return NULL_TREE;
7899 if (!init)
7900 return error_mark_node;
7901 return init;
7904 case VIEW_CONVERT_EXPR:
7905 return get_base_constructor (TREE_OPERAND (base, 0),
7906 bit_offset, valueize);
7908 case ARRAY_REF:
7909 case COMPONENT_REF:
7910 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7911 &reverse);
7912 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7913 return NULL_TREE;
7914 *bit_offset += bit_offset2;
7915 return get_base_constructor (base, bit_offset, valueize);
7917 case CONSTRUCTOR:
7918 return base;
7920 default:
7921 if (CONSTANT_CLASS_P (base))
7922 return base;
7924 return NULL_TREE;
7928 /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
7929 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
7930 the reference; otherwise the type of the referenced element is used instead.
7931 When SIZE is zero, attempt to fold a reference to the entire element OFFSET
7932 refers to. Increment *SUBOFF by the bit offset of the accessed element. */
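/* For example (hypothetical, assuming 32-bit int and 8-bit units),
   with CTOR being the initializer of
     static const int a[4] = { 10, 20, 30, 40 };
   a request with OFFSET 64 and SIZE 32 selects access index 2 and
   folds to 30.  */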
7934 static tree
7935 fold_array_ctor_reference (tree type, tree ctor,
7936 unsigned HOST_WIDE_INT offset,
7937 unsigned HOST_WIDE_INT size,
7938 tree from_decl,
7939 unsigned HOST_WIDE_INT *suboff)
7941 offset_int low_bound;
7942 offset_int elt_size;
7943 offset_int access_index;
7944 tree domain_type = NULL_TREE;
7945 HOST_WIDE_INT inner_offset;
7947 /* Compute low bound and elt size. */
7948 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7949 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7950 if (domain_type && TYPE_MIN_VALUE (domain_type))
7952 /* Static constructors for variably sized objects make no sense. */
7953 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7954 return NULL_TREE;
7955 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7957 else
7958 low_bound = 0;
7959 /* Static constructors for variably sized objects make no sense. */
7960 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7961 return NULL_TREE;
7962 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7964 /* When TYPE is non-null, verify that it specifies a constant-sized
7965 access of a multiple of the array element size. Avoid division
7966 by zero below when ELT_SIZE is zero, such as with the result of
7967 an initializer for a zero-length array or an empty struct. */
7968 if (elt_size == 0
7969 || (type
7970 && (!TYPE_SIZE_UNIT (type)
7971 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7972 return NULL_TREE;
7974 /* Compute the array index we look for. */
7975 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7976 elt_size);
7977 access_index += low_bound;
7979 /* And offset within the access. */
7980 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7982 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7983 if (size > elt_sz * BITS_PER_UNIT)
7985 /* native_encode_expr constraints. */
7986 if (size > MAX_BITSIZE_MODE_ANY_MODE
7987 || size % BITS_PER_UNIT != 0
7988 || inner_offset % BITS_PER_UNIT != 0
7989 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7990 return NULL_TREE;
7992 unsigned ctor_idx;
7993 tree val = get_array_ctor_element_at_index (ctor, access_index,
7994 &ctor_idx);
7995 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7996 return build_zero_cst (type);
7998 /* native-encode adjacent ctor elements. */
7999 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8000 unsigned bufoff = 0;
8001 offset_int index = 0;
8002 offset_int max_index = access_index;
8003 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8004 if (!val)
8005 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8006 else if (!CONSTANT_CLASS_P (val))
8007 return NULL_TREE;
8008 if (!elt->index)
8010 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8012 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8013 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8015 else
8016 index = max_index = wi::to_offset (elt->index);
8017 index = wi::umax (index, access_index);
8020 if (bufoff + elt_sz > sizeof (buf))
8021 elt_sz = sizeof (buf) - bufoff;
8022 int len = native_encode_expr (val, buf + bufoff, elt_sz,
8023 inner_offset / BITS_PER_UNIT);
8024 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
8025 return NULL_TREE;
8026 inner_offset = 0;
8027 bufoff += len;
8029 access_index += 1;
8030 if (wi::cmpu (access_index, index) == 0)
8031 val = elt->value;
8032 else if (wi::cmpu (access_index, max_index) > 0)
8034 ctor_idx++;
8035 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
8037 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8038 ++max_index;
8040 else
8042 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8043 index = 0;
8044 max_index = access_index;
8045 if (!elt->index)
8047 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8049 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8050 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8052 else
8053 index = max_index = wi::to_offset (elt->index);
8054 index = wi::umax (index, access_index);
8055 if (wi::cmpu (access_index, index) == 0)
8056 val = elt->value;
8057 else
8058 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8062 while (bufoff < size / BITS_PER_UNIT);
8063 *suboff += size;
8064 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
8067 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
8069 if (!size && TREE_CODE (val) != CONSTRUCTOR)
8071 /* For the final reference to the entire accessed element
8072 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
8073 may be null) in favor of the type of the element, and set
8074 SIZE to the size of the accessed element. */
8075 inner_offset = 0;
8076 type = TREE_TYPE (val);
8077 size = elt_sz * BITS_PER_UNIT;
8079 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
8080 && TREE_CODE (val) == CONSTRUCTOR
8081 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
8082 /* If this isn't the last element in the CTOR, is itself a CTOR,
8083 and does not cover the whole object we are requesting, give up,
8084 since we're not set up for combining from multiple CTORs. */
8085 return NULL_TREE;
8087 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
8088 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
8089 suboff);
8092 /* Memory not explicitly mentioned in the constructor is zero (or
8093 the reference is out of range). */
8094 return type ? build_zero_cst (type) : NULL_TREE;
8097 /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
8098 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
8099 the reference; otherwise the type of the referenced member is used instead.
8100 When SIZE is zero, attempt to fold a reference to the entire member OFFSET
8101 refers to. Increment *SUBOFF by the bit offset of the accessed member. */
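/* For example (hypothetical, assuming 32-bit int), with CTOR being
   the initializer of
     static const struct { int i; int j; } s = { 1, 2 };
   a request with OFFSET 32 and SIZE 32 overlaps only the field J
   and folds to 2.  */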
8103 static tree
8104 fold_nonarray_ctor_reference (tree type, tree ctor,
8105 unsigned HOST_WIDE_INT offset,
8106 unsigned HOST_WIDE_INT size,
8107 tree from_decl,
8108 unsigned HOST_WIDE_INT *suboff)
8110 unsigned HOST_WIDE_INT cnt;
8111 tree cfield, cval;
8113 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
8115 tree byte_offset = DECL_FIELD_OFFSET (cfield);
8116 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
8117 tree field_size = DECL_SIZE (cfield);
8119 if (!field_size)
8121 /* Determine the size of the flexible array member from
8122 the size of the initializer provided for it. */
8123 field_size = TYPE_SIZE (TREE_TYPE (cval));
8126 /* Variable-sized objects in static constructors make no sense,
8127 but field_size can be NULL for flexible array members. */
8128 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
8129 && TREE_CODE (byte_offset) == INTEGER_CST
8130 && (field_size != NULL_TREE
8131 ? TREE_CODE (field_size) == INTEGER_CST
8132 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
8134 /* Compute bit offset of the field. */
8135 offset_int bitoffset
8136 = (wi::to_offset (field_offset)
8137 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
8138 /* Compute bit offset where the field ends. */
8139 offset_int bitoffset_end;
8140 if (field_size != NULL_TREE)
8141 bitoffset_end = bitoffset + wi::to_offset (field_size);
8142 else
8143 bitoffset_end = 0;
8145 /* Compute the bit offset of the end of the desired access.
8146 As a special case, if the size of the desired access is
8147 zero, assume the access is to the entire field (the actual
8148 bounds of the field are then taken from the field itself
8149 by resetting OFFSET, TYPE and SIZE below). */
8150 offset_int access_end = offset_int (offset);
8151 if (size)
8152 access_end += size;
8153 else
8154 access_end = bitoffset_end;
8156 /* Is there any overlap between the desired access at
8157 [OFFSET, OFFSET+SIZE) and the offset of the field within
8158 the object at [BITOFFSET, BITOFFSET_END)? */
8159 if (wi::cmps (access_end, bitoffset) > 0
8160 && (field_size == NULL_TREE
8161 || wi::lts_p (offset, bitoffset_end)))
8163 *suboff += bitoffset.to_uhwi ();
8165 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8167 /* For the final reference to the entire accessed member
8168 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8169 be null) in favor of the type of the member, and set
8170 SIZE to the size of the accessed member. */
8171 offset = bitoffset.to_uhwi ();
8172 type = TREE_TYPE (cval);
8173 size = (bitoffset_end - bitoffset).to_uhwi ();
8176 /* We do have overlap. Now see if the field is large enough
8177 to cover the access. Give up for accesses that extend
8178 beyond the end of the object or that span multiple fields. */
8179 if (wi::cmps (access_end, bitoffset_end) > 0)
8180 return NULL_TREE;
8181 if (offset < bitoffset)
8182 return NULL_TREE;
8184 offset_int inner_offset = offset_int (offset) - bitoffset;
8186 /* Integral bit-fields are left-justified on big-endian targets, so
8187 we must arrange for native_encode_int to start at their MSB. */
8188 if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
8190 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8191 return NULL_TREE;
8192 const unsigned int encoding_size
8193 = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
8194 if (BYTES_BIG_ENDIAN)
8195 inner_offset += encoding_size - wi::to_offset (field_size);
8198 return fold_ctor_reference (type, cval,
8199 inner_offset.to_uhwi (), size,
8200 from_decl, suboff);
8204 if (!type)
8205 return NULL_TREE;
8207 return build_zero_cst (type);
8210 /* CTOR is a value initializing memory. Fold a reference of TYPE and
8211 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8212 is zero, attempt to fold a reference to the entire subobject
8213 which OFFSET refers to. This is used when folding accesses to
8214 string members of aggregates. When non-null, set *SUBOFF to
8215 the bit offset of the accessed subobject. */
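/* A sketch of a typical use (names are hypothetical): once BASE is
   known to be backed by the initializer INIT at bit offset OFF,

     tree val = fold_ctor_reference (TREE_TYPE (expr), init, off,
				     bitsize, base);
     if (val)
       ... the load from EXPR reads the constant VAL ...  */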
8217 tree
8218 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8219 const poly_uint64 &poly_size, tree from_decl,
8220 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8222 tree ret;
8224 /* We found the field with an exact match. */
8225 if (type
8226 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8227 && known_eq (poly_offset, 0U))
8228 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8230 /* The remaining optimizations need a constant size and offset. */
8231 unsigned HOST_WIDE_INT size, offset;
8232 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8233 return NULL_TREE;
8235 /* We are at the end of the walk; see if we can view-convert
8236 the result. */
8237 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8238 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8239 && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
8240 && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
8242 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8243 if (ret)
8245 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8246 if (ret)
8247 STRIP_USELESS_TYPE_CONVERSION (ret);
8249 return ret;
8252 /* For constants and byte-aligned/sized reads, try to go through
8253 native_encode/interpret. */
8254 if (CONSTANT_CLASS_P (ctor)
8255 && BITS_PER_UNIT == 8
8256 && offset % BITS_PER_UNIT == 0
8257 && offset / BITS_PER_UNIT <= INT_MAX
8258 && size % BITS_PER_UNIT == 0
8259 && size <= MAX_BITSIZE_MODE_ANY_MODE
8260 && can_native_interpret_type_p (type))
8262 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8263 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8264 offset / BITS_PER_UNIT);
8265 if (len > 0)
8266 return native_interpret_expr (type, buf, len);
8269 /* For constructors, first try recursive local processing, but in any case
8270 this requires the native storage order. */
8271 if (TREE_CODE (ctor) == CONSTRUCTOR
8272 && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8273 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
8275 unsigned HOST_WIDE_INT dummy = 0;
8276 if (!suboff)
8277 suboff = &dummy;
8279 tree ret;
8280 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8281 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8282 ret = fold_array_ctor_reference (type, ctor, offset, size,
8283 from_decl, suboff);
8284 else
8285 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8286 from_decl, suboff);
8288 /* Otherwise fall back to native_encode_initializer. This may be done
8289 only from the outermost fold_ctor_reference call (because it itself
8290 recurses into CONSTRUCTORs and doesn't update suboff). */
8291 if (ret == NULL_TREE
8292 && suboff == &dummy
8293 && BITS_PER_UNIT == 8
8294 && offset % BITS_PER_UNIT == 0
8295 && offset / BITS_PER_UNIT <= INT_MAX
8296 && size % BITS_PER_UNIT == 0
8297 && size <= MAX_BITSIZE_MODE_ANY_MODE
8298 && can_native_interpret_type_p (type))
8300 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8301 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8302 offset / BITS_PER_UNIT);
8303 if (len > 0)
8304 return native_interpret_expr (type, buf, len);
8307 return ret;
8310 return NULL_TREE;
8313 /* Return the tree representing the element referenced by T if T is an
8314 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8315 names using VALUEIZE. Return NULL_TREE otherwise. */
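/* For example, with
     static const int a[4] = { 1, 2, 3, 4 };
   the reference a[2] folds to 3; given a VALUEIZE mapping i_1 to 2,
   the reference a[i_1] folds to 3 as well.  */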
8317 tree
8318 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8320 tree ctor, idx, base;
8321 poly_int64 offset, size, max_size;
8322 tree tem;
8323 bool reverse;
8325 if (TREE_THIS_VOLATILE (t))
8326 return NULL_TREE;
8328 if (DECL_P (t))
8329 return get_symbol_constant_value (t);
8331 tem = fold_read_from_constant_string (t);
8332 if (tem)
8333 return tem;
8335 switch (TREE_CODE (t))
8337 case ARRAY_REF:
8338 case ARRAY_RANGE_REF:
8339 /* Constant indexes are handled well by get_base_constructor.
8340 Only special case variable offsets.
8341 FIXME: This code can't handle nested references with variable indexes
8342 (they will be handled only by iteration of ccp). Perhaps we can bring
8343 get_ref_base_and_extent here and make it use a valueize callback. */
8344 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8345 && valueize
8346 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8347 && poly_int_tree_p (idx))
8349 tree low_bound, unit_size;
8351 /* If the resulting bit-offset is constant, track it. */
8352 if ((low_bound = array_ref_low_bound (t),
8353 poly_int_tree_p (low_bound))
8354 && (unit_size = array_ref_element_size (t),
8355 tree_fits_uhwi_p (unit_size)))
8357 poly_offset_int woffset
8358 = wi::sext (wi::to_poly_offset (idx)
8359 - wi::to_poly_offset (low_bound),
8360 TYPE_PRECISION (sizetype));
8361 woffset *= tree_to_uhwi (unit_size);
8362 woffset *= BITS_PER_UNIT;
8363 if (woffset.to_shwi (&offset))
8365 base = TREE_OPERAND (t, 0);
8366 ctor = get_base_constructor (base, &offset, valueize);
8367 /* Empty constructor. Always fold to 0. */
8368 if (ctor == error_mark_node)
8369 return build_zero_cst (TREE_TYPE (t));
8370 /* Out-of-bounds array access. Value is undefined,
8371 but don't fold. */
8372 if (maybe_lt (offset, 0))
8373 return NULL_TREE;
8374 /* We cannot determine ctor. */
8375 if (!ctor)
8376 return NULL_TREE;
8377 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8378 tree_to_uhwi (unit_size)
8379 * BITS_PER_UNIT,
8380 base);
8384 /* Fallthru. */
8386 case COMPONENT_REF:
8387 case BIT_FIELD_REF:
8388 case TARGET_MEM_REF:
8389 case MEM_REF:
8390 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8391 ctor = get_base_constructor (base, &offset, valueize);
8393 /* Empty constructor. Always fold to 0. */
8394 if (ctor == error_mark_node)
8395 return build_zero_cst (TREE_TYPE (t));
8396 /* We do not know the precise address. */
8397 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8398 return NULL_TREE;
8399 /* We cannot determine ctor. */
8400 if (!ctor)
8401 return NULL_TREE;
8403 /* Out-of-bounds array access. Value is undefined, but don't fold. */
8404 if (maybe_lt (offset, 0))
8405 return NULL_TREE;
8407 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8408 if (tem)
8409 return tem;
8411 /* For bit-field reads, try to read the representative and
8412 adjust. */
8413 if (TREE_CODE (t) == COMPONENT_REF
8414 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8415 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8417 HOST_WIDE_INT csize, coffset;
8418 tree field = TREE_OPERAND (t, 1);
8419 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8420 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8421 && size.is_constant (&csize)
8422 && offset.is_constant (&coffset)
8423 && (coffset % BITS_PER_UNIT != 0
8424 || csize % BITS_PER_UNIT != 0)
8425 && !reverse
8426 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8428 poly_int64 bitoffset;
8429 poly_uint64 field_offset, repr_offset;
8430 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8431 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8432 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8433 else
8434 bitoffset = 0;
8435 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8436 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8437 HOST_WIDE_INT bitoff;
8438 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8439 - TYPE_PRECISION (TREE_TYPE (field)));
8440 if (bitoffset.is_constant (&bitoff)
8441 && bitoff >= 0
8442 && bitoff <= diff)
8444 offset -= bitoff;
8445 size = tree_to_uhwi (DECL_SIZE (repr));
8447 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8448 size, base);
8449 if (tem && TREE_CODE (tem) == INTEGER_CST)
8451 if (!BYTES_BIG_ENDIAN)
8452 tem = wide_int_to_tree (TREE_TYPE (field),
8453 wi::lrshift (wi::to_wide (tem),
8454 bitoff));
8455 else
8456 tem = wide_int_to_tree (TREE_TYPE (field),
8457 wi::lrshift (wi::to_wide (tem),
8458 diff - bitoff));
8459 return tem;
8464 break;
8466 case REALPART_EXPR:
8467 case IMAGPART_EXPR:
8469 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8470 if (c && TREE_CODE (c) == COMPLEX_CST)
8471 return fold_build1_loc (EXPR_LOCATION (t),
8472 TREE_CODE (t), TREE_TYPE (t), c);
8473 break;
8476 default:
8477 break;
8480 return NULL_TREE;
8483 tree
8484 fold_const_aggregate_ref (tree t)
8486 return fold_const_aggregate_ref_1 (t, NULL);
8489 /* Look up the virtual method with index TOKEN in the virtual table V
8490 at OFFSET.
8491 If CAN_REFER is non-NULL, set it to false when the method is not
8492 referable or the virtual table is ill-formed (such as rewritten by
8493 a non-C++ producer); if CAN_REFER is NULL, just return NULL in that case. */
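/* A sketch of the lookup (the exact layout is ABI- and
   target-dependent): with V being a vtable whose initializer holds
     { 0, &RTTI, &S::f, &S::g }
   and OFFSET addressing the first method slot, TOKEN 0 resolves to
   S::f and TOKEN 1 to S::g.  */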
8495 tree
8496 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8497 tree v,
8498 unsigned HOST_WIDE_INT offset,
8499 bool *can_refer)
8501 tree vtable = v, init, fn;
8502 unsigned HOST_WIDE_INT size;
8503 unsigned HOST_WIDE_INT elt_size, access_index;
8504 tree domain_type;
8506 if (can_refer)
8507 *can_refer = true;
8509 /* First of all, double-check that we have a virtual table. */
8510 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8512 /* Pass down that we lost track of the target. */
8513 if (can_refer)
8514 *can_refer = false;
8515 return NULL_TREE;
8518 init = ctor_for_folding (v);
8520 /* The virtual tables should always be born with constructors
8521 and we should always assume that they are available for
8522 folding. At the moment we do not stream them in all cases,
8523 but it should never happen that the ctor seems unreachable. */
8524 gcc_assert (init);
8525 if (init == error_mark_node)
8527 /* Pass down that we lost track of the target. */
8528 if (can_refer)
8529 *can_refer = false;
8530 return NULL_TREE;
8532 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8533 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8534 offset *= BITS_PER_UNIT;
8535 offset += token * size;
8537 /* Look up the value in the constructor, which is assumed to be an
8538 array. This is equivalent to
8539 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8540 offset, size, NULL);
8541 but in constant time. We expect that the frontend produced a
8542 simple array without indexed initializers. */
8544 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8545 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8546 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8547 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8549 access_index = offset / BITS_PER_UNIT / elt_size;
8550 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8552 /* The C++ FE can now produce indexed fields, and we check if the indexes
8553 match. */
8554 if (access_index < CONSTRUCTOR_NELTS (init))
8556 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8557 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8558 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8559 STRIP_NOPS (fn);
8561 else
8562 fn = NULL;
8564 /* For a type-inconsistent program we may end up looking up a virtual
8565 method in a virtual table that does not contain TOKEN entries. We may
8566 overrun the virtual table and pick up a constant or RTTI info pointer.
8567 In any case the call is undefined. */
8568 if (!fn
8569 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8570 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8571 fn = builtin_decl_unreachable ();
8572 else
8574 fn = TREE_OPERAND (fn, 0);
8576 /* When the cgraph node is missing and the function is not public, we
8577 cannot devirtualize. This can happen in WHOPR when the actual method
8578 ends up in another partition, because we found the devirtualization
8579 possibility too late. */
8580 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8582 if (can_refer)
8584 *can_refer = false;
8585 return fn;
8587 return NULL_TREE;
8591 /* Make sure we create a cgraph node for functions we'll reference.
8592 They can be non-existent if the reference comes from an entry
8593 of an external vtable for example. */
8594 cgraph_node::get_create (fn);
8596 return fn;
8599 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8600 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8601 KNOWN_BINFO carries the binfo describing the true type of
8602 OBJ_TYPE_REF_OBJECT(REF).
8603 If CAN_REFER is non-NULL, set it to false when the method is not
8604 referable or the virtual table is ill-formed (such as rewritten by
8605 a non-C++ producer); if CAN_REFER is NULL, just return NULL in that case. */
8607 tree
8608 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8609 bool *can_refer)
8611 unsigned HOST_WIDE_INT offset;
8612 tree v;
8614 v = BINFO_VTABLE (known_binfo);
8615 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8616 if (!v)
8617 return NULL_TREE;
8619 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8621 if (can_refer)
8622 *can_refer = false;
8623 return NULL_TREE;
8625 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8628 /* Given a pointer value T, return a simplified version of an
8629 indirection through T, or NULL_TREE if no simplification is
8630 possible. Note that the resulting type may be different from
8631 the type pointed to in the sense that it is still compatible
8632 from the langhooks point of view. */
8634 tree
8635 gimple_fold_indirect_ref (tree t)
8637 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8638 tree sub = t;
8639 tree subtype;
8641 STRIP_NOPS (sub);
8642 subtype = TREE_TYPE (sub);
8643 if (!POINTER_TYPE_P (subtype)
8644 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8645 return NULL_TREE;
8647 if (TREE_CODE (sub) == ADDR_EXPR)
8649 tree op = TREE_OPERAND (sub, 0);
8650 tree optype = TREE_TYPE (op);
8651 /* *&p => p */
8652 if (useless_type_conversion_p (type, optype))
8653 return op;
8655 /* *(foo *)&fooarray => fooarray[0] */
8656 if (TREE_CODE (optype) == ARRAY_TYPE
8657 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8658 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8660 tree type_domain = TYPE_DOMAIN (optype);
8661 tree min_val = size_zero_node;
8662 if (type_domain && TYPE_MIN_VALUE (type_domain))
8663 min_val = TYPE_MIN_VALUE (type_domain);
8664 if (TREE_CODE (min_val) == INTEGER_CST)
8665 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8667 /* *(foo *)&complexfoo => __real__ complexfoo */
8668 else if (TREE_CODE (optype) == COMPLEX_TYPE
8669 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8670 return fold_build1 (REALPART_EXPR, type, op);
8671 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8672 else if (TREE_CODE (optype) == VECTOR_TYPE
8673 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8675 tree part_width = TYPE_SIZE (type);
8676 tree index = bitsize_int (0);
8677 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8681 /* *(p + CST) -> ... */
8682 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8683 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8685 tree addr = TREE_OPERAND (sub, 0);
8686 tree off = TREE_OPERAND (sub, 1);
8687 tree addrtype;
8689 STRIP_NOPS (addr);
8690 addrtype = TREE_TYPE (addr);
8692 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8693 if (TREE_CODE (addr) == ADDR_EXPR
8694 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8695 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8696 && tree_fits_uhwi_p (off))
8698 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8699 tree part_width = TYPE_SIZE (type);
8700 unsigned HOST_WIDE_INT part_widthi
8701 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8702 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8703 tree index = bitsize_int (indexi);
8704 if (known_lt (offset / part_widthi,
8705 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8706 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8707 part_width, index);
8710 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8711 if (TREE_CODE (addr) == ADDR_EXPR
8712 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8713 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8715 tree size = TYPE_SIZE_UNIT (type);
8716 if (tree_int_cst_equal (size, off))
8717 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8720 /* *(p + CST) -> MEM_REF <p, CST>. */
8721 if (TREE_CODE (addr) != ADDR_EXPR
8722 || DECL_P (TREE_OPERAND (addr, 0)))
8723 return fold_build2 (MEM_REF, type,
8724 addr,
8725 wide_int_to_tree (ptype, wi::to_wide (off)));
8728 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8729 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8730 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8731 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8733 tree type_domain;
8734 tree min_val = size_zero_node;
8735 tree osub = sub;
8736 sub = gimple_fold_indirect_ref (sub);
8737 if (! sub)
8738 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8739 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8740 if (type_domain && TYPE_MIN_VALUE (type_domain))
8741 min_val = TYPE_MIN_VALUE (type_domain);
8742 if (TREE_CODE (min_val) == INTEGER_CST)
8743 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8746 return NULL_TREE;
8749 /* Return true if CODE is an operation that when operating on signed
8750 integer types involves undefined behavior on overflow and the
8751 operation can be expressed with unsigned arithmetic. */
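/* E.g. PLUS_EXPR qualifies: signed INT_MAX + 1 is undefined, but the
   same addition wraps safely in unsigned arithmetic. TRUNC_DIV_EXPR
   does not, since the overflowing INT_MIN / -1 cannot be carried out
   in the corresponding unsigned type.  */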
8753 bool
8754 arith_code_with_undefined_signed_overflow (tree_code code)
8756 switch (code)
8758 case ABS_EXPR:
8759 case PLUS_EXPR:
8760 case MINUS_EXPR:
8761 case MULT_EXPR:
8762 case NEGATE_EXPR:
8763 case POINTER_PLUS_EXPR:
8764 return true;
8765 default:
8766 return false;
8770 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8771 operation that can be transformed to unsigned arithmetic by converting
8772 its operands, carrying out the operation in the corresponding unsigned
8773 type and converting the result back to the original type.
8775 If IN_PLACE is true, *GSI points to STMT; adjust the stmt in place
8776 and return NULL.
8777 Otherwise return a sequence of statements that replace STMT and also
8778 contain a modified form of STMT itself. */
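/* For example (a sketch), the signed addition

     x_1 = a_2 + 1;

   is rewritten to

     _3 = (unsigned int) a_2;
     _4 = _3 + 1;
     x_1 = (int) _4;

   so that later transforms cannot introduce new undefined
   overflow.  */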
8780 static gimple_seq
8781 rewrite_to_defined_overflow (gimple_stmt_iterator *gsi, gimple *stmt,
8782 bool in_place)
8784 if (dump_file && (dump_flags & TDF_DETAILS))
8786 fprintf (dump_file, "rewriting stmt with undefined signed "
8787 "overflow ");
8788 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8791 tree lhs = gimple_assign_lhs (stmt);
8792 tree type = unsigned_type_for (TREE_TYPE (lhs));
8793 gimple_seq stmts = NULL;
8794 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8795 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8796 else
8797 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8799 tree op = gimple_op (stmt, i);
8800 op = gimple_convert (&stmts, type, op);
8801 gimple_set_op (stmt, i, op);
8803 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8804 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8805 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8806 gimple_set_modified (stmt, true);
8807 if (in_place)
8809 if (stmts)
8810 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
8811 stmts = NULL;
8813 else
8814 gimple_seq_add_stmt (&stmts, stmt);
8815 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8816 if (in_place)
8818 gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
8819 update_stmt (stmt);
8821 else
8822 gimple_seq_add_stmt (&stmts, cvt);
8824 return stmts;
8827 void
8828 rewrite_to_defined_overflow (gimple_stmt_iterator *gsi)
8830 rewrite_to_defined_overflow (gsi, gsi_stmt (*gsi), true);
8833 gimple_seq
8834 rewrite_to_defined_overflow (gimple *stmt)
8836 return rewrite_to_defined_overflow (nullptr, stmt, false);
8839 /* The valueization hook we use for the gimple_build API simplification.
8840 This makes us match fold_buildN behavior by only combining with
8841 statements in the sequence(s) we are currently building. */
8843 static tree
8844 gimple_build_valueize (tree op)
8846 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8847 return op;
8848 return NULL_TREE;
8851 /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8853 static inline void
8854 gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8855 bool before, gsi_iterator_update update,
8856 gimple_seq seq)
8858 if (before)
8860 if (gsi->bb)
8861 gsi_insert_seq_before (gsi, seq, update);
8862 else
8863 gsi_insert_seq_before_without_update (gsi, seq, update);
8865 else
8867 if (gsi->bb)
8868 gsi_insert_seq_after (gsi, seq, update);
8869 else
8870 gsi_insert_seq_after_without_update (gsi, seq, update);
8874 /* Build the expression CODE OP0 of type TYPE with location LOC,
8875 simplifying it first if possible. Returns the built
8876 expression value and inserts statements possibly defining it
8877 before GSI if BEFORE is true or after GSI if false, advancing
8878 the iterator accordingly.
8879 If GSI refers to a basic block, simplifying is allowed to look
8880 at all SSA defs; when it does not, it is restricted to SSA defs
8881 that are not associated with a basic block yet, indicating that
8882 they belong to the sequence currently being built. */
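/* A sketch of typical use (names are hypothetical): to emit the
   possibly simplified negation of OP0 before GSI,

     tree neg = gimple_build (&gsi, true, GSI_SAME_STMT,
			      loc, NEGATE_EXPR, type, op0);

   which returns either an existing value or a fresh SSA name whose
   defining statement has been inserted before GSI.  */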
8884 tree
8885 gimple_build (gimple_stmt_iterator *gsi,
8886 bool before, gsi_iterator_update update,
8887 location_t loc, enum tree_code code, tree type, tree op0)
8889 gimple_seq seq = NULL;
8890 tree res
8891 = gimple_simplify (code, type, op0, &seq,
8892 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8893 if (!res)
8895 res = create_tmp_reg_or_ssa_name (type);
8896 gimple *stmt;
8897 if (code == REALPART_EXPR
8898 || code == IMAGPART_EXPR
8899 || code == VIEW_CONVERT_EXPR)
8900 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8901 else
8902 stmt = gimple_build_assign (res, code, op0);
8903 gimple_set_location (stmt, loc);
8904 gimple_seq_add_stmt_without_update (&seq, stmt);
8906 gimple_build_insert_seq (gsi, before, update, seq);
8907 return res;
8910 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8911 simplifying it first if possible. Returns the built
8912 expression value inserting any new statements at GSI honoring BEFORE
8913 and UPDATE. */
8915 tree
8916 gimple_build (gimple_stmt_iterator *gsi,
8917 bool before, gsi_iterator_update update,
8918 location_t loc, enum tree_code code, tree type,
8919 tree op0, tree op1)
8921 gimple_seq seq = NULL;
8922 tree res
8923 = gimple_simplify (code, type, op0, op1, &seq,
8924 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8925 if (!res)
8927 res = create_tmp_reg_or_ssa_name (type);
8928 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8929 gimple_set_location (stmt, loc);
8930 gimple_seq_add_stmt_without_update (&seq, stmt);
8932 gimple_build_insert_seq (gsi, before, update, seq);
8933 return res;
8936 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8937 simplifying it first if possible. Returns the built
8938 expression value inserting any new statements at GSI honoring BEFORE
8939 and UPDATE. */
8941 tree
8942 gimple_build (gimple_stmt_iterator *gsi,
8943 bool before, gsi_iterator_update update,
8944 location_t loc, enum tree_code code, tree type,
8945 tree op0, tree op1, tree op2)
8948 gimple_seq seq = NULL;
8949 tree res
8950 = gimple_simplify (code, type, op0, op1, op2, &seq,
8951 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8952 if (!res)
8954 res = create_tmp_reg_or_ssa_name (type);
8955 gimple *stmt;
8956 if (code == BIT_FIELD_REF)
8957 stmt = gimple_build_assign (res, code,
8958 build3 (code, type, op0, op1, op2));
8959 else
8960 stmt = gimple_build_assign (res, code, op0, op1, op2);
8961 gimple_set_location (stmt, loc);
8962 gimple_seq_add_stmt_without_update (&seq, stmt);
8964 gimple_build_insert_seq (gsi, before, update, seq);
8965 return res;
8968 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8969 void) with a location LOC. Returns the built expression value (or NULL_TREE
8970 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8971 and UPDATE. */
8973 tree
8974 gimple_build (gimple_stmt_iterator *gsi,
8975 bool before, gsi_iterator_update update,
8976 location_t loc, combined_fn fn, tree type)
8978 tree res = NULL_TREE;
8979 gimple_seq seq = NULL;
8980 gcall *stmt;
8981 if (internal_fn_p (fn))
8982 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8983 else
8985 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8986 stmt = gimple_build_call (decl, 0);
8988 if (!VOID_TYPE_P (type))
8990 res = create_tmp_reg_or_ssa_name (type);
8991 gimple_call_set_lhs (stmt, res);
8993 gimple_set_location (stmt, loc);
8994 gimple_seq_add_stmt_without_update (&seq, stmt);
8995 gimple_build_insert_seq (gsi, before, update, seq);
8996 return res;
8999 /* Build the call FN (ARG0) with a result of type TYPE
9000 (or no result if TYPE is void) with location LOC,
9001 simplifying it first if possible. Returns the built
9002 expression value (or NULL_TREE if TYPE is void) inserting any new
9003 statements at GSI honoring BEFORE and UPDATE. */
9005 tree
9006 gimple_build (gimple_stmt_iterator *gsi,
9007 bool before, gsi_iterator_update update,
9008 location_t loc, combined_fn fn,
9009 tree type, tree arg0)
9011 gimple_seq seq = NULL;
9012 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
9013 if (!res)
9015 gcall *stmt;
9016 if (internal_fn_p (fn))
9017 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
9018 else
9020 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9021 stmt = gimple_build_call (decl, 1, arg0);
9023 if (!VOID_TYPE_P (type))
9025 res = create_tmp_reg_or_ssa_name (type);
9026 gimple_call_set_lhs (stmt, res);
9028 gimple_set_location (stmt, loc);
9029 gimple_seq_add_stmt_without_update (&seq, stmt);
9031 gimple_build_insert_seq (gsi, before, update, seq);
9032 return res;
9035 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
9036 (or no result if TYPE is void) with location LOC,
9037 simplifying it first if possible. Returns the built
9038 expression value (or NULL_TREE if TYPE is void) inserting any new
9039 statements at GSI honoring BEFORE and UPDATE. */
9041 tree
9042 gimple_build (gimple_stmt_iterator *gsi,
9043 bool before, gsi_iterator_update update,
9044 location_t loc, combined_fn fn,
9045 tree type, tree arg0, tree arg1)
9047 gimple_seq seq = NULL;
9048 tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
9049 gimple_build_valueize);
9050 if (!res)
9052 gcall *stmt;
9053 if (internal_fn_p (fn))
9054 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
9055 else
9057 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9058 stmt = gimple_build_call (decl, 2, arg0, arg1);
9060 if (!VOID_TYPE_P (type))
9062 res = create_tmp_reg_or_ssa_name (type);
9063 gimple_call_set_lhs (stmt, res);
9065 gimple_set_location (stmt, loc);
9066 gimple_seq_add_stmt_without_update (&seq, stmt);
9068 gimple_build_insert_seq (gsi, before, update, seq);
9069 return res;
9072 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
9073 (or no result if TYPE is void) with location LOC,
9074 simplifying it first if possible. Returns the built
9075 expression value (or NULL_TREE if TYPE is void) inserting any new
9076 statements at GSI honoring BEFORE and UPDATE. */
9078 tree
9079 gimple_build (gimple_stmt_iterator *gsi,
9080 bool before, gsi_iterator_update update,
9081 location_t loc, combined_fn fn,
9082 tree type, tree arg0, tree arg1, tree arg2)
9084 gimple_seq seq = NULL;
9085 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
9086 &seq, gimple_build_valueize);
9087 if (!res)
9089 gcall *stmt;
9090 if (internal_fn_p (fn))
9091 stmt = gimple_build_call_internal (as_internal_fn (fn),
9092 3, arg0, arg1, arg2);
9093 else
9095 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9096 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
9098 if (!VOID_TYPE_P (type))
9100 res = create_tmp_reg_or_ssa_name (type);
9101 gimple_call_set_lhs (stmt, res);
9103 gimple_set_location (stmt, loc);
9104 gimple_seq_add_stmt_without_update (&seq, stmt);
9106 gimple_build_insert_seq (gsi, before, update, seq);
9107 return res;
/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
  return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
}
/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0, tree op1)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1);
}
/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
   is void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code,
	      tree type, tree op0, tree op1, tree op2)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1, op2);
}
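
/* Usage sketch (illustrative): the three overloads above let a caller
   holding a code_helper (which wraps either a tree code or a combined
   function) build either form through one interface.  Assuming a
   double SSA name X:

     tree a = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			    code_helper (NEGATE_EXPR), double_type_node, x);
     tree b = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			    code_helper (CFN_SQRT), double_type_node, x);  */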
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert (gimple_stmt_iterator *gsi,
		bool before, gsi_iterator_update update,
		location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
}
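
/* Usage sketch (illustrative): widen an assumed 32-bit value OP; when
   OP already has a type that useless_type_conversion_p accepts, OP
   itself is returned and no statement is emitted:

     tree wide = gimple_convert (&gsi, true, GSI_SAME_STMT, loc,
				 long_long_integer_type_node, op);  */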
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such a conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (gsi, before, update, loc, sizetype, op);
}
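
/* Usage sketch (illustrative, PTR and OFF assumed): POINTER_PLUS_EXPR
   requires its offset operand to have a type compatible with sizetype,
   so a signed offset OFF would typically be run through this helper
   before building the address computation:

     tree poff = gimple_convert_to_ptrofftype (&gsi, true, GSI_SAME_STMT,
					       loc, off);
     tree addr = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			       POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			       ptr, poff);  */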
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
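
/* Usage sketch (illustrative): splat a scalar X across an assumed
   vector type VECTYPE; for a constant X this folds to a VECTOR_CST
   and no statement is emitted:

     tree splat = gimple_build_vector_from_val (&gsi, true, GSI_SAME_STMT,
						loc, vectype, x);  */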
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new statements at GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  return builder->build ();
}
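
/* Usage sketch (illustrative): build the two-element vector {A, B} for
   an assumed two-element vector type VECTYPE; if A or B is an SSA name
   the result is assembled through a CONSTRUCTOR assignment, otherwise
   the builder yields a VECTOR_CST directly:

     tree_vector_builder builder (vectype, 2, 1);
     builder.quick_push (a);
     builder.quick_push (b);
     tree vec = gimple_build_vector (&gsi, true, GSI_SAME_STMT,
				     loc, &builder);  */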
/* Build an expression that rounds the value given in OLD_SIZE up to the
   next multiple of ALIGN, inserting any new statements at GSI honoring
   BEFORE and UPDATE.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_stmt_iterator *gsi,
		       bool before, gsi_iterator_update update,
		       location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
}
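
/* Worked example (illustrative): with ALIGN == 16, OLD_SIZE == 13
   computes (13 + 15) & -16 == 28 & -16 == 16, while an already aligned
   32 stays (32 + 15) & -16 == 32.  ALIGN is assumed to be a power of
   two, which is what makes the add-and-mask idiom valid.  */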
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0) : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree type = gimple_range_type (stmt);
  if (type && frange::supports_p (type))
    {
      frange r;
      bool sign;
      if (get_global_range_query ()->range_of_stmt (r, stmt)
	  && r.signbit_p (sign))
	return !sign;
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
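
/* Usage sketch (illustrative): query whether the value defined by an
   assumed statement STMT is known non-negative, starting at depth 0:

     bool strict_ovf = false;
     bool nonneg = gimple_stmt_nonnegative_warnv_p (stmt, &strict_ovf, 0);

   When NONNEG is true and STRICT_OVF was set, the conclusion rests on
   signed overflow being undefined, so callers may want to diagnose
   before exploiting it.  */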
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
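
/* Usage sketch (illustrative): integer_valued_real_call_p accepts
   rounding functions such as CFN_TRUNC unconditionally, so for an
   assumed call statement CALL computing trunc (x), the dispatcher that
   follows reports true regardless of X:

     bool iv = gimple_stmt_integer_valued_real_p (call, 0);  */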
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}