/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2024 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"
#include "gimple-range.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
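
/* For illustration: given 'char buf[8]' with unknown contents, SRK_STRLEN
   and SRK_STRLENMAX fail to compute a constant length, while SRK_LENRANGE
   yields the range [0, 7], using the array size less one byte for the
   terminating nul as the upper bound.  */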

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it
   into acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
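
/* For illustration: an initializer value of the form &a + 4, i.e. a
   POINTER_PLUS_EXPR over an ADDR_EXPR, is not acceptable to
   is_gimple_min_invariant; the function above rewrites it into the
   equivalent &MEM_REF[&a, 4], which is.  */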

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
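
/* For illustration: this folds IMAGPART_EXPR of the complex constant
   __complex__ (0, 1) to the constant 1, and a load such as a[1] from
   'static const int a[2] = { 1, 2 }' to 2 via fold_const_aggregate_ref.  */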

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
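
/* For illustration: this accepts _1 + _2 and &a.b, but rejects
   &a[i_1 + 1] because the embedded array index i_1 + 1 is not a GIMPLE
   value, and rejects a comparison whose type is a plain 'int' rather
   than a boolean or vector type.  */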

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				       "resolving virtual function address "
				       "reference to function %s\n",
				       targets.length () == 1
				       ? targets[0]->name ()
				       : "NULL");
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
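
/* For illustration: if the original statement had VUSE .MEM_3 and VDEF
   .MEM_5 and STMTS contains two stores, the backward pass gives the
   second store the original VDEF .MEM_5 and the first store a fresh
   name, say .MEM_7, and the forward pass then threads .MEM_3 -> .MEM_7
   -> .MEM_5 through the VUSEs, keeping the virtual SSA chain intact.  */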

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
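
/* For illustration: this is true for 'a', 'a.b[2]' and 'MEM_REF[&a, 4]',
   but false for a dereference '*p_1' of a pointer SSA name, whose base
   is a MEM_REF with a non-ADDR_EXPR operand.  */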

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
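
/* For illustration, on a target with 64-bit size_t: SSIZE_MAX is
   2^63 - 1, so if the range query proves SIZE is either zero or at
   least 2^63 (e.g. a value known to be negative when interpreted as
   ssize_t), intersecting with [0, SSIZE_MAX] leaves just zero and the
   function returns true.  */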

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;
      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

    set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
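
/* For illustration: with 'int a, b;', the call __builtin_memcpy (&a, &b,
   sizeof (int)) is folded by the function above into a load and store of
   a 32-bit integer type, effectively 'a = b;', while __builtin_mempcpy
   additionally yields the result dest + 4.  */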

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to C.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if ((!INTEGRAL_TYPE_P (etype)
       && !POINTER_TYPE_P (etype))
      || TREE_CODE (etype) == BITINT_TYPE)
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }
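
  /* CVAL now holds the byte value replicated into every byte, e.g.
     c == 0xab yields 0xabababababababab.  The last shift is written as
     (cval << 31) << 1 rather than cval << 32 so that it would remain
     well-defined even if HOST_WIDE_INT were only 32 bits wide.  */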

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
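
/* For illustration: for 'int i;', the call __builtin_memset (&i, 0xab,
   sizeof (int)) is folded by the function above into a single store of
   the pattern 0xabababab to i (given 32-bit int and suitable
   alignment).  */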

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     LENDATA.DECL with size LENDATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_ref_flexible_size_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1840 /* For an ARG referencing one or more strings, try to obtain the range
1841 of their lengths, or the size of the largest array ARG referes to if
1842 the range of lengths cannot be determined, and store all in *PDATA.
1843 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1844 the maximum constant value.
1845 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1846 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1847 length or if we are unable to determine the length, return false.
1848 VISITED is a bitmap of visited variables.
1849 RKIND determines the kind of value or range to obtain (see
1850 strlen_range_kind).
1851 Set PDATA->DECL if ARG refers to an unterminated constant array.
1852 On input, set ELTSIZE to 1 for normal single byte character strings,
1853 and either 2 or 4 for wide character strings (the size of wchar_t).
1854 Return true if *PDATA was successfully populated and false otherwise. */
1856 static bool
1857 get_range_strlen (tree arg, bitmap visited,
1858 strlen_range_kind rkind,
1859 c_strlen_data *pdata, unsigned eltsize)
1862 if (TREE_CODE (arg) != SSA_NAME)
1863 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1865 /* If ARG is registered for SSA update we cannot look at its defining
1866 statement. */
1867 if (name_registered_for_update_p (arg))
1868 return false;
1870 /* If we were already here, break the infinite cycle. */
1871 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1872 return true;
1874 tree var = arg;
1875 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1877 switch (gimple_code (def_stmt))
1879 case GIMPLE_ASSIGN:
1880 /* The RHS of the statement defining VAR must either have a
1881 constant length or come from another SSA_NAME with a constant
1882 length. */
1883 if (gimple_assign_single_p (def_stmt)
1884 || gimple_assign_unary_nop_p (def_stmt))
1886 tree rhs = gimple_assign_rhs1 (def_stmt);
1887 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1889 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1891 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1892 gimple_assign_rhs3 (def_stmt) };
1894 for (unsigned int i = 0; i < 2; i++)
1895 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1897 if (rkind != SRK_LENRANGE)
1898 return false;
1899 /* Set the upper bound to the maximum to prevent
1900 it from being adjusted in the next iteration but
1901 leave MINLEN and the more conservative MAXBOUND
1902 determined so far alone (or leave them null if
1903 they haven't been set yet). That the MINLEN is
1904 in fact zero can be determined from MAXLEN being
1905 unbounded but the discovered minimum is used for
1906 diagnostics. */
1907 pdata->maxlen = build_all_ones_cst (size_type_node);
1909 return true;
1911 return false;
1913 case GIMPLE_PHI:
1914 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1915 must have a constant length. */
1916 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1918 tree arg = gimple_phi_arg (def_stmt, i)->def;
1920 /* If this PHI has itself as an argument, we cannot
1921 determine the string length of this argument. However,
1922 if we can find a constant string length for the other
1923 PHI args then we can still be sure that this is a
1924 constant string length. So be optimistic and just
1925 continue with the next argument. */
1926 if (arg == gimple_phi_result (def_stmt))
1927 continue;
1929 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1931 if (rkind != SRK_LENRANGE)
1932 return false;
1933 /* Set the upper bound to the maximum to prevent
1934 it from being adjusted in the next iteration but
1935 leave MINLEN and the more conservative MAXBOUND
1936 determined so far alone (or leave them null if
1937 they haven't been set yet). That the MINLEN is
1938 in fact zero can be determined from MAXLEN being
1939 unbounded but the discovered minimum is used for
1940 diagnostics. */
1941 pdata->maxlen = build_all_ones_cst (size_type_node);
1944 return true;
1946 default:
1947 return false;
1951 /* Try to obtain the range of the lengths of the string(s) referenced
1952 by ARG, or the size of the largest array ARG refers to if the range
1953 of lengths cannot be determined, and store all in *PDATA which must
1954 be zero-initialized on input except PDATA->MAXBOUND may be set to
1955 a non-null tree node other than INTEGER_CST to request to have it
1956 set to the length of the longest string in a PHI. ELTSIZE is
1957 the expected size of the string element in bytes: 1 for char and
1958 some power of 2 for wide characters.
1959 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1960 for optimization. Returning false means that a nonzero PDATA->MINLEN
1961 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1962 is -1 (in that case, the actual range is indeterminate, i.e.,
1963 [0, PTRDIFF_MAX - 2]). */
1965 bool
1966 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1968 auto_bitmap visited;
1969 tree maxbound = pdata->maxbound;
1971 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1973 /* On failure extend the length range to an impossible maximum
1974 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1975 members can stay unchanged regardless. */
1976 pdata->minlen = ssize_int (0);
1977 pdata->maxlen = build_all_ones_cst (size_type_node);
1979 else if (!pdata->minlen)
1980 pdata->minlen = ssize_int (0);
1982 /* If it's unchanged from its initial non-null value, set the conservative
1983 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1984 if (maxbound && pdata->maxbound == maxbound)
1985 pdata->maxbound = build_all_ones_cst (size_type_node);
1987 return !integer_all_onesp (pdata->maxlen);
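/* Usage sketch (a hypothetical caller; the names are from this file):
   query the range of lengths of the string STR may point to.

     c_strlen_data lendata = { };
     if (get_range_strlen (str, &lendata, 1))
       {
	 /* [lendata.minlen, lendata.maxlen] bounds strlen (str).  */
       }

   For, e.g., a char[8] object with unknown contents this may yield
   the range [0, 7].  */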
1990 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1991 For ARG of pointer types, NONSTR indicates if the caller is prepared
1992 to handle unterminated strings. For integer ARG and when RKIND ==
1993 SRK_INT_VALUE, NONSTR must be null.
1995 If an unterminated array is discovered and our caller handles
1996 unterminated arrays, then bubble up the offending DECL and
1997 return the maximum size. Otherwise return NULL. */
1999 static tree
2000 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2002 /* A non-null NONSTR is meaningless when determining the maximum
2003 value of an integer ARG. */
2004 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2005 /* ARG must have an integral type when RKIND says so. */
2006 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2008 auto_bitmap visited;
2010 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2011 is unbounded. */
2012 c_strlen_data lendata = { };
2013 if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2014 lendata.maxlen = NULL_TREE;
2015 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2016 lendata.maxlen = NULL_TREE;
2018 if (nonstr)
2020 /* For callers prepared to handle unterminated arrays set
2021 *NONSTR to point to the declaration of the array and return
2022 the maximum length/size. */
2023 *nonstr = lendata.decl;
2024 return lendata.maxlen;
2027 /* Fail if the constant array isn't nul-terminated. */
2028 return lendata.decl ? NULL_TREE : lendata.maxlen;
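/* For illustration: when ARG points to the literal "abc",
   get_maxval_strlen (arg, SRK_STRLEN) yields the INTEGER_CST 3;
   when ARG refers to an unterminated constant array and NONSTR is
   non-null, *NONSTR receives the array's DECL and the maximum
   length/size is returned instead of failing.  */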
2031 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2032 true, strictly less than) the lower bound of SIZE at compile time and false
2033 otherwise. */
2035 static bool
2036 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2038 if (len == NULL_TREE)
2039 return false;
2041 wide_int size_range[2];
2042 wide_int len_range[2];
2043 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2045 if (strict)
2046 return wi::ltu_p (len_range[1], size_range[0]);
2047 else
2048 return wi::leu_p (len_range[1], size_range[0]);
2051 return false;
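/* A worked example: if LEN is known to be in the range [0, 3] and
   SIZE in [8, 16], known_lower returns true because the upper bound
   of LEN (3) is below the lower bound of SIZE (8).  With STRICT set
   the comparison is strict (<), otherwise it is <=.  */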
2054 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2055 Return false if no simplification can be made and true otherwise. */
2058 static bool
2059 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2060 tree dest, tree src)
2062 gimple *stmt = gsi_stmt (*gsi);
2063 location_t loc = gimple_location (stmt);
2064 tree fn;
2066 /* If SRC and DEST are the same (and not volatile), return DEST. */
2067 if (operand_equal_p (src, dest, 0))
2069 /* Issue -Wrestrict unless the pointers are null (those do
2070 not point to objects and so do not indicate an overlap;
2071 such calls could be the result of sanitization and jump
2072 threading). */
2073 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2075 tree func = gimple_call_fndecl (stmt);
2077 warning_at (loc, OPT_Wrestrict,
2078 "%qD source argument is the same as destination",
2079 func);
2082 replace_call_with_value (gsi, dest);
2083 return true;
2086 if (optimize_function_for_size_p (cfun))
2087 return false;
2089 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2090 if (!fn)
2091 return false;
2093 /* Set to non-null if ARG refers to an unterminated array. */
2094 tree nonstr = NULL;
2095 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2097 if (nonstr)
2099 /* Avoid folding calls with unterminated arrays. */
2100 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2101 warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2102 suppress_warning (stmt, OPT_Wstringop_overread);
2103 return false;
2106 if (!len)
2107 return false;
2109 len = fold_convert_loc (loc, size_type_node, len);
2110 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2111 len = force_gimple_operand_gsi (gsi, len, true,
2112 NULL_TREE, true, GSI_SAME_STMT);
2113 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2114 replace_call_with_call_and_fold (gsi, repl);
2115 return true;
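/* An illustrative sketch of the folding above: when SRC is the
   literal "abc", the call

     strcpy (dest, "abc");

   is replaced by

     memcpy (dest, "abc", 4);

   where 4 is strlen ("abc") + 1 for the terminating nul.  */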
2118 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2119 Return false if no simplification can be made and true otherwise. */
2122 static bool
2123 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2124 tree dest, tree src, tree len)
2126 gimple *stmt = gsi_stmt (*gsi);
2127 location_t loc = gimple_location (stmt);
2128 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2130 /* If the LEN parameter is zero, return DEST. */
2131 if (integer_zerop (len))
2133 /* Avoid warning if the destination refers to an array/pointer
2134 decorated with attribute nonstring. */
2135 if (!nonstring)
2137 tree fndecl = gimple_call_fndecl (stmt);
2139 /* Warn about the lack of nul termination: the result is not
2140 a (nul-terminated) string. */
2141 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2142 if (slen && !integer_zerop (slen))
2143 warning_at (loc, OPT_Wstringop_truncation,
2144 "%qD destination unchanged after copying no bytes "
2145 "from a string of length %E",
2146 fndecl, slen);
2147 else
2148 warning_at (loc, OPT_Wstringop_truncation,
2149 "%qD destination unchanged after copying no bytes",
2150 fndecl);
2153 replace_call_with_value (gsi, dest);
2154 return true;
2157 /* We can't compare slen with len as constants below if len is not a
2158 constant. */
2159 if (TREE_CODE (len) != INTEGER_CST)
2160 return false;
2162 /* Now, we must be passed a constant src ptr parameter. */
2163 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2164 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2165 return false;
2167 /* The size of the source string including the terminating nul. */
2168 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2170 /* We do not support simplification of this case, though we do
2171 support it when expanding trees into RTL. */
2172 /* FIXME: generate a call to __builtin_memset. */
2173 if (tree_int_cst_lt (ssize, len))
2174 return false;
2176 /* Diagnose truncation that leaves the copy unterminated. */
2177 maybe_diag_stxncpy_trunc (*gsi, src, len);
2179 /* OK, transform into builtin memcpy. */
2180 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2181 if (!fn)
2182 return false;
2184 len = fold_convert_loc (loc, size_type_node, len);
2185 len = force_gimple_operand_gsi (gsi, len, true,
2186 NULL_TREE, true, GSI_SAME_STMT);
2187 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2188 replace_call_with_call_and_fold (gsi, repl);
2190 return true;
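/* For illustration: with SRC the literal "ab" (so SSIZE is 3), the
   call

     strncpy (dest, "ab", 3);

   is replaced by memcpy (dest, "ab", 3), while strncpy (dest, "ab", 5)
   is left alone because memcpy would not implement the zero padding
   of the remaining two bytes.  */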
2193 /* Fold function call to builtin strchr or strrchr.
2194 If both arguments are constant, evaluate and fold the result,
2195 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2196 In general strlen is significantly faster than strchr
2197 due to being a simpler operation. */
2198 static bool
2199 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2201 gimple *stmt = gsi_stmt (*gsi);
2202 tree str = gimple_call_arg (stmt, 0);
2203 tree c = gimple_call_arg (stmt, 1);
2204 location_t loc = gimple_location (stmt);
2205 const char *p;
2206 char ch;
2208 if (!gimple_call_lhs (stmt))
2209 return false;
2211 /* Avoid folding if the first argument is not a nul-terminated array.
2212 Defer warning until later. */
2213 if (!check_nul_terminated_array (NULL_TREE, str))
2214 return false;
2216 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2218 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2220 if (p1 == NULL)
2222 replace_call_with_value (gsi, integer_zero_node);
2223 return true;
2226 tree len = build_int_cst (size_type_node, p1 - p);
2227 gimple_seq stmts = NULL;
2228 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2229 POINTER_PLUS_EXPR, str, len);
2230 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2231 gsi_replace_with_seq_vops (gsi, stmts);
2232 return true;
2235 if (!integer_zerop (c))
2236 return false;
2238 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2239 if (is_strrchr && optimize_function_for_size_p (cfun))
2241 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2243 if (strchr_fn)
2245 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2246 replace_call_with_call_and_fold (gsi, repl);
2247 return true;
2250 return false;
2253 tree len;
2254 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2256 if (!strlen_fn)
2257 return false;
2259 /* Create newstr = strlen (str). */
2260 gimple_seq stmts = NULL;
2261 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2262 gimple_set_location (new_stmt, loc);
2263 len = create_tmp_reg_or_ssa_name (size_type_node);
2264 gimple_call_set_lhs (new_stmt, len);
2265 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2267 /* Create (str p+ strlen (str)). */
2268 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2269 POINTER_PLUS_EXPR, str, len);
2270 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2271 gsi_replace_with_seq_vops (gsi, stmts);
2272 /* gsi now points at the assignment to the lhs, get a
2273 stmt iterator to the strlen.
2274 ??? We can't use gsi_for_stmt as that doesn't work when the
2275 CFG isn't built yet. */
2276 gimple_stmt_iterator gsi2 = *gsi;
2277 gsi_prev (&gsi2);
2278 fold_stmt (&gsi2);
2279 return true;
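/* For illustration: strchr ("hello", 'l') folds to the constant
   "hello" + 2, while for a nonconstant S the call strchr (s, 0)
   becomes (modulo SSA temporaries)

     tmp = strlen (s);
     lhs = s + tmp;

   strrchr (s, 0) is transformed the same way since the first and the
   last occurrence of the nul coincide.  */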
2282 /* Fold function call to builtin strstr.
2283 If both arguments are constant, evaluate and fold the result,
2284 additionally fold strstr (x, "") into x and strstr (x, "c")
2285 into strchr (x, 'c'). */
2286 static bool
2287 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2289 gimple *stmt = gsi_stmt (*gsi);
2290 if (!gimple_call_lhs (stmt))
2291 return false;
2293 tree haystack = gimple_call_arg (stmt, 0);
2294 tree needle = gimple_call_arg (stmt, 1);
2296 /* Avoid folding if either argument is not a nul-terminated array.
2297 Defer warning until later. */
2298 if (!check_nul_terminated_array (NULL_TREE, haystack)
2299 || !check_nul_terminated_array (NULL_TREE, needle))
2300 return false;
2302 const char *q = c_getstr (needle);
2303 if (q == NULL)
2304 return false;
2306 if (const char *p = c_getstr (haystack))
2308 const char *r = strstr (p, q);
2310 if (r == NULL)
2312 replace_call_with_value (gsi, integer_zero_node);
2313 return true;
2316 tree len = build_int_cst (size_type_node, r - p);
2317 gimple_seq stmts = NULL;
2318 gimple *new_stmt
2319 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2320 haystack, len);
2321 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2322 gsi_replace_with_seq_vops (gsi, stmts);
2323 return true;
2326 /* For strstr (x, "") return x. */
2327 if (q[0] == '\0')
2329 replace_call_with_value (gsi, haystack);
2330 return true;
2333 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2334 if (q[1] == '\0')
2336 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2337 if (strchr_fn)
2339 tree c = build_int_cst (integer_type_node, q[0]);
2340 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2341 replace_call_with_call_and_fold (gsi, repl);
2342 return true;
2346 return false;
2349 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2350 to the call.
2352 Return false if no simplification was possible and true otherwise.
2355 The simplified call computes the same value in a more efficient
2356 manner: DST alone when SRC is the empty string, or a call to
2357 strlen followed by a call to memcpy when the length of SRC is a
2358 known constant and the block is optimized for speed. */
2367 static bool
2368 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2370 gimple *stmt = gsi_stmt (*gsi);
2371 location_t loc = gimple_location (stmt);
2373 const char *p = c_getstr (src);
2375 /* If the string length is zero, return the dst parameter. */
2376 if (p && *p == '\0')
2378 replace_call_with_value (gsi, dst);
2379 return true;
2382 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2383 return false;
2385 /* See if we can store by pieces into (dst + strlen(dst)). */
2386 tree newdst;
2387 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2388 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2390 if (!strlen_fn || !memcpy_fn)
2391 return false;
2393 /* If the length of the source string isn't computable don't
2394 split strcat into strlen and memcpy. */
2395 tree len = get_maxval_strlen (src, SRK_STRLEN);
2396 if (! len)
2397 return false;
2399 /* Create strlen (dst). */
2400 gimple_seq stmts = NULL, stmts2;
2401 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2402 gimple_set_location (repl, loc);
2403 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2404 gimple_call_set_lhs (repl, newdst);
2405 gimple_seq_add_stmt_without_update (&stmts, repl);
2407 /* Create (dst p+ strlen (dst)). */
2408 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2409 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2410 gimple_seq_add_seq_without_update (&stmts, stmts2);
2412 len = fold_convert_loc (loc, size_type_node, len);
2413 len = size_binop_loc (loc, PLUS_EXPR, len,
2414 build_int_cst (size_type_node, 1));
2415 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2416 gimple_seq_add_seq_without_update (&stmts, stmts2);
2418 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2419 gimple_seq_add_stmt_without_update (&stmts, repl);
2420 if (gimple_call_lhs (stmt))
2422 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* gsi now points at the assignment to the lhs, get a
2426 stmt iterator to the memcpy call.
2427 ??? We can't use gsi_for_stmt as that doesn't work when the
2428 CFG isn't built yet. */
2429 gimple_stmt_iterator gsi2 = *gsi;
2430 gsi_prev (&gsi2);
2431 fold_stmt (&gsi2);
2433 else
2435 gsi_replace_with_seq_vops (gsi, stmts);
2436 fold_stmt (gsi);
2438 return true;
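/* An illustrative sketch: when SRC is the literal "ab", the call

     strcat (dst, "ab");

   becomes (modulo SSA temporaries)

     tmp = strlen (dst);
     memcpy (dst + tmp, "ab", 3);

   copying 'a', 'b' and the terminating nul.  */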
2441 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2442 are the arguments to the call. */
2444 static bool
2445 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2447 gimple *stmt = gsi_stmt (*gsi);
2448 tree dest = gimple_call_arg (stmt, 0);
2449 tree src = gimple_call_arg (stmt, 1);
2450 tree size = gimple_call_arg (stmt, 2);
2451 tree fn;
2452 const char *p;
2455 p = c_getstr (src);
2456 /* If the SRC parameter is "", return DEST. */
2457 if (p && *p == '\0')
2459 replace_call_with_value (gsi, dest);
2460 return true;
2463 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2464 return false;
2466 /* If __builtin_strcat_chk is used, assume strcat is available. */
2467 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2468 if (!fn)
2469 return false;
2471 gimple *repl = gimple_build_call (fn, 2, dest, src);
2472 replace_call_with_call_and_fold (gsi, repl);
2473 return true;
2476 /* Simplify a call to the strncat builtin. */
2478 static bool
2479 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2481 gimple *stmt = gsi_stmt (*gsi);
2482 tree dst = gimple_call_arg (stmt, 0);
2483 tree src = gimple_call_arg (stmt, 1);
2484 tree len = gimple_call_arg (stmt, 2);
2485 tree src_len = c_strlen (src, 1);
2487 /* If the requested length is zero, or the src parameter string
2488 length is zero, return the dst parameter. */
2489 if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2491 replace_call_with_value (gsi, dst);
2492 return true;
2495 /* Return early if the requested len is less than the string length.
2496 Warnings will be issued elsewhere later. */
2497 if (!src_len || known_lower (stmt, len, src_len, true))
2498 return false;
2500 /* Warn on constant LEN. */
2501 if (TREE_CODE (len) == INTEGER_CST)
2503 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2504 tree dstsize;
2506 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2507 && TREE_CODE (dstsize) == INTEGER_CST)
2509 int cmpdst = tree_int_cst_compare (len, dstsize);
2511 if (cmpdst >= 0)
2513 tree fndecl = gimple_call_fndecl (stmt);
2515 /* Strncat copies (at most) LEN bytes and always appends
2516 the terminating NUL so the specified bound should never
2517 be equal to (or greater than) the size of the destination.
2518 If it is, the copy could overflow. */
2519 location_t loc = gimple_location (stmt);
2520 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2521 cmpdst == 0
2522 ? G_("%qD specified bound %E equals "
2523 "destination size")
2524 : G_("%qD specified bound %E exceeds "
2525 "destination size %E"),
2526 fndecl, len, dstsize);
2527 if (nowarn)
2528 suppress_warning (stmt, OPT_Wstringop_overflow_);
2532 if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2533 && tree_int_cst_compare (src_len, len) == 0)
2535 tree fndecl = gimple_call_fndecl (stmt);
2536 location_t loc = gimple_location (stmt);
2538 /* To avoid possible overflow the specified bound should also
2539 not be equal to the length of the source, even when the size
2540 of the destination is unknown (it's not an uncommon mistake
2541 to specify as the bound to strncat the length of the source). */
2542 if (warning_at (loc, OPT_Wstringop_overflow_,
2543 "%qD specified bound %E equals source length",
2544 fndecl, len))
2545 suppress_warning (stmt, OPT_Wstringop_overflow_);
2549 if (!known_lower (stmt, src_len, len))
2550 return false;
2552 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2554 /* If the replacement _DECL isn't initialized, don't do the
2555 transformation. */
2556 if (!fn)
2557 return false;
2559 /* Otherwise, emit a call to strcat. */
2560 gcall *repl = gimple_build_call (fn, 2, dst, src);
2561 replace_call_with_call_and_fold (gsi, repl);
2562 return true;
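/* For illustration: with SRC the literal "ab", the call

     strncat (dst, "ab", 5);

   is replaced by strcat (dst, "ab") because the bound 5 is known to
   exceed the source length 2, so both calls append exactly the same
   bytes.  */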
2565 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2566 LEN, and SIZE. */
2568 static bool
2569 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2571 gimple *stmt = gsi_stmt (*gsi);
2572 tree dest = gimple_call_arg (stmt, 0);
2573 tree src = gimple_call_arg (stmt, 1);
2574 tree len = gimple_call_arg (stmt, 2);
2575 tree size = gimple_call_arg (stmt, 3);
2576 tree fn;
2577 const char *p;
2579 p = c_getstr (src);
2580 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2581 if ((p && *p == '\0')
2582 || integer_zerop (len))
2584 replace_call_with_value (gsi, dest);
2585 return true;
2588 if (! integer_all_onesp (size))
2590 tree src_len = c_strlen (src, 1);
2591 if (known_lower (stmt, src_len, len))
2593 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2594 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2595 if (!fn)
2596 return false;
2598 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2602 return false;
2605 /* If __builtin_strncat_chk is used, assume strncat is available. */
2606 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2607 if (!fn)
2608 return false;
2610 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2611 replace_call_with_call_and_fold (gsi, repl);
2612 return true;
2615 /* Build and append gimple statements to STMTS that would load the first
2616 character of the memory location identified by STR. LOC is the
2617 location of the statement. */
2619 static tree
2620 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2622 tree var;
2624 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2625 tree cst_uchar_ptr_node
2626 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2627 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2629 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2630 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2631 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2633 gimple_assign_set_lhs (stmt, var);
2634 gimple_seq_add_stmt_without_update (stmts, stmt);
2636 return var;
2639 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
2641 static bool
2642 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2644 gimple *stmt = gsi_stmt (*gsi);
2645 tree callee = gimple_call_fndecl (stmt);
2646 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2648 tree type = integer_type_node;
2649 tree str1 = gimple_call_arg (stmt, 0);
2650 tree str2 = gimple_call_arg (stmt, 1);
2651 tree lhs = gimple_call_lhs (stmt);
2653 tree bound_node = NULL_TREE;
2654 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2656 /* Handle strncmp and strncasecmp functions. */
2657 if (gimple_call_num_args (stmt) == 3)
2659 bound_node = gimple_call_arg (stmt, 2);
2660 if (tree_fits_uhwi_p (bound_node))
2661 bound = tree_to_uhwi (bound_node);
2664 /* If the BOUND parameter is zero, return zero. */
2665 if (bound == 0)
2667 replace_call_with_value (gsi, integer_zero_node);
2668 return true;
2671 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2672 if (operand_equal_p (str1, str2, 0))
2674 replace_call_with_value (gsi, integer_zero_node);
2675 return true;
2678 /* Initially set to the number of characters, including the terminating
2679 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2680 the array Sx is not terminated by a nul.
2681 For nul-terminated strings they are then adjusted down to the string
2682 length so that LENx == NULPOSx holds. */
2683 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2684 const char *p1 = getbyterep (str1, &len1);
2685 const char *p2 = getbyterep (str2, &len2);
2687 /* The position of the terminating nul character if one exists, otherwise
2688 a value greater than LENx. */
2689 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2691 if (p1)
2693 size_t n = strnlen (p1, len1);
2694 if (n < len1)
2695 len1 = nulpos1 = n;
2698 if (p2)
2700 size_t n = strnlen (p2, len2);
2701 if (n < len2)
2702 len2 = nulpos2 = n;
2705 /* For known strings, return an immediate value. */
2706 if (p1 && p2)
2708 int r = 0;
2709 bool known_result = false;
2711 switch (fcode)
2713 case BUILT_IN_STRCMP:
2714 case BUILT_IN_STRCMP_EQ:
2715 if (len1 != nulpos1 || len2 != nulpos2)
2716 break;
2718 r = strcmp (p1, p2);
2719 known_result = true;
2720 break;
2722 case BUILT_IN_STRNCMP:
2723 case BUILT_IN_STRNCMP_EQ:
2725 if (bound == HOST_WIDE_INT_M1U)
2726 break;
2728 /* Reduce the bound to be no more than the length
2729 of the shorter of the two strings, or the sizes
2730 of the unterminated arrays. */
2731 unsigned HOST_WIDE_INT n = bound;
2733 if (len1 == nulpos1 && len1 < n)
2734 n = len1 + 1;
2735 if (len2 == nulpos2 && len2 < n)
2736 n = len2 + 1;
2738 if (MIN (nulpos1, nulpos2) + 1 < n)
2739 break;
2741 r = strncmp (p1, p2, n);
2742 known_result = true;
2743 break;
2745 /* The only handleable situation is where the strings are equal (result
2746 0), which is already covered by the operand_equal_p case above. */
2747 case BUILT_IN_STRCASECMP:
2748 break;
2749 case BUILT_IN_STRNCASECMP:
2751 if (bound == HOST_WIDE_INT_M1U)
2752 break;
2753 r = strncmp (p1, p2, bound);
2754 if (r == 0)
2755 known_result = true;
2756 break;
2758 default:
2759 gcc_unreachable ();
2762 if (known_result)
2764 replace_call_with_value (gsi, build_cmp_result (type, r));
2765 return true;
2769 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2770 || fcode == BUILT_IN_STRCMP
2771 || fcode == BUILT_IN_STRCMP_EQ
2772 || fcode == BUILT_IN_STRCASECMP;
2774 location_t loc = gimple_location (stmt);
2776 /* If the second arg is "", return *(const unsigned char*)arg1. */
2777 if (p2 && *p2 == '\0' && nonzero_bound)
2779 gimple_seq stmts = NULL;
2780 tree var = gimple_load_first_char (loc, str1, &stmts);
2781 if (lhs)
2783 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2784 gimple_seq_add_stmt_without_update (&stmts, stmt);
2787 gsi_replace_with_seq_vops (gsi, stmts);
2788 return true;
2791 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2792 if (p1 && *p1 == '\0' && nonzero_bound)
2794 gimple_seq stmts = NULL;
2795 tree var = gimple_load_first_char (loc, str2, &stmts);
2797 if (lhs)
2799 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2800 stmt = gimple_build_assign (c, NOP_EXPR, var);
2801 gimple_seq_add_stmt_without_update (&stmts, stmt);
2803 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2804 gimple_seq_add_stmt_without_update (&stmts, stmt);
2807 gsi_replace_with_seq_vops (gsi, stmts);
2808 return true;
2811 /* If BOUND is one, return an expression corresponding to
2812 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2813 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2815 gimple_seq stmts = NULL;
2816 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2817 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2819 if (lhs)
2821 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2822 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2823 gimple_seq_add_stmt_without_update (&stmts, convert1);
2825 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2826 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2827 gimple_seq_add_stmt_without_update (&stmts, convert2);
2829 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2830 gimple_seq_add_stmt_without_update (&stmts, stmt);
2833 gsi_replace_with_seq_vops (gsi, stmts);
2834 return true;
2837 /* If BOUND is greater than the length of one constant string,
2838 and the other argument is also a nul-terminated string, replace
2839 strncmp with strcmp. */
2840 if (fcode == BUILT_IN_STRNCMP
2841 && bound > 0 && bound < HOST_WIDE_INT_M1U
2842 && ((p2 && len2 < bound && len2 == nulpos2)
2843 || (p1 && len1 < bound && len1 == nulpos1)))
2845 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2846 if (!fn)
2847 return false;
2848 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2849 replace_call_with_call_and_fold (gsi, repl);
2850 return true;
2853 return false;
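/* A few illustrations of the cases handled above (the emitted GIMPLE
   uses unsigned char loads and SSA temporaries):

     strcmp ("ab", "ac")   -> a negative constant
     strncmp (s, "", 4)    -> *(const unsigned char *) s
     strncmp (s, t, 1)     -> *(const unsigned char *) s
			      - *(const unsigned char *) t
     strncmp (s, "ab", 4)  -> strcmp (s, "ab")  */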
2856 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2858 static bool
2859 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2861 gimple *stmt = gsi_stmt (*gsi);
2862 tree lhs = gimple_call_lhs (stmt);
2863 tree arg1 = gimple_call_arg (stmt, 0);
2864 tree arg2 = gimple_call_arg (stmt, 1);
2865 tree len = gimple_call_arg (stmt, 2);
2867 /* If the LEN parameter is zero, return zero. */
2868 if (integer_zerop (len))
2870 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2871 return true;
2874 char c;
2875 if (TREE_CODE (arg2) != INTEGER_CST
2876 || !tree_fits_uhwi_p (len)
2877 || !target_char_cst_p (arg2, &c))
2878 return false;
2880 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2881 unsigned HOST_WIDE_INT string_length;
2882 const char *p1 = getbyterep (arg1, &string_length);
2884 if (p1)
2886 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2887 if (r == NULL)
2889 tree mem_size, offset_node;
2890 byte_representation (arg1, &offset_node, &mem_size, NULL);
2891 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2892 ? 0 : tree_to_uhwi (offset_node);
2893 /* MEM_SIZE is the size of the array the string literal
2894 is stored in. */
2895 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2896 gcc_checking_assert (string_length <= string_size);
2897 if (length <= string_size)
2899 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2900 return true;
2903 else
2905 unsigned HOST_WIDE_INT offset = r - p1;
2906 gimple_seq stmts = NULL;
2907 if (lhs != NULL_TREE)
2909 tree offset_cst = build_int_cst (sizetype, offset);
2910 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2911 arg1, offset_cst);
2912 gimple_seq_add_stmt_without_update (&stmts, stmt);
2914 else
2915 gimple_seq_add_stmt_without_update (&stmts,
2916 gimple_build_nop ());
2918 gsi_replace_with_seq_vops (gsi, stmts);
2919 return true;
2923 return false;
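/* For illustration: memchr ("hello", 'l', 5) folds to the constant
   "hello" + 2, and memchr ("hello", 'z', 5) folds to a null pointer
   because 'z' does not occur in the first five bytes.  */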
2926 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2927 to the call. UNLOCKED is true if this is actually a call to
2928 fputs_unlocked. Return false if no simplification was possible
2929 and true otherwise. */
2933 static bool
2934 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2935 tree arg0, tree arg1,
2936 bool unlocked)
2938 gimple *stmt = gsi_stmt (*gsi);
2940 /* If we're using an unlocked function, assume the other unlocked
2941 functions exist explicitly. */
2942 tree const fn_fputc = (unlocked
2943 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2944 : builtin_decl_implicit (BUILT_IN_FPUTC));
2945 tree const fn_fwrite = (unlocked
2946 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2947 : builtin_decl_implicit (BUILT_IN_FWRITE));
2949 /* If the return value is used, don't do the transformation. */
2950 if (gimple_call_lhs (stmt))
2951 return false;
2953 /* Get the length of the string passed to fputs. If the length
2954 can't be determined, punt. */
2955 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2956 if (!len || TREE_CODE (len) != INTEGER_CST)
2957 return false;
2959 switch (compare_tree_int (len, 1))
2961 case -1: /* length is 0, delete the call entirely. */
2962 replace_call_with_value (gsi, integer_zero_node);
2963 return true;
2965 case 0: /* length is 1, call fputc. */
2967 const char *p = c_getstr (arg0);
2968 if (p != NULL)
2970 if (!fn_fputc)
2971 return false;
2973 gimple *repl
2974 = gimple_build_call (fn_fputc, 2,
2975 build_int_cst (integer_type_node, p[0]),
2976 arg1);
2977 replace_call_with_call_and_fold (gsi, repl);
2978 return true;
2981 /* FALLTHROUGH */
2982 case 1: /* length is greater than 1, call fwrite. */
2984 /* If optimizing for size keep fputs. */
2985 if (optimize_function_for_size_p (cfun))
2986 return false;
2987 /* New argument list transforming fputs(string, stream) to
2988 fwrite(string, 1, len, stream). */
2989 if (!fn_fwrite)
2990 return false;
2992 gimple *repl
2993 = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
2994 fold_convert (size_type_node, len), arg1);
2995 replace_call_with_call_and_fold (gsi, repl);
2996 return true;
2998 default:
2999 gcc_unreachable ();
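/* For illustration, when the result is unused: fputs ("", f) is
   removed entirely, fputs ("a", f) becomes fputc ('a', f), and,
   unless optimizing for size, fputs ("abc", f) becomes
   fwrite ("abc", 1, 3, f).  */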
3003 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3004 DEST, SRC, LEN, and SIZE are the arguments to the call. FCODE is
3005 the BUILT_IN_* code of the builtin. Return false if no
3006 simplification was possible and true otherwise. */
3009 static bool
3010 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
3011 tree dest, tree src, tree len, tree size,
3012 enum built_in_function fcode)
3014 gimple *stmt = gsi_stmt (*gsi);
3015 location_t loc = gimple_location (stmt);
3016 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3017 tree fn;
3019 /* If SRC and DEST are the same (and not volatile), return DEST
3020 (resp. DEST+LEN for __mempcpy_chk). */
3021 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3023 if (fcode != BUILT_IN_MEMPCPY_CHK)
3025 replace_call_with_value (gsi, dest);
3026 return true;
3028 else
3030 gimple_seq stmts = NULL;
3031 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3032 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3033 TREE_TYPE (dest), dest, len);
3034 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3035 replace_call_with_value (gsi, temp);
3036 return true;
3040 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3041 if (! integer_all_onesp (size)
3042 && !known_lower (stmt, len, size)
3043 && !known_lower (stmt, maxlen, size))
3045 /* Neither MAXLEN nor LEN can be proved to be less than SIZE; at
3046 least try to optimize (void) __mempcpy_chk () into
3047 (void) __memcpy_chk (). */
3048 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3050 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3051 if (!fn)
3052 return false;
3054 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3055 replace_call_with_call_and_fold (gsi, repl);
3056 return true;
3058 return false;
3061 fn = NULL_TREE;
3062 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3063 mem{cpy,pcpy,move,set} is available. */
3064 switch (fcode)
3066 case BUILT_IN_MEMCPY_CHK:
3067 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3068 break;
3069 case BUILT_IN_MEMPCPY_CHK:
3070 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3071 break;
3072 case BUILT_IN_MEMMOVE_CHK:
3073 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3074 break;
3075 case BUILT_IN_MEMSET_CHK:
3076 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3077 break;
3078 default:
3079 break;
3082 if (!fn)
3083 return false;
3085 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3086 replace_call_with_call_and_fold (gsi, repl);
3087 return true;
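/* For illustration: __memcpy_chk (d, s, n, -1) is replaced by
   memcpy (d, s, n) because the all-ones object size means no
   checking is possible; the same replacement happens whenever N is
   known not to exceed the object size SIZE.  */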
3090 /* Print a message in the dump file recording the transformation of FROM to TO. */
3092 static void
3093 dump_transformation (gcall *from, gcall *to)
3095 if (dump_enabled_p ())
3096 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3097 gimple_call_fn (from), gimple_call_fn (to));
3100 /* Fold a call to the __st[rp]cpy_chk builtin.
3101 DEST, SRC, and SIZE are the arguments to the call. FCODE is the
3102 BUILT_IN_* code of the builtin. Return false if no simplification
3103 was possible and true otherwise. */
3106 static bool
3107 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3108 tree dest,
3109 tree src, tree size,
3110 enum built_in_function fcode)
3112 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3113 location_t loc = gimple_location (stmt);
3114 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3115 tree len, fn;
3117 /* If SRC and DEST are the same (and not volatile), return DEST. */
3118 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3120 /* Issue -Wrestrict unless the pointers are null (those do
3121 not point to objects and so do not indicate an overlap;
3122 such calls could be the result of sanitization and jump
3123 threading). */
3124 if (!integer_zerop (dest)
3125 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3127 tree func = gimple_call_fndecl (stmt);
3129 warning_at (loc, OPT_Wrestrict,
3130 "%qD source argument is the same as destination",
3131 func);
3134 replace_call_with_value (gsi, dest);
3135 return true;
3138 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3139 if (! integer_all_onesp (size))
3141 len = c_strlen (src, 1);
3142 if (!known_lower (stmt, len, size, true)
3143 && !known_lower (stmt, maxlen, size, true))
3145 if (fcode == BUILT_IN_STPCPY_CHK)
3147 if (! ignore)
3148 return false;
3150 /* If return value of __stpcpy_chk is ignored,
3151 optimize into __strcpy_chk. */
3152 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3153 if (!fn)
3154 return false;
3156 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3157 replace_call_with_call_and_fold (gsi, repl);
3158 return true;
3161 if (! len || TREE_SIDE_EFFECTS (len))
3162 return false;
3164 /* If c_strlen returned something, but not provably less than size,
3165 transform __strcpy_chk into __memcpy_chk. */
3166 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3167 if (!fn)
3168 return false;
3170 gimple_seq stmts = NULL;
3171 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3172 len = gimple_convert (&stmts, loc, size_type_node, len);
3173 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3174 build_int_cst (size_type_node, 1));
3175 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3176 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3177 replace_call_with_call_and_fold (gsi, repl);
3178 return true;
3182 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3183 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3184 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3185 if (!fn)
3186 return false;
3188 gcall *repl = gimple_build_call (fn, 2, dest, src);
3189 dump_transformation (stmt, repl);
3190 replace_call_with_call_and_fold (gsi, repl);
3191 return true;
3194 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3195 are the arguments to the call. FCODE is the BUILT_IN_* code of the
3196 builtin. Return false if no simplification was possible and true
3197 otherwise. */
3199 static bool
3200 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3201 tree dest, tree src,
3202 tree len, tree size,
3203 enum built_in_function fcode)
3205 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3206 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3207 tree fn;
3209 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3210 if (! integer_all_onesp (size)
3211 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3213 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3215 /* If return value of __stpncpy_chk is ignored,
3216 optimize into __strncpy_chk. */
3217 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3218 if (fn)
3220 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3221 replace_call_with_call_and_fold (gsi, repl);
3222 return true;
3225 return false;
3228 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3229 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3230 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3231 if (!fn)
3232 return false;
3234 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3235 dump_transformation (stmt, repl);
3236 replace_call_with_call_and_fold (gsi, repl);
3237 return true;
3240 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3241 Return false if no simplification can be made and true otherwise. */
3243 static bool
3244 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3246 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3247 location_t loc = gimple_location (stmt);
3248 tree dest = gimple_call_arg (stmt, 0);
3249 tree src = gimple_call_arg (stmt, 1);
3250 tree fn, lenp1;
3252 /* If the result is unused, replace stpcpy with strcpy. */
3253 if (gimple_call_lhs (stmt) == NULL_TREE)
3255 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3256 if (!fn)
3257 return false;
3258 gimple_call_set_fndecl (stmt, fn);
3259 fold_stmt (gsi);
3260 return true;
3263 /* Set to non-null if ARG refers to an unterminated array. */
3264 c_strlen_data data = { };
3265 /* The size of the unterminated array if SRC refers to one. */
3266 tree size;
3267 /* True if the size is exact/constant, false if it's the lower bound
3268 of a range. */
3269 bool exact;
3270 tree len = c_strlen (src, 1, &data, 1);
3271 if (!len
3272 || TREE_CODE (len) != INTEGER_CST)
3274 data.decl = unterminated_array (src, &size, &exact);
3275 if (!data.decl)
3276 return false;
3279 if (data.decl)
3281 /* Avoid folding calls with unterminated arrays. */
3282 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3283 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3284 exact);
3285 suppress_warning (stmt, OPT_Wstringop_overread);
3286 return false;
3289 if (optimize_function_for_size_p (cfun)
3290 /* If length is zero it's small enough. */
3291 && !integer_zerop (len))
3292 return false;
3294 /* If the source has a known length replace stpcpy with memcpy. */
3295 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3296 if (!fn)
3297 return false;
3299 gimple_seq stmts = NULL;
3300 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3301 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3302 tem, build_int_cst (size_type_node, 1));
3303 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3304 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3305 gimple_move_vops (repl, stmt);
3306 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3307 /* Replace the result with dest + len. */
3308 stmts = NULL;
3309 tem = gimple_convert (&stmts, loc, sizetype, len);
3310 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3311 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3312 POINTER_PLUS_EXPR, dest, tem);
3313 gsi_replace (gsi, ret, false);
3314 /* Finally fold the memcpy call. */
3315 gimple_stmt_iterator gsi2 = *gsi;
3316 gsi_prev (&gsi2);
3317 fold_stmt (&gsi2);
3318 return true;
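/* An illustrative sketch: when SRC is the literal "ab", the call

     lhs = stpcpy (dest, "ab");

   becomes (modulo SSA temporaries)

     memcpy (dest, "ab", 3);
     lhs = dest + 2;

   since stpcpy returns a pointer to the terminating nul it stored.  */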
3321 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by the
3322 GSI iterator. Return false if a normal call should be emitted
3323 rather than folding the call, and true otherwise. FCODE is either
3324 BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. */
3327 static bool
3328 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3329 enum built_in_function fcode)
3331 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3332 tree dest, size, len, fn, fmt, flag;
3333 const char *fmt_str;
3335 /* Verify the required arguments in the original call. */
3336 if (gimple_call_num_args (stmt) < 5)
3337 return false;
3339 dest = gimple_call_arg (stmt, 0);
3340 len = gimple_call_arg (stmt, 1);
3341 flag = gimple_call_arg (stmt, 2);
3342 size = gimple_call_arg (stmt, 3);
3343 fmt = gimple_call_arg (stmt, 4);
3345 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3346 if (! integer_all_onesp (size)
3347 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3348 return false;
3350 if (!init_target_chars ())
3351 return false;
3353 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3354 or if format doesn't contain % chars or is "%s". */
3355 if (! integer_zerop (flag))
3357 fmt_str = c_getstr (fmt);
3358 if (fmt_str == NULL)
3359 return false;
3360 if (strchr (fmt_str, target_percent) != NULL
3361 && strcmp (fmt_str, target_percent_s))
3362 return false;
3365 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3366 available. */
3367 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3368 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3369 if (!fn)
3370 return false;
3372 /* Replace the called function and the first 5 arguments by 3,
3373 retaining the trailing varargs. */
3374 gimple_call_set_fndecl (stmt, fn);
3375 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3376 gimple_call_set_arg (stmt, 0, dest);
3377 gimple_call_set_arg (stmt, 1, len);
3378 gimple_call_set_arg (stmt, 2, fmt);
3379 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3380 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3381 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3382 fold_stmt (gsi);
3383 return true;
3386 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by the
3387 GSI iterator. Return false if a normal call should be emitted
3388 rather than folding the call, and true otherwise. FCODE is either
3389 BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
3391 static bool
3392 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3393 enum built_in_function fcode)
3395 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3396 tree dest, size, len, fn, fmt, flag;
3397 const char *fmt_str;
3398 unsigned nargs = gimple_call_num_args (stmt);
3400 /* Verify the required arguments in the original call. */
3401 if (nargs < 4)
3402 return false;
3403 dest = gimple_call_arg (stmt, 0);
3404 flag = gimple_call_arg (stmt, 1);
3405 size = gimple_call_arg (stmt, 2);
3406 fmt = gimple_call_arg (stmt, 3);
3408 len = NULL_TREE;
3410 if (!init_target_chars ())
3411 return false;
3413 /* Check whether the format is a literal string constant. */
3414 fmt_str = c_getstr (fmt);
3415 if (fmt_str != NULL)
3417 /* If the format doesn't contain % args or %%, we know the size. */
3418 if (strchr (fmt_str, target_percent) == 0)
3420 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3421 len = build_int_cstu (size_type_node, strlen (fmt_str));
3423 /* If the format is "%s" and first ... argument is a string literal,
3424 we know the size too. */
3425 else if (fcode == BUILT_IN_SPRINTF_CHK
3426 && strcmp (fmt_str, target_percent_s) == 0)
3428 tree arg;
3430 if (nargs == 5)
3432 arg = gimple_call_arg (stmt, 4);
3433 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3434 len = c_strlen (arg, 1);
3439 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3440 return false;
3442 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3443 or if format doesn't contain % chars or is "%s". */
3444 if (! integer_zerop (flag))
3446 if (fmt_str == NULL)
3447 return false;
3448 if (strchr (fmt_str, target_percent) != NULL
3449 && strcmp (fmt_str, target_percent_s))
3450 return false;
3453 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3454 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3455 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3456 if (!fn)
3457 return false;
3459 /* Replace the called function and the first 4 arguments by 2,
3460 retaining the trailing varargs. */
3461 gimple_call_set_fndecl (stmt, fn);
3462 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3463 gimple_call_set_arg (stmt, 0, dest);
3464 gimple_call_set_arg (stmt, 1, fmt);
3465 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3466 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3467 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3468 fold_stmt (gsi);
3469 return true;
3472 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3473 ORIG may be null if this is a 2-argument call. We don't attempt to
3474 simplify calls with more than 3 arguments.
3476 Return true if simplification was possible, otherwise false. */
3478 bool
3479 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3481 gimple *stmt = gsi_stmt (*gsi);
3483 /* Verify the required arguments in the original call. We deal with two
3484 types of sprintf() calls: 'sprintf (str, fmt)' and
3485 'sprintf (dest, "%s", orig)'. */
3486 if (gimple_call_num_args (stmt) > 3)
3487 return false;
3489 tree orig = NULL_TREE;
3490 if (gimple_call_num_args (stmt) == 3)
3491 orig = gimple_call_arg (stmt, 2);
3493 /* Check whether the format is a literal string constant. */
3494 tree fmt = gimple_call_arg (stmt, 1);
3495 const char *fmt_str = c_getstr (fmt);
3496 if (fmt_str == NULL)
3497 return false;
3499 tree dest = gimple_call_arg (stmt, 0);
3501 if (!init_target_chars ())
3502 return false;
3504 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3505 if (!fn)
3506 return false;
3508 /* If the format doesn't contain % args or %%, use strcpy. */
3509 if (strchr (fmt_str, target_percent) == NULL)
3511 /* Don't optimize sprintf (buf, "abc", ptr++). */
3512 if (orig)
3513 return false;
3515 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3516 'format' is known to contain no % formats. */
3517 gimple_seq stmts = NULL;
3518 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3520 /* Propagate the NO_WARNING bit to avoid issuing the same
3521 warning more than once. */
3522 copy_warning (repl, stmt);
3524 gimple_seq_add_stmt_without_update (&stmts, repl);
3525 if (tree lhs = gimple_call_lhs (stmt))
3527 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3528 strlen (fmt_str)));
3529 gimple_seq_add_stmt_without_update (&stmts, repl);
3530 gsi_replace_with_seq_vops (gsi, stmts);
3531 /* gsi now points at the assignment to the lhs, get a
3532 stmt iterator to the strcpy call.
3533 ??? We can't use gsi_for_stmt as that doesn't work when the
3534 CFG isn't built yet. */
3535 gimple_stmt_iterator gsi2 = *gsi;
3536 gsi_prev (&gsi2);
3537 fold_stmt (&gsi2);
3539 else
3541 gsi_replace_with_seq_vops (gsi, stmts);
3542 fold_stmt (gsi);
3544 return true;
3547 /* If the format is "%s", use strcpy; the length of ORIG must be known when the result is used. */
3548 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3550 /* Don't crash on sprintf (str1, "%s"). */
3551 if (!orig)
3552 return false;
3554 /* Don't fold calls with source arguments of invalid (nonpointer)
3555 types. */
3556 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3557 return false;
3559 tree orig_len = NULL_TREE;
3560 if (gimple_call_lhs (stmt))
3562 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3563 if (!orig_len)
3564 return false;
3567 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3568 gimple_seq stmts = NULL;
3569 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3571 /* Propagate the NO_WARNING bit to avoid issuing the same
3572 warning more than once. */
3573 copy_warning (repl, stmt);
3575 gimple_seq_add_stmt_without_update (&stmts, repl);
3576 if (tree lhs = gimple_call_lhs (stmt))
3578 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3579 TREE_TYPE (orig_len)))
3580 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3581 repl = gimple_build_assign (lhs, orig_len);
3582 gimple_seq_add_stmt_without_update (&stmts, repl);
3583 gsi_replace_with_seq_vops (gsi, stmts);
3584 /* gsi now points at the assignment to the lhs, get a
3585 stmt iterator to the strcpy call.
3586 ??? We can't use gsi_for_stmt as that doesn't work when the
3587 CFG isn't built yet. */
3588 gimple_stmt_iterator gsi2 = *gsi;
3589 gsi_prev (&gsi2);
3590 fold_stmt (&gsi2);
3592 else
3594 gsi_replace_with_seq_vops (gsi, stmts);
3595 fold_stmt (gsi);
3597 return true;
3599 return false;
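/* For illustration: sprintf (d, "hello") is replaced by
   strcpy (d, "hello"), with a used result replaced by the constant 5,
   and sprintf (d, "%s", s) is replaced by strcpy (d, s), provided
   that strlen (s) is known whenever the result is used.  */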
3602 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3603 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3604 attempt to simplify calls with more than 4 arguments.
3606 Return true if simplification was possible, otherwise false. */
3608 bool
3609 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3611 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3612 tree dest = gimple_call_arg (stmt, 0);
3613 tree destsize = gimple_call_arg (stmt, 1);
3614 tree fmt = gimple_call_arg (stmt, 2);
3615 tree orig = NULL_TREE;
3616 const char *fmt_str = NULL;
3618 if (gimple_call_num_args (stmt) > 4)
3619 return false;
3621 if (gimple_call_num_args (stmt) == 4)
3622 orig = gimple_call_arg (stmt, 3);
3624 /* Check whether the format is a literal string constant. */
3625 fmt_str = c_getstr (fmt);
3626 if (fmt_str == NULL)
3627 return false;
3629 if (!init_target_chars ())
3630 return false;
3632 /* If the format doesn't contain % args or %%, use strcpy. */
3633 if (strchr (fmt_str, target_percent) == NULL)
3635 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3636 if (!fn)
3637 return false;
3639 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3640 if (orig)
3641 return false;
3643 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3645 /* We could expand this as
3646 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3647 or to
3648 memcpy (str, fmt_with_nul_at_cstm1, cst);
3649 but in the former case that might increase code size
3650 and in the latter case grow .rodata section too much.
3651 So punt for now. */
3652 if (!known_lower (stmt, len, destsize, true))
3653 return false;
3655 gimple_seq stmts = NULL;
3656 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3657 gimple_seq_add_stmt_without_update (&stmts, repl);
3658 if (tree lhs = gimple_call_lhs (stmt))
3660 repl = gimple_build_assign (lhs,
3661 fold_convert (TREE_TYPE (lhs), len));
3662 gimple_seq_add_stmt_without_update (&stmts, repl);
3663 gsi_replace_with_seq_vops (gsi, stmts);
3664 /* gsi now points at the assignment to the lhs, get a
3665 	     stmt iterator to the strcpy call.
3666 ??? We can't use gsi_for_stmt as that doesn't work when the
3667 CFG isn't built yet. */
3668 gimple_stmt_iterator gsi2 = *gsi;
3669 gsi_prev (&gsi2);
3670 fold_stmt (&gsi2);
3672 else
3674 gsi_replace_with_seq_vops (gsi, stmts);
3675 fold_stmt (gsi);
3677 return true;
3680   /* If the format is "%s", use strcpy when the source length is known to be smaller than DESTSIZE.  */
3681 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3683 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3684 if (!fn)
3685 return false;
3687 /* Don't crash on snprintf (str1, cst, "%s"). */
3688 if (!orig)
3689 return false;
3691 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3693 /* We could expand this as
3694 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3695 or to
3696 memcpy (str1, str2_with_nul_at_cstm1, cst);
3697 but in the former case that might increase code size
3698 and in the latter case grow .rodata section too much.
3699 So punt for now. */
3700 if (!known_lower (stmt, orig_len, destsize, true))
3701 return false;
3703 /* Convert snprintf (str1, cst, "%s", str2) into
3704 strcpy (str1, str2) if strlen (str2) < cst. */
3705 gimple_seq stmts = NULL;
3706 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3707 gimple_seq_add_stmt_without_update (&stmts, repl);
3708 if (tree lhs = gimple_call_lhs (stmt))
3710 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3711 TREE_TYPE (orig_len)))
3712 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3713 repl = gimple_build_assign (lhs, orig_len);
3714 gimple_seq_add_stmt_without_update (&stmts, repl);
3715 gsi_replace_with_seq_vops (gsi, stmts);
3716 /* gsi now points at the assignment to the lhs, get a
3718 	     stmt iterator to the strcpy call.
3718 ??? We can't use gsi_for_stmt as that doesn't work when the
3719 CFG isn't built yet. */
3720 gimple_stmt_iterator gsi2 = *gsi;
3721 gsi_prev (&gsi2);
3722 fold_stmt (&gsi2);
3724 else
3726 gsi_replace_with_seq_vops (gsi, stmts);
3727 fold_stmt (gsi);
3729 return true;
3731 return false;
3734 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3735    FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
3736    more than 3 arguments, and ARG may be null in the 2-argument case.
3738    Return true if the call was simplified, otherwise false.  FCODE is the
3739    BUILT_IN_* code of the function to be simplified.  */
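/* For example (illustrative only, return value unused):
     fprintf (fp, "hello")   =>  fputs ("hello", fp);
     fprintf (fp, "%s", s)   =>  fputs (s, fp);
     fprintf (fp, "%c", c)   =>  fputc (c, fp);  */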
3742 static bool
3743 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3744 tree fp, tree fmt, tree arg,
3745 enum built_in_function fcode)
3747 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3748 tree fn_fputc, fn_fputs;
3749 const char *fmt_str = NULL;
3751 /* If the return value is used, don't do the transformation. */
3752 if (gimple_call_lhs (stmt) != NULL_TREE)
3753 return false;
3755 /* Check whether the format is a literal string constant. */
3756 fmt_str = c_getstr (fmt);
3757 if (fmt_str == NULL)
3758 return false;
3760 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3762 /* If we're using an unlocked function, assume the other
3763 unlocked functions exist explicitly. */
3764 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3765 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3767 else
3769 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3770 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3773 if (!init_target_chars ())
3774 return false;
3776   /* If the format doesn't contain % args or %%, use fputs.  */
3777 if (strchr (fmt_str, target_percent) == NULL)
3779 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3780 && arg)
3781 return false;
3783 /* If the format specifier was "", fprintf does nothing. */
3784 if (fmt_str[0] == '\0')
3786 replace_call_with_value (gsi, NULL_TREE);
3787 return true;
3790 /* When "string" doesn't contain %, replace all cases of
3791 fprintf (fp, string) with fputs (string, fp). The fputs
3792 builtin will take care of special cases like length == 1. */
3793 if (fn_fputs)
3795 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3796 replace_call_with_call_and_fold (gsi, repl);
3797 return true;
3801 /* The other optimizations can be done only on the non-va_list variants. */
3802 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3803 return false;
3805 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3806 else if (strcmp (fmt_str, target_percent_s) == 0)
3808 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3809 return false;
3810 if (fn_fputs)
3812 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3813 replace_call_with_call_and_fold (gsi, repl);
3814 return true;
3818 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3819 else if (strcmp (fmt_str, target_percent_c) == 0)
3821 if (!arg
3822 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3823 return false;
3824 if (fn_fputc)
3826 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3827 replace_call_with_call_and_fold (gsi, repl);
3828 return true;
3832 return false;
3835 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3836 FMT and ARG are the arguments to the call; we don't fold cases with
3837    more than 2 arguments, and ARG may be null in the 1-argument case.
3839    Return true if the call was simplified, otherwise false.  FCODE is the
3840    BUILT_IN_* code of the function to be simplified.  */
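/* For example (illustrative only, return value unused):
     printf ("hello\n")   =>  puts ("hello");
     printf ("%s\n", s)   =>  puts (s);
     printf ("a")         =>  putchar ('a');
     printf ("%c", c)     =>  putchar (c);  */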
3843 static bool
3844 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3845 tree arg, enum built_in_function fcode)
3847 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3848 tree fn_putchar, fn_puts, newarg;
3849 const char *fmt_str = NULL;
3851 /* If the return value is used, don't do the transformation. */
3852 if (gimple_call_lhs (stmt) != NULL_TREE)
3853 return false;
3855 /* Check whether the format is a literal string constant. */
3856 fmt_str = c_getstr (fmt);
3857 if (fmt_str == NULL)
3858 return false;
3860 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3862 /* If we're using an unlocked function, assume the other
3863 unlocked functions exist explicitly. */
3864 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3865 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3867 else
3869 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3870 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3873 if (!init_target_chars ())
3874 return false;
3876 if (strcmp (fmt_str, target_percent_s) == 0
3877 || strchr (fmt_str, target_percent) == NULL)
3879 const char *str;
3881 if (strcmp (fmt_str, target_percent_s) == 0)
3883 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3884 return false;
3886 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3887 return false;
3889 str = c_getstr (arg);
3890 if (str == NULL)
3891 return false;
3893 else
3895 /* The format specifier doesn't contain any '%' characters. */
3896 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3897 && arg)
3898 return false;
3899 str = fmt_str;
3902 /* If the string was "", printf does nothing. */
3903 if (str[0] == '\0')
3905 replace_call_with_value (gsi, NULL_TREE);
3906 return true;
3909       /* If the string has length 1, call putchar.  */
3910 if (str[1] == '\0')
3912 	  /* Given printf("c"), where c is any single character,
3913 convert "c"[0] to an int and pass that to the replacement
3914 function. */
3915 newarg = build_int_cst (integer_type_node, str[0]);
3916 if (fn_putchar)
3918 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3919 replace_call_with_call_and_fold (gsi, repl);
3920 return true;
3923 else
3925 /* If the string was "string\n", call puts("string"). */
3926 size_t len = strlen (str);
3927 if ((unsigned char)str[len - 1] == target_newline
3928 && (size_t) (int) len == len
3929 && (int) len > 0)
3931 char *newstr;
3933 /* Create a NUL-terminated string that's one char shorter
3934 than the original, stripping off the trailing '\n'. */
3935 newstr = xstrdup (str);
3936 newstr[len - 1] = '\0';
3937 newarg = build_string_literal (len, newstr);
3938 free (newstr);
3939 if (fn_puts)
3941 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3942 replace_call_with_call_and_fold (gsi, repl);
3943 return true;
3946 else
3947 /* We'd like to arrange to call fputs(string,stdout) here,
3948 but we need stdout and don't have a way to get it yet. */
3949 return false;
3953 /* The other optimizations can be done only on the non-va_list variants. */
3954 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3955 return false;
3957 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3958 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3960 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3961 return false;
3962 if (fn_puts)
3964 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3965 replace_call_with_call_and_fold (gsi, repl);
3966 return true;
3970 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3971 else if (strcmp (fmt_str, target_percent_c) == 0)
3973 if (!arg || ! useless_type_conversion_p (integer_type_node,
3974 TREE_TYPE (arg)))
3975 return false;
3976 if (fn_putchar)
3978 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3979 replace_call_with_call_and_fold (gsi, repl);
3980 return true;
3984 return false;
3989 /* Fold a call to __builtin_strlen: replace it with a constant when the
3990    length is known, otherwise record the computed length range.  */
3991 static bool
3992 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3994 gimple *stmt = gsi_stmt (*gsi);
3995 tree arg = gimple_call_arg (stmt, 0);
3997 wide_int minlen;
3998 wide_int maxlen;
4000 c_strlen_data lendata = { };
4001 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4002 && !lendata.decl
4003 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4004 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4006 /* The range of lengths refers to either a single constant
4007 string or to the longest and shortest constant string
4008 referenced by the argument of the strlen() call, or to
4009 the strings that can possibly be stored in the arrays
4010 the argument refers to. */
4011 minlen = wi::to_wide (lendata.minlen);
4012 maxlen = wi::to_wide (lendata.maxlen);
4014 else
4016 unsigned prec = TYPE_PRECISION (sizetype);
4018 minlen = wi::shwi (0, prec);
4019 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4022   /* For -fsanitize=address, don't optimize the upper bound of the
4023      length, so that UB on non-NUL-terminated arrays can still be diagnosed.  */
4024 if (sanitize_flags_p (SANITIZE_ADDRESS))
4025 maxlen = wi::max_value (TYPE_PRECISION (sizetype), UNSIGNED);
4027 if (minlen == maxlen)
4029 /* Fold the strlen call to a constant. */
4030 tree type = TREE_TYPE (lendata.minlen);
4031 tree len = force_gimple_operand_gsi (gsi,
4032 wide_int_to_tree (type, minlen),
4033 true, NULL, true, GSI_SAME_STMT);
4034 replace_call_with_value (gsi, len);
4035 return true;
4038 /* Set the strlen() range to [0, MAXLEN]. */
4039 if (tree lhs = gimple_call_lhs (stmt))
4040 set_strlen_range (lhs, minlen, maxlen);
4042 return false;
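/* Sketch of the fold above: strlen ("abc") is replaced by the constant 3;
   for an argument pointing into char buf[8] with unknown contents no
   replacement happens, but the range [0, 7] is recorded for the lhs.  */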
4045 /* Fold a call to __builtin_acc_on_device. */
4047 static bool
4048 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4050 /* Defer folding until we know which compiler we're in. */
4051 if (symtab->state != EXPANSION)
4052 return false;
4054 unsigned val_host = GOMP_DEVICE_HOST;
4055 unsigned val_dev = GOMP_DEVICE_NONE;
4057 #ifdef ACCEL_COMPILER
4058 val_host = GOMP_DEVICE_NOT_HOST;
4059 val_dev = ACCEL_COMPILER_acc_device;
4060 #endif
4062 location_t loc = gimple_location (gsi_stmt (*gsi));
4064 tree host_eq = make_ssa_name (boolean_type_node);
4065 gimple *host_ass = gimple_build_assign
4066 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4067 gimple_set_location (host_ass, loc);
4068 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4070 tree dev_eq = make_ssa_name (boolean_type_node);
4071 gimple *dev_ass = gimple_build_assign
4072 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4073 gimple_set_location (dev_ass, loc);
4074 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4076 tree result = make_ssa_name (boolean_type_node);
4077 gimple *result_ass = gimple_build_assign
4078 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4079 gimple_set_location (result_ass, loc);
4080 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4082 replace_call_with_value (gsi, result);
4084 return true;
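/* Illustrative sketch of the sequence emitted above for the host
   compiler (the values differ under ACCEL_COMPILER):
     host_eq = arg0 == GOMP_DEVICE_HOST;
     dev_eq = arg0 == GOMP_DEVICE_NONE;
     result = host_eq | dev_eq;  */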
4087 /* Fold realloc (0, n) -> malloc (n). */
4089 static bool
4090 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4092 gimple *stmt = gsi_stmt (*gsi);
4093 tree arg = gimple_call_arg (stmt, 0);
4094 tree size = gimple_call_arg (stmt, 1);
4096 if (operand_equal_p (arg, null_pointer_node, 0))
4098 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4099 if (fn_malloc)
4101 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4102 replace_call_with_call_and_fold (gsi, repl);
4103 return true;
4106 return false;
4109 /* Number of bytes into which any type other than aggregate, vector and
4110    _BitInt types should fit.  */
4111 static constexpr size_t clear_padding_unit
4112 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4113 /* Buffer size on which __builtin_clear_padding folding code works. */
4114 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
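/* A motivating sketch (assuming a typical LP64 target): for
     struct S { char c; int i; };
   __builtin_clear_padding (&s) zeroes the three padding bytes between
   the members c and i while leaving their values untouched.  The
   machinery below computes such padding masks and emits the stores.  */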
4116 /* Data passed through __builtin_clear_padding folding. */
4117 struct clear_padding_struct {
4118 location_t loc;
4119   /* False during __builtin_clear_padding folding, true during
4120      clear_type_padding_in_mask.  In the latter case, instead of clearing
4121      the non-padding bits in the union_ptr array, clear the padding bits.  */
4122 bool clear_in_mask;
4123 tree base;
4124 tree alias_type;
4125 gimple_stmt_iterator *gsi;
4126 /* Alignment of buf->base + 0. */
4127 unsigned align;
4128 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4129 HOST_WIDE_INT off;
4130 /* Number of padding bytes before buf->off that don't have padding clear
4131 code emitted yet. */
4132 HOST_WIDE_INT padding_bytes;
4133 /* The size of the whole object. Never emit code to touch
4134 buf->base + buf->sz or following bytes. */
4135 HOST_WIDE_INT sz;
4136 /* Number of bytes recorded in buf->buf. */
4137 size_t size;
4138   /* When inside a union, instead of emitting code we AND the bits into
4139      the union_ptr array.  */
4140 unsigned char *union_ptr;
4141 /* Set bits mean padding bits that need to be cleared by the builtin. */
4142 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4145 /* Emit code to clear the padding requested in BUF->buf; set bits
4146 in there stand for padding that should be cleared. FULL is true
4147 if everything from the buffer should be flushed, otherwise
4148 it can leave up to 2 * clear_padding_unit bytes for further
4149 processing. */
4151 static void
4152 clear_padding_flush (clear_padding_struct *buf, bool full)
4154 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4155 if (!full && buf->size < 2 * clear_padding_unit)
4156 return;
4157 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4158 size_t end = buf->size;
4159 if (!full)
4160 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4161 * clear_padding_unit);
4162 size_t padding_bytes = buf->padding_bytes;
4163 if (buf->union_ptr)
4165 if (buf->clear_in_mask)
4167 /* During clear_type_padding_in_mask, clear the padding
4168 bits set in buf->buf in the buf->union_ptr mask. */
4169 for (size_t i = 0; i < end; i++)
4171 if (buf->buf[i] == (unsigned char) ~0)
4172 padding_bytes++;
4173 else
4175 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4176 0, padding_bytes);
4177 padding_bytes = 0;
4178 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4181 if (full)
4183 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4184 0, padding_bytes);
4185 buf->off = 0;
4186 buf->size = 0;
4187 buf->padding_bytes = 0;
4189 else
4191 memmove (buf->buf, buf->buf + end, buf->size - end);
4192 buf->off += end;
4193 buf->size -= end;
4194 buf->padding_bytes = padding_bytes;
4196 return;
4198       /* Inside of a union, instead of emitting any code, clear all
4199 	 bits in the union_ptr buffer that are clear in buf.
4200 	 Whole padding bytes don't clear anything.  */
4201 for (size_t i = 0; i < end; i++)
4203 if (buf->buf[i] == (unsigned char) ~0)
4204 padding_bytes++;
4205 else
4207 padding_bytes = 0;
4208 buf->union_ptr[buf->off + i] &= buf->buf[i];
4211 if (full)
4213 buf->off = 0;
4214 buf->size = 0;
4215 buf->padding_bytes = 0;
4217 else
4219 memmove (buf->buf, buf->buf + end, buf->size - end);
4220 buf->off += end;
4221 buf->size -= end;
4222 buf->padding_bytes = padding_bytes;
4224 return;
4226 size_t wordsize = UNITS_PER_WORD;
4227 for (size_t i = 0; i < end; i += wordsize)
4229 size_t nonzero_first = wordsize;
4230 size_t nonzero_last = 0;
4231 size_t zero_first = wordsize;
4232 size_t zero_last = 0;
4233 bool all_ones = true, bytes_only = true;
4234 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4235 > (unsigned HOST_WIDE_INT) buf->sz)
4237 gcc_assert (wordsize > 1);
4238 wordsize /= 2;
4239 i -= wordsize;
4240 continue;
4242 for (size_t j = i; j < i + wordsize && j < end; j++)
4244 if (buf->buf[j])
4246 if (nonzero_first == wordsize)
4248 nonzero_first = j - i;
4249 nonzero_last = j - i;
4251 if (nonzero_last != j - i)
4252 all_ones = false;
4253 nonzero_last = j + 1 - i;
4255 else
4257 if (zero_first == wordsize)
4258 zero_first = j - i;
4259 zero_last = j + 1 - i;
4261 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4263 all_ones = false;
4264 bytes_only = false;
4267 size_t padding_end = i;
4268 if (padding_bytes)
4270 if (nonzero_first == 0
4271 && nonzero_last == wordsize
4272 && all_ones)
4274 /* All bits are padding and we had some padding
4275 before too. Just extend it. */
4276 padding_bytes += wordsize;
4277 continue;
4279 if (all_ones && nonzero_first == 0)
4281 padding_bytes += nonzero_last;
4282 padding_end += nonzero_last;
4283 nonzero_first = wordsize;
4284 nonzero_last = 0;
4286 else if (bytes_only && nonzero_first == 0)
4288 gcc_assert (zero_first && zero_first != wordsize);
4289 padding_bytes += zero_first;
4290 padding_end += zero_first;
4292 tree atype, src;
4293 if (padding_bytes == 1)
4295 atype = char_type_node;
4296 src = build_zero_cst (char_type_node);
4298 else
4300 atype = build_array_type_nelts (char_type_node, padding_bytes);
4301 src = build_constructor (atype, NULL);
4303 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4304 build_int_cst (buf->alias_type,
4305 buf->off + padding_end
4306 - padding_bytes));
4307 gimple *g = gimple_build_assign (dst, src);
4308 gimple_set_location (g, buf->loc);
4309 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4310 padding_bytes = 0;
4311 buf->padding_bytes = 0;
4313 if (nonzero_first == wordsize)
4314 /* All bits in a word are 0, there are no padding bits. */
4315 continue;
4316 if (all_ones && nonzero_last == wordsize)
4318 /* All bits between nonzero_first and end of word are padding
4319 	     bits; start counting padding_bytes.  */
4320 padding_bytes = nonzero_last - nonzero_first;
4321 continue;
4323 if (bytes_only)
4325 /* If bitfields aren't involved in this word, prefer storing
4326 	     individual bytes or groups of them over performing an RMW
4327 operation on the whole word. */
4328 gcc_assert (i + zero_last <= end);
4329 for (size_t j = padding_end; j < i + zero_last; j++)
4331 if (buf->buf[j])
4333 size_t k;
4334 for (k = j; k < i + zero_last; k++)
4335 if (buf->buf[k] == 0)
4336 break;
4337 HOST_WIDE_INT off = buf->off + j;
4338 tree atype, src;
4339 if (k - j == 1)
4341 atype = char_type_node;
4342 src = build_zero_cst (char_type_node);
4344 else
4346 atype = build_array_type_nelts (char_type_node, k - j);
4347 src = build_constructor (atype, NULL);
4349 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4350 buf->base,
4351 build_int_cst (buf->alias_type, off));
4352 gimple *g = gimple_build_assign (dst, src);
4353 gimple_set_location (g, buf->loc);
4354 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4355 j = k;
4358 if (nonzero_last == wordsize)
4359 padding_bytes = nonzero_last - zero_last;
4360 continue;
4362 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4364 if (nonzero_last - nonzero_first <= eltsz
4365 && ((nonzero_first & ~(eltsz - 1))
4366 == ((nonzero_last - 1) & ~(eltsz - 1))))
4368 tree type;
4369 if (eltsz == 1)
4370 type = char_type_node;
4371 else
4372 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4374 size_t start = nonzero_first & ~(eltsz - 1);
4375 HOST_WIDE_INT off = buf->off + i + start;
4376 tree atype = type;
4377 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4378 atype = build_aligned_type (type, buf->align);
4379 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4380 build_int_cst (buf->alias_type, off));
4381 tree src;
4382 gimple *g;
4383 if (all_ones
4384 && nonzero_first == start
4385 && nonzero_last == start + eltsz)
4386 src = build_zero_cst (type);
4387 else
4389 src = make_ssa_name (type);
4390 tree tmp_dst = unshare_expr (dst);
4391 	      /* The folding introduces a read from tmp_dst; prevent the
4392 		 uninitialized-warning analysis from issuing a warning for
4393 		 such a fake read.  To suppress the warning only for this
4394 		 expr, set the location of tmp_dst to UNKNOWN_LOCATION first;
4395 		 suppress_warning will then call set_no_warning_bit to set
4396 		 the no_warning flag only for tmp_dst.  */
4398 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4399 suppress_warning (tmp_dst, OPT_Wuninitialized);
4400 g = gimple_build_assign (src, tmp_dst);
4401 gimple_set_location (g, buf->loc);
4402 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4403 tree mask = native_interpret_expr (type,
4404 buf->buf + i + start,
4405 eltsz);
4406 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4407 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4408 tree src_masked = make_ssa_name (type);
4409 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4410 src, mask);
4411 gimple_set_location (g, buf->loc);
4412 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4413 src = src_masked;
4415 g = gimple_build_assign (dst, src);
4416 gimple_set_location (g, buf->loc);
4417 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4418 break;
4422 if (full)
4424 if (padding_bytes)
4426 tree atype, src;
4427 if (padding_bytes == 1)
4429 atype = char_type_node;
4430 src = build_zero_cst (char_type_node);
4432 else
4434 atype = build_array_type_nelts (char_type_node, padding_bytes);
4435 src = build_constructor (atype, NULL);
4437 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4438 build_int_cst (buf->alias_type,
4439 buf->off + end
4440 - padding_bytes));
4441 gimple *g = gimple_build_assign (dst, src);
4442 gimple_set_location (g, buf->loc);
4443 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4445 size_t end_rem = end % UNITS_PER_WORD;
4446 buf->off += end - end_rem;
4447 buf->size = end_rem;
4448 memset (buf->buf, 0, buf->size);
4449 buf->padding_bytes = 0;
4451 else
4453 memmove (buf->buf, buf->buf + end, buf->size - end);
4454 buf->off += end;
4455 buf->size -= end;
4456 buf->padding_bytes = padding_bytes;
4460 /* Append PADDING_BYTES padding bytes. */
4462 static void
4463 clear_padding_add_padding (clear_padding_struct *buf,
4464 HOST_WIDE_INT padding_bytes)
4466 if (padding_bytes == 0)
4467 return;
4468 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4469 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4470 clear_padding_flush (buf, false);
4471 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4472 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4474 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4475 padding_bytes -= clear_padding_buf_size - buf->size;
4476 buf->size = clear_padding_buf_size;
4477 clear_padding_flush (buf, false);
4478 gcc_assert (buf->padding_bytes);
4479 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4480 	 are guaranteed to be all ones.  */
4481 padding_bytes += buf->size;
4482 buf->size = padding_bytes % UNITS_PER_WORD;
4483 memset (buf->buf, ~0, buf->size);
4484 buf->off += padding_bytes - buf->size;
4485 buf->padding_bytes += padding_bytes - buf->size;
4487 else
4489 memset (buf->buf + buf->size, ~0, padding_bytes);
4490 buf->size += padding_bytes;
4494 static void clear_padding_type (clear_padding_struct *, tree,
4495 HOST_WIDE_INT, bool);
4497 /* Clear padding bits of union type TYPE. */
4499 static void
4500 clear_padding_union (clear_padding_struct *buf, tree type,
4501 HOST_WIDE_INT sz, bool for_auto_init)
4503 clear_padding_struct *union_buf;
4504 HOST_WIDE_INT start_off = 0, next_off = 0;
4505 size_t start_size = 0;
4506 if (buf->union_ptr)
4508 start_off = buf->off + buf->size;
4509 next_off = start_off + sz;
4510 start_size = start_off % UNITS_PER_WORD;
4511 start_off -= start_size;
4512 clear_padding_flush (buf, true);
4513 union_buf = buf;
4515 else
4517 if (sz + buf->size > clear_padding_buf_size)
4518 clear_padding_flush (buf, false);
4519 union_buf = XALLOCA (clear_padding_struct);
4520 union_buf->loc = buf->loc;
4521 union_buf->clear_in_mask = buf->clear_in_mask;
4522 union_buf->base = NULL_TREE;
4523 union_buf->alias_type = NULL_TREE;
4524 union_buf->gsi = NULL;
4525 union_buf->align = 0;
4526 union_buf->off = 0;
4527 union_buf->padding_bytes = 0;
4528 union_buf->sz = sz;
4529 union_buf->size = 0;
4530 if (sz + buf->size <= clear_padding_buf_size)
4531 union_buf->union_ptr = buf->buf + buf->size;
4532 else
4533 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4534 memset (union_buf->union_ptr, ~0, sz);
4537 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4538 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4540 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4542 if (TREE_TYPE (field) == error_mark_node)
4543 continue;
4544 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4545 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4546 if (!buf->clear_in_mask && !for_auto_init)
4547 error_at (buf->loc, "flexible array member %qD does not have "
4548 "well defined padding bits for %qs",
4549 field, "__builtin_clear_padding");
4550 continue;
4552 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4553 gcc_assert (union_buf->size == 0);
4554 union_buf->off = start_off;
4555 union_buf->size = start_size;
4556 memset (union_buf->buf, ~0, start_size);
4557 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4558 clear_padding_add_padding (union_buf, sz - fldsz);
4559 clear_padding_flush (union_buf, true);
4562 if (buf == union_buf)
4564 buf->off = next_off;
4565 buf->size = next_off % UNITS_PER_WORD;
4566 buf->off -= buf->size;
4567 memset (buf->buf, ~0, buf->size);
4569 else if (sz + buf->size <= clear_padding_buf_size)
4570 buf->size += sz;
4571 else
4573 unsigned char *union_ptr = union_buf->union_ptr;
4574 while (sz)
4576 clear_padding_flush (buf, false);
4577 HOST_WIDE_INT this_sz
4578 = MIN ((unsigned HOST_WIDE_INT) sz,
4579 clear_padding_buf_size - buf->size);
4580 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4581 buf->size += this_sz;
4582 union_ptr += this_sz;
4583 sz -= this_sz;
4585 XDELETE (union_buf->union_ptr);
4589 /* The only known floating point formats with padding bits are the
4590 IEEE extended ones. */
4592 static bool
4593 clear_padding_real_needs_padding_p (tree type)
4595 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4596 return (fmt->b == 2
4597 && fmt->signbit_ro == fmt->signbit_rw
4598 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
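/* E.g. the x86 80-bit extended format has signbit_ro == 79 and is
   stored in 12 or 16 bytes, leaving 2 or 6 bytes of padding per
   object; signbit_ro == 95 corresponds to the m68k extended format.  */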
4601 /* A _BitInt type has padding bits if it isn't extended in the ABI and its
4602    precision is smaller than that of its limb, or isn't a multiple of the
4603    limb precision when it spans multiple limbs.  */
4604 static bool
4605 clear_padding_bitint_needs_padding_p (tree type)
4607 struct bitint_info info;
4608 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4609 gcc_assert (ok);
4610 if (info.extended)
4611 return false;
4612 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.abi_limb_mode);
4613 if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4614 return true;
4615 else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
4616 return false;
4617 else
4618 return (((unsigned) TYPE_PRECISION (type))
4619 % GET_MODE_PRECISION (limb_mode)) != 0;
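/* Sketch: with 64-bit limbs and no ABI extension, _BitInt(37) occupies
   a single limb but uses only 37 of its 64 bits, so 27 bits are
   padding; _BitInt(64) and _BitInt(128) have no padding at all.  */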
4622 /* Return true if TYPE might contain any padding bits. */
4624 bool
4625 clear_padding_type_may_have_padding_p (tree type)
4627 switch (TREE_CODE (type))
4629 case RECORD_TYPE:
4630 case UNION_TYPE:
4631 return true;
4632 case ARRAY_TYPE:
4633 case COMPLEX_TYPE:
4634 case VECTOR_TYPE:
4635 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4636 case REAL_TYPE:
4637 return clear_padding_real_needs_padding_p (type);
4638 case BITINT_TYPE:
4639 return clear_padding_bitint_needs_padding_p (type);
4640 default:
4641 return false;
4645 /* Emit a runtime loop:
4646 for (; buf.base != end; buf.base += sz)
4647 __builtin_clear_padding (buf.base); */
4649 static void
4650 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4651 tree end, bool for_auto_init)
4653 tree l1 = create_artificial_label (buf->loc);
4654 tree l2 = create_artificial_label (buf->loc);
4655 tree l3 = create_artificial_label (buf->loc);
4656 gimple *g = gimple_build_goto (l2);
4657 gimple_set_location (g, buf->loc);
4658 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4659 g = gimple_build_label (l1);
4660 gimple_set_location (g, buf->loc);
4661 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4662 clear_padding_type (buf, type, buf->sz, for_auto_init);
4663 clear_padding_flush (buf, true);
4664 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4665 size_int (buf->sz));
4666 gimple_set_location (g, buf->loc);
4667 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4668 g = gimple_build_label (l2);
4669 gimple_set_location (g, buf->loc);
4670 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4671 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4672 gimple_set_location (g, buf->loc);
4673 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4674 g = gimple_build_label (l3);
4675 gimple_set_location (g, buf->loc);
4676 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4679 /* Clear padding bits for TYPE.  Called recursively from
4680    gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
4681    the __builtin_clear_padding call was not written by the end user;
4682    instead, it was inserted by the compiler to initialize the
4683    padding of an automatic variable.  In that case, don't emit
4684    error messages for flexible array members, which would only
4685    confuse the end user.  */
4687 static void
4688 clear_padding_type (clear_padding_struct *buf, tree type,
4689 HOST_WIDE_INT sz, bool for_auto_init)
4691 switch (TREE_CODE (type))
4693 case RECORD_TYPE:
4694 HOST_WIDE_INT cur_pos;
4695 cur_pos = 0;
4696 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4697 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4699 tree ftype = TREE_TYPE (field);
4700 if (DECL_BIT_FIELD (field))
4702 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4703 if (fldsz == 0)
4704 continue;
4705 HOST_WIDE_INT pos = int_byte_position (field);
4706 if (pos >= sz)
4707 continue;
4708 HOST_WIDE_INT bpos
4709 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4710 bpos %= BITS_PER_UNIT;
4711 HOST_WIDE_INT end
4712 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4713 if (pos + end > cur_pos)
4715 clear_padding_add_padding (buf, pos + end - cur_pos);
4716 cur_pos = pos + end;
4718 gcc_assert (cur_pos > pos
4719 && ((unsigned HOST_WIDE_INT) buf->size
4720 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4721 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4722 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4723 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4724 " in %qs", "__builtin_clear_padding");
4725 else if (BYTES_BIG_ENDIAN)
4727 /* Big endian. */
4728 if (bpos + fldsz <= BITS_PER_UNIT)
4729 *p &= ~(((1 << fldsz) - 1)
4730 << (BITS_PER_UNIT - bpos - fldsz));
4731 else
4733 if (bpos)
4735 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4736 p++;
4737 fldsz -= BITS_PER_UNIT - bpos;
4739 memset (p, 0, fldsz / BITS_PER_UNIT);
4740 p += fldsz / BITS_PER_UNIT;
4741 fldsz %= BITS_PER_UNIT;
4742 if (fldsz)
4743 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4746 else
4748 /* Little endian. */
4749 if (bpos + fldsz <= BITS_PER_UNIT)
4750 *p &= ~(((1 << fldsz) - 1) << bpos);
4751 else
4753 if (bpos)
4755 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4756 p++;
4757 fldsz -= BITS_PER_UNIT - bpos;
4759 memset (p, 0, fldsz / BITS_PER_UNIT);
4760 p += fldsz / BITS_PER_UNIT;
4761 fldsz %= BITS_PER_UNIT;
4762 if (fldsz)
4763 *p &= ~((1 << fldsz) - 1);
4767 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4769 if (ftype == error_mark_node)
4770 continue;
4771 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4772 && !COMPLETE_TYPE_P (ftype));
4773 if (!buf->clear_in_mask && !for_auto_init)
4774 error_at (buf->loc, "flexible array member %qD does not "
4775 "have well defined padding bits for %qs",
4776 field, "__builtin_clear_padding");
4778 else if (is_empty_type (ftype))
4779 continue;
4780 else
4782 HOST_WIDE_INT pos = int_byte_position (field);
4783 if (pos >= sz)
4784 continue;
4785 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4786 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4787 clear_padding_add_padding (buf, pos - cur_pos);
4788 cur_pos = pos;
4789 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4790 ftype = asbase;
4791 clear_padding_type (buf, ftype, fldsz, for_auto_init);
4792 cur_pos += fldsz;
4795 gcc_assert (sz >= cur_pos);
4796 clear_padding_add_padding (buf, sz - cur_pos);
4797 break;
4798 case ARRAY_TYPE:
4799 HOST_WIDE_INT nelts, fldsz;
4800 fldsz = int_size_in_bytes (TREE_TYPE (type));
4801 if (fldsz == 0)
4802 break;
4803 nelts = sz / fldsz;
4804 if (nelts > 1
4805 && sz > 8 * UNITS_PER_WORD
4806 && buf->union_ptr == NULL
4807 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4809 	  /* For a sufficiently large array of more than one element,
4810 emit a runtime loop to keep code size manageable. */
4811 tree base = buf->base;
4812 unsigned int prev_align = buf->align;
4813 HOST_WIDE_INT off = buf->off + buf->size;
4814 HOST_WIDE_INT prev_sz = buf->sz;
4815 clear_padding_flush (buf, true);
4816 tree elttype = TREE_TYPE (type);
4817 buf->base = create_tmp_var (build_pointer_type (elttype));
4818 tree end = make_ssa_name (TREE_TYPE (buf->base));
4819 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4820 base, size_int (off));
4821 gimple_set_location (g, buf->loc);
4822 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4823 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4824 size_int (sz));
4825 gimple_set_location (g, buf->loc);
4826 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4827 buf->sz = fldsz;
4828 buf->align = TYPE_ALIGN (elttype);
4829 buf->off = 0;
4830 buf->size = 0;
4831 clear_padding_emit_loop (buf, elttype, end, for_auto_init);
4832 buf->base = base;
4833 buf->sz = prev_sz;
4834 buf->align = prev_align;
4835 buf->size = off % UNITS_PER_WORD;
4836 buf->off = off - buf->size;
4837 memset (buf->buf, 0, buf->size);
4838 break;
4840 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4841 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4842 break;
4843 case UNION_TYPE:
4844 clear_padding_union (buf, type, sz, for_auto_init);
4845 break;
4846 case REAL_TYPE:
4847 gcc_assert ((size_t) sz <= clear_padding_unit);
4848 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4849 clear_padding_flush (buf, false);
4850 if (clear_padding_real_needs_padding_p (type))
4852 /* Use native_interpret_real + native_encode_expr to figure out
4853 which bits are padding. */
4854 memset (buf->buf + buf->size, ~0, sz);
4855 tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4856 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4857 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4858 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4859 for (size_t i = 0; i < (size_t) sz; i++)
4860 buf->buf[buf->size + i] ^= ~0;
4862 else
4863 memset (buf->buf + buf->size, 0, sz);
4864 buf->size += sz;
4865 break;
4866 case COMPLEX_TYPE:
4867 fldsz = int_size_in_bytes (TREE_TYPE (type));
4868 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4869 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4870 break;
4871 case VECTOR_TYPE:
4872 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4873 fldsz = int_size_in_bytes (TREE_TYPE (type));
4874 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4875 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4876 break;
4877 case NULLPTR_TYPE:
4878 gcc_assert ((size_t) sz <= clear_padding_unit);
4879 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4880 clear_padding_flush (buf, false);
4881 memset (buf->buf + buf->size, ~0, sz);
4882 buf->size += sz;
4883 break;
4884 case BITINT_TYPE:
4886 struct bitint_info info;
4887 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4888 gcc_assert (ok);
4889 scalar_int_mode limb_mode
4890 = as_a <scalar_int_mode> (info.abi_limb_mode);
4891 if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
4893 gcc_assert ((size_t) sz <= clear_padding_unit);
4894 if ((unsigned HOST_WIDE_INT) sz + buf->size
4895 > clear_padding_buf_size)
4896 clear_padding_flush (buf, false);
4897 if (!info.extended
4898 && TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4900 int tprec = GET_MODE_PRECISION (limb_mode);
4901 int prec = TYPE_PRECISION (type);
4902 tree t = build_nonstandard_integer_type (tprec, 1);
4903 tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
4904 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4905 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4907 else
4908 memset (buf->buf + buf->size, 0, sz);
4909 buf->size += sz;
4910 break;
4912 tree limbtype
4913 = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
4914 fldsz = int_size_in_bytes (limbtype);
4915 nelts = int_size_in_bytes (type) / fldsz;
4916 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4918 if (!info.extended
4919 && i == (info.big_endian ? 0 : nelts - 1)
4920 && (((unsigned) TYPE_PRECISION (type))
4921 % TYPE_PRECISION (limbtype)) != 0)
4923 int tprec = GET_MODE_PRECISION (limb_mode);
4924 int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
4925 tree cst = wide_int_to_tree (limbtype,
4926 wi::mask (prec, true, tprec));
4927 int len = native_encode_expr (cst, buf->buf + buf->size,
4928 fldsz);
4929 gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
4930 buf->size += fldsz;
4932 else
4933 clear_padding_type (buf, limbtype, fldsz, for_auto_init);
4935 break;
4937 default:
4938 gcc_assert ((size_t) sz <= clear_padding_unit);
4939 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4940 clear_padding_flush (buf, false);
4941 memset (buf->buf + buf->size, 0, sz);
4942 buf->size += sz;
4943 break;
4947 /* Clear padding bits of TYPE in MASK. */
4949 void
4950 clear_type_padding_in_mask (tree type, unsigned char *mask)
4952 clear_padding_struct buf;
4953 buf.loc = UNKNOWN_LOCATION;
4954 buf.clear_in_mask = true;
4955 buf.base = NULL_TREE;
4956 buf.alias_type = NULL_TREE;
4957 buf.gsi = NULL;
4958 buf.align = 0;
4959 buf.off = 0;
4960 buf.padding_bytes = 0;
4961 buf.sz = int_size_in_bytes (type);
4962 buf.size = 0;
4963 buf.union_ptr = mask;
4964 clear_padding_type (&buf, type, buf.sz, false);
4965 clear_padding_flush (&buf, true);
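/* Sketch (assuming a typical LP64 target): for
     struct T { short s; double d; };
   and an all-ones 16-byte MASK, this zeroes mask bytes 2 through 7,
   which correspond to the padding between the two members.  */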
4968 /* Fold __builtin_clear_padding builtin. */
4970 static bool
4971 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4973 gimple *stmt = gsi_stmt (*gsi);
4974 gcc_assert (gimple_call_num_args (stmt) == 2);
4975 tree ptr = gimple_call_arg (stmt, 0);
4976 tree typearg = gimple_call_arg (stmt, 1);
4977   /* The value of the 2nd argument of __builtin_clear_padding is used
4978      to distinguish whether this call was written by the user or was
4979      inserted by the compiler for automatic variable initialization.  */
4980 bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4981 tree type = TREE_TYPE (TREE_TYPE (typearg));
4982 location_t loc = gimple_location (stmt);
4983 clear_padding_struct buf;
4984 gimple_stmt_iterator gsiprev = *gsi;
4985 /* This should be folded during the lower pass. */
4986 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4987 gcc_assert (COMPLETE_TYPE_P (type));
4988 gsi_prev (&gsiprev);
4990 buf.loc = loc;
4991 buf.clear_in_mask = false;
4992 buf.base = ptr;
4993 buf.alias_type = NULL_TREE;
4994 buf.gsi = gsi;
4995 buf.align = get_pointer_alignment (ptr);
4996 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4997 buf.align = MAX (buf.align, talign);
4998 buf.off = 0;
4999 buf.padding_bytes = 0;
5000 buf.size = 0;
5001 buf.sz = int_size_in_bytes (type);
5002 buf.union_ptr = NULL;
5003 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
5004 sorry_at (loc, "%s not supported for variable length aggregates",
5005 "__builtin_clear_padding");
5006 /* The implementation currently assumes 8-bit host and target
5007 chars which is the case for all currently supported targets
5008 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
5009 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
5010 sorry_at (loc, "%s not supported on this target",
5011 "__builtin_clear_padding");
5012 else if (!clear_padding_type_may_have_padding_p (type))
5014 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
5016 tree sz = TYPE_SIZE_UNIT (type);
5017 tree elttype = type;
5018 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
5019 while (TREE_CODE (elttype) == ARRAY_TYPE
5020 && int_size_in_bytes (elttype) < 0)
5021 elttype = TREE_TYPE (elttype);
5022 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
5023 gcc_assert (eltsz >= 0);
5024 if (eltsz)
5026 buf.base = create_tmp_var (build_pointer_type (elttype));
5027 tree end = make_ssa_name (TREE_TYPE (buf.base));
5028 gimple *g = gimple_build_assign (buf.base, ptr);
5029 gimple_set_location (g, loc);
5030 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5031 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
5032 gimple_set_location (g, loc);
5033 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5034 buf.sz = eltsz;
5035 buf.align = TYPE_ALIGN (elttype);
5036 buf.alias_type = build_pointer_type (elttype);
5037 clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
5040 else
5042 if (!is_gimple_mem_ref_addr (buf.base))
5044 buf.base = make_ssa_name (TREE_TYPE (ptr));
5045 gimple *g = gimple_build_assign (buf.base, ptr);
5046 gimple_set_location (g, loc);
5047 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5049 buf.alias_type = build_pointer_type (type);
5050 clear_padding_type (&buf, type, buf.sz, for_auto_init);
5051 clear_padding_flush (&buf, true);
5054 gimple_stmt_iterator gsiprev2 = *gsi;
5055 gsi_prev (&gsiprev2);
5056 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
5057 gsi_replace (gsi, gimple_build_nop (), true);
5058 else
5060 gsi_remove (gsi, true);
5061 *gsi = gsiprev2;
5063 return true;
5066 /* Fold the non-target builtin at *GSI and return whether any simplification
5067 was made. */
5069 static bool
5070 gimple_fold_builtin (gimple_stmt_iterator *gsi)
5072 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
5073 tree callee = gimple_call_fndecl (stmt);
5075 /* Give up for always_inline inline builtins until they are
5076 inlined. */
5077 if (avoid_folding_inline_builtin (callee))
5078 return false;
5080 unsigned n = gimple_call_num_args (stmt);
5081 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5082 switch (fcode)
5084 case BUILT_IN_BCMP:
5085 return gimple_fold_builtin_bcmp (gsi);
5086 case BUILT_IN_BCOPY:
5087 return gimple_fold_builtin_bcopy (gsi);
5088 case BUILT_IN_BZERO:
5089 return gimple_fold_builtin_bzero (gsi);
5091 case BUILT_IN_MEMSET:
5092 return gimple_fold_builtin_memset (gsi,
5093 gimple_call_arg (stmt, 1),
5094 gimple_call_arg (stmt, 2));
5095 case BUILT_IN_MEMCPY:
5096 case BUILT_IN_MEMPCPY:
5097 case BUILT_IN_MEMMOVE:
5098 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5099 gimple_call_arg (stmt, 1), fcode);
5100 case BUILT_IN_SPRINTF_CHK:
5101 case BUILT_IN_VSPRINTF_CHK:
5102 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5103 case BUILT_IN_STRCAT_CHK:
5104 return gimple_fold_builtin_strcat_chk (gsi);
5105 case BUILT_IN_STRNCAT_CHK:
5106 return gimple_fold_builtin_strncat_chk (gsi);
5107 case BUILT_IN_STRLEN:
5108 return gimple_fold_builtin_strlen (gsi);
5109 case BUILT_IN_STRCPY:
5110 return gimple_fold_builtin_strcpy (gsi,
5111 gimple_call_arg (stmt, 0),
5112 gimple_call_arg (stmt, 1));
5113 case BUILT_IN_STRNCPY:
5114 return gimple_fold_builtin_strncpy (gsi,
5115 gimple_call_arg (stmt, 0),
5116 gimple_call_arg (stmt, 1),
5117 gimple_call_arg (stmt, 2));
5118 case BUILT_IN_STRCAT:
5119 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5120 gimple_call_arg (stmt, 1));
5121 case BUILT_IN_STRNCAT:
5122 return gimple_fold_builtin_strncat (gsi);
5123 case BUILT_IN_INDEX:
5124 case BUILT_IN_STRCHR:
5125 return gimple_fold_builtin_strchr (gsi, false);
5126 case BUILT_IN_RINDEX:
5127 case BUILT_IN_STRRCHR:
5128 return gimple_fold_builtin_strchr (gsi, true);
5129 case BUILT_IN_STRSTR:
5130 return gimple_fold_builtin_strstr (gsi);
5131 case BUILT_IN_STRCMP:
5132 case BUILT_IN_STRCMP_EQ:
5133 case BUILT_IN_STRCASECMP:
5134 case BUILT_IN_STRNCMP:
5135 case BUILT_IN_STRNCMP_EQ:
5136 case BUILT_IN_STRNCASECMP:
5137 return gimple_fold_builtin_string_compare (gsi);
5138 case BUILT_IN_MEMCHR:
5139 return gimple_fold_builtin_memchr (gsi);
5140 case BUILT_IN_FPUTS:
5141 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5142 gimple_call_arg (stmt, 1), false);
5143 case BUILT_IN_FPUTS_UNLOCKED:
5144 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5145 gimple_call_arg (stmt, 1), true);
5146 case BUILT_IN_MEMCPY_CHK:
5147 case BUILT_IN_MEMPCPY_CHK:
5148 case BUILT_IN_MEMMOVE_CHK:
5149 case BUILT_IN_MEMSET_CHK:
5150 return gimple_fold_builtin_memory_chk (gsi,
5151 gimple_call_arg (stmt, 0),
5152 gimple_call_arg (stmt, 1),
5153 gimple_call_arg (stmt, 2),
5154 gimple_call_arg (stmt, 3),
5155 fcode);
5156 case BUILT_IN_STPCPY:
5157 return gimple_fold_builtin_stpcpy (gsi);
5158 case BUILT_IN_STRCPY_CHK:
5159 case BUILT_IN_STPCPY_CHK:
5160 return gimple_fold_builtin_stxcpy_chk (gsi,
5161 gimple_call_arg (stmt, 0),
5162 gimple_call_arg (stmt, 1),
5163 gimple_call_arg (stmt, 2),
5164 fcode);
5165 case BUILT_IN_STRNCPY_CHK:
5166 case BUILT_IN_STPNCPY_CHK:
5167 return gimple_fold_builtin_stxncpy_chk (gsi,
5168 gimple_call_arg (stmt, 0),
5169 gimple_call_arg (stmt, 1),
5170 gimple_call_arg (stmt, 2),
5171 gimple_call_arg (stmt, 3),
5172 fcode);
5173 case BUILT_IN_SNPRINTF_CHK:
5174 case BUILT_IN_VSNPRINTF_CHK:
5175 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5177 case BUILT_IN_FPRINTF:
5178 case BUILT_IN_FPRINTF_UNLOCKED:
5179 case BUILT_IN_VFPRINTF:
5180 if (n == 2 || n == 3)
5181 return gimple_fold_builtin_fprintf (gsi,
5182 gimple_call_arg (stmt, 0),
5183 gimple_call_arg (stmt, 1),
5184 n == 3
5185 ? gimple_call_arg (stmt, 2)
5186 : NULL_TREE,
5187 fcode);
5188 break;
5189 case BUILT_IN_FPRINTF_CHK:
5190 case BUILT_IN_VFPRINTF_CHK:
5191 if (n == 3 || n == 4)
5192 return gimple_fold_builtin_fprintf (gsi,
5193 gimple_call_arg (stmt, 0),
5194 gimple_call_arg (stmt, 2),
5195 n == 4
5196 ? gimple_call_arg (stmt, 3)
5197 : NULL_TREE,
5198 fcode);
5199 break;
5200 case BUILT_IN_PRINTF:
5201 case BUILT_IN_PRINTF_UNLOCKED:
5202 case BUILT_IN_VPRINTF:
5203 if (n == 1 || n == 2)
5204 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5205 n == 2
5206 ? gimple_call_arg (stmt, 1)
5207 : NULL_TREE, fcode);
5208 break;
5209 case BUILT_IN_PRINTF_CHK:
5210 case BUILT_IN_VPRINTF_CHK:
5211 if (n == 2 || n == 3)
5212 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5213 n == 3
5214 ? gimple_call_arg (stmt, 2)
5215 : NULL_TREE, fcode);
5216 break;
5217 case BUILT_IN_ACC_ON_DEVICE:
5218 return gimple_fold_builtin_acc_on_device (gsi,
5219 gimple_call_arg (stmt, 0));
5220 case BUILT_IN_REALLOC:
5221 return gimple_fold_builtin_realloc (gsi);
5223 case BUILT_IN_CLEAR_PADDING:
5224 return gimple_fold_builtin_clear_padding (gsi);
5226 default:;
5229 /* Try the generic builtin folder. */
5230 bool ignore = (gimple_call_lhs (stmt) == NULL);
5231 tree result = fold_call_stmt (stmt, ignore);
5232 if (result)
5234 if (ignore)
5235 STRIP_NOPS (result);
5236 else
5237 result = fold_convert (gimple_call_return_type (stmt), result);
5238 gimplify_and_update_call_from_tree (gsi, result);
5239 return true;
5242 return false;
5245 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5246 function calls to constants, where possible. */
5248 static tree
5249 fold_internal_goacc_dim (const gimple *call)
5251 int axis = oacc_get_ifn_dim_arg (call);
5252 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5253 tree result = NULL_TREE;
5254 tree type = TREE_TYPE (gimple_call_lhs (call));
5256 switch (gimple_call_internal_fn (call))
5258 case IFN_GOACC_DIM_POS:
5259 /* If the size is 1, we know the answer. */
5260 if (size == 1)
5261 result = build_int_cst (type, 0);
5262 break;
5263 case IFN_GOACC_DIM_SIZE:
5264 /* If the size is not dynamic, we know the answer. */
5265 if (size)
5266 result = build_int_cst (type, size);
5267 break;
5268 default:
5269 break;
5272 return result;
5275 /* Return true if STMT is a __atomic_compare_exchange_N call that is
5276    suitable for conversion into ATOMIC_COMPARE_EXCHANGE when the second
5277    argument is &var, where var is addressable only because of such calls.  */
5279 bool
5280 optimize_atomic_compare_exchange_p (gimple *stmt)
5282 if (gimple_call_num_args (stmt) != 6
5283 || !flag_inline_atomics
5284 || !optimize
5285 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5286 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5287 || !gimple_vdef (stmt)
5288 || !gimple_vuse (stmt))
5289 return false;
5291 tree fndecl = gimple_call_fndecl (stmt);
5292 switch (DECL_FUNCTION_CODE (fndecl))
5294 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5295 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5296 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5297 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5298 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5299 break;
5300 default:
5301 return false;
5304 tree expected = gimple_call_arg (stmt, 1);
5305 if (TREE_CODE (expected) != ADDR_EXPR
5306 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5307 return false;
5309 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5310 if (!is_gimple_reg_type (etype)
5311 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5312 || TREE_THIS_VOLATILE (etype)
5313 || VECTOR_TYPE_P (etype)
5314 || TREE_CODE (etype) == COMPLEX_TYPE
5315 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5316 might not preserve all the bits. See PR71716. */
5317 || SCALAR_FLOAT_TYPE_P (etype)
5318 || maybe_ne (TYPE_PRECISION (etype),
5319 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5320 return false;
5322 tree weak = gimple_call_arg (stmt, 3);
5323 if (!integer_zerop (weak) && !integer_onep (weak))
5324 return false;
5326 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5327 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5328 machine_mode mode = TYPE_MODE (itype);
5330 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5331 == CODE_FOR_nothing
5332 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5333 return false;
5335 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5336 return false;
5338 return true;
5341 /* Fold
5342 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5343 into
5344 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5345 i = IMAGPART_EXPR <t>;
5346 r = (_Bool) i;
5347 e = REALPART_EXPR <t>; */
5349 void
5350 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5352 gimple *stmt = gsi_stmt (*gsi);
5353 tree fndecl = gimple_call_fndecl (stmt);
5354 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5355 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5356 tree ctype = build_complex_type (itype);
5357 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5358 bool throws = false;
5359 edge e = NULL;
5360 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5361 expected);
5362 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5363 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5364 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5366 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5367 build1 (VIEW_CONVERT_EXPR, itype,
5368 gimple_assign_lhs (g)));
5369 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5371 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5372 + int_size_in_bytes (itype);
5373 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5374 gimple_call_arg (stmt, 0),
5375 gimple_assign_lhs (g),
5376 gimple_call_arg (stmt, 2),
5377 build_int_cst (integer_type_node, flag),
5378 gimple_call_arg (stmt, 4),
5379 gimple_call_arg (stmt, 5));
5380 tree lhs = make_ssa_name (ctype);
5381 gimple_call_set_lhs (g, lhs);
5382 gimple_move_vops (g, stmt);
5383 tree oldlhs = gimple_call_lhs (stmt);
5384 if (stmt_can_throw_internal (cfun, stmt))
5386 throws = true;
5387 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5389 gimple_call_set_nothrow (as_a <gcall *> (g),
5390 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5391 gimple_call_set_lhs (stmt, NULL_TREE);
5392 gsi_replace (gsi, g, true);
5393 if (oldlhs)
5395 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5396 build1 (IMAGPART_EXPR, itype, lhs));
5397 if (throws)
5399 gsi_insert_on_edge_immediate (e, g);
5400 *gsi = gsi_for_stmt (g);
5402 else
5403 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5404 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5405 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5407 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5408 build1 (REALPART_EXPR, itype, lhs));
5409 if (throws && oldlhs == NULL_TREE)
5411 gsi_insert_on_edge_immediate (e, g);
5412 *gsi = gsi_for_stmt (g);
5414 else
5415 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5416 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5418 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5419 VIEW_CONVERT_EXPR,
5420 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5421 gimple_assign_lhs (g)));
5422 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5424 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5425 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5426 *gsi = gsiret;
5429 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5430    doesn't fit into TYPE.  The overflow test is performed regardless of
5431    -fwrapv, and even for unsigned types.  */
5433 bool
5434 arith_overflowed_p (enum tree_code code, const_tree type,
5435 const_tree arg0, const_tree arg1)
5437 widest2_int warg0 = widest2_int_cst (arg0);
5438 widest2_int warg1 = widest2_int_cst (arg1);
5439 widest2_int wres;
5440 switch (code)
5442 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5443 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5444 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5445 default: gcc_unreachable ();
5447 signop sign = TYPE_SIGN (type);
5448 if (sign == UNSIGNED && wi::neg_p (wres))
5449 return true;
5450 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
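/* For example, for a 32-bit unsigned TYPE, 0xffffffff + 1 computed in
   infinite precision is 2^32, which needs 33 bits, so this returns
   true; 0 - 1 is negative and thus also overflows an unsigned type.  */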
5453 /* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
5454 return a MEM_REF for the memory it references, otherwise return null.
5455 VECTYPE is the type of the memory vector. MASK_P indicates it's for
5456 MASK if true, otherwise it's for LEN. */
5458 static tree
5459 gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
5461 tree ptr = gimple_call_arg (call, 0);
5462 tree alias_align = gimple_call_arg (call, 1);
5463 if (!tree_fits_uhwi_p (alias_align))
5464 return NULL_TREE;
5466 if (mask_p)
5468 tree mask = gimple_call_arg (call, 2);
5469 if (!integer_all_onesp (mask))
5470 return NULL_TREE;
5472 else
5474 internal_fn ifn = gimple_call_internal_fn (call);
5475 int len_index = internal_fn_len_index (ifn);
5476 tree basic_len = gimple_call_arg (call, len_index);
5477 if (!poly_int_tree_p (basic_len))
5478 return NULL_TREE;
5479 tree bias = gimple_call_arg (call, len_index + 1);
5480 gcc_assert (TREE_CODE (bias) == INTEGER_CST);
5481 /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
5482 we don't fold when (bias + len) != VF. */
5483 if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
5484 GET_MODE_NUNITS (TYPE_MODE (vectype))))
5485 return NULL_TREE;
5487 /* For MASK_LEN_{LOAD,STORE}, we should also check whether
5488 the mask is an all-ones mask. */
5489 if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
5491 tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
5492 if (!integer_all_onesp (mask))
5493 return NULL_TREE;
5497 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5498 if (TYPE_ALIGN (vectype) != align)
5499 vectype = build_aligned_type (vectype, align);
5500 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5501 return fold_build2 (MEM_REF, vectype, ptr, offset);
5504 /* Try to fold IFN_{MASK,LEN,MASK_LEN}_LOAD call CALL. Return true on
5505 success. MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5507 static bool
5508 gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
5510 tree lhs = gimple_call_lhs (call);
5511 if (!lhs)
5512 return false;
5514 if (tree rhs
5515 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs), mask_p))
5517 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5518 gimple_set_location (new_stmt, gimple_location (call));
5519 gimple_move_vops (new_stmt, call);
5520 gsi_replace (gsi, new_stmt, false);
5521 return true;
5523 return false;
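/* For instance, an unconditional masked load such as (GIMPLE dump
   form, illustrative)

     vect_1 = .MASK_LOAD (ptr_2, 32B, { -1, -1, -1, -1 });

   where the mask is a constant all-ones vector is rewritten by the
   code above into a plain vector load

     vect_1 = MEM <vector(4) int> [(int *)ptr_2];

   preserving the location and virtual operands of the call.  */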
5526 /* Try to fold IFN_{MASK,LEN,MASK_LEN}_STORE call CALL. Return true on
5527 success. MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5529 static bool
5530 gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
5531 bool mask_p)
5533 internal_fn ifn = gimple_call_internal_fn (call);
5534 tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
5535 if (tree lhs
5536 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
5538 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5539 gimple_set_location (new_stmt, gimple_location (call));
5540 gimple_move_vops (new_stmt, call);
5541 gsi_replace (gsi, new_stmt, false);
5542 return true;
5544 return false;
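/* Analogously, an unconditional  .MASK_STORE (ptr_1, align, { -1, ... },
   vect_2)  with an all-ones mask becomes the plain vector store
   MEM <vector type> [ptr_1] = vect_2;  (illustrative dump form).  */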
5547 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5548 The statement may be replaced by another statement, e.g., if the call
5549 simplifies to a constant value. Return true if any changes were made.
5550 It is assumed that the operands have been previously folded. */
5552 static bool
5553 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5555 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5556 tree callee;
5557 bool changed = false;
5559 /* Check for virtual calls that became direct calls. */
5560 callee = gimple_call_fn (stmt);
5561 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5563 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5565 if (dump_file && virtual_method_call_p (callee)
5566 && !possible_polymorphic_call_target_p
5567 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5568 (OBJ_TYPE_REF_EXPR (callee)))))
5570 fprintf (dump_file,
5571 "Type inheritance inconsistent devirtualization of ");
5572 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5573 fprintf (dump_file, " to ");
5574 print_generic_expr (dump_file, callee, TDF_SLIM);
5575 fprintf (dump_file, "\n");
5578 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5579 changed = true;
5581 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5583 bool final;
5584 vec <cgraph_node *>targets
5585 = possible_polymorphic_call_targets (callee, stmt, &final);
5586 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5588 tree lhs = gimple_call_lhs (stmt);
5589 if (dump_enabled_p ())
5591 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5592 "folding virtual function call to %s\n",
5593 targets.length () == 1
5594 ? targets[0]->name ()
5595 : "__builtin_unreachable");
5597 if (targets.length () == 1)
5599 tree fndecl = targets[0]->decl;
5600 gimple_call_set_fndecl (stmt, fndecl);
5601 changed = true;
5602 /* If changing the call to __cxa_pure_virtual
5603 or similar noreturn function, adjust gimple_call_fntype
5604 too. */
5605 if (gimple_call_noreturn_p (stmt)
5606 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5607 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5608 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5609 == void_type_node))
5610 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5611 /* If the call becomes noreturn, remove the lhs. */
5612 if (lhs
5613 && gimple_call_noreturn_p (stmt)
5614 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5615 || should_remove_lhs_p (lhs)))
5617 if (TREE_CODE (lhs) == SSA_NAME)
5619 tree var = create_tmp_var (TREE_TYPE (lhs));
5620 tree def = get_or_create_ssa_default_def (cfun, var);
5621 gimple *new_stmt = gimple_build_assign (lhs, def);
5622 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5624 gimple_call_set_lhs (stmt, NULL_TREE);
5626 maybe_remove_unused_call_args (cfun, stmt);
5628 else
5630 location_t loc = gimple_location (stmt);
5631 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
5632 gimple_call_set_ctrl_altering (new_stmt, false);
5633 /* If the call had an SSA name as lhs, morph that into
5634 an uninitialized value. */
5635 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5637 tree var = create_tmp_var (TREE_TYPE (lhs));
5638 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5639 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5640 set_ssa_default_def (cfun, var, lhs);
5642 gimple_move_vops (new_stmt, stmt);
5643 gsi_replace (gsi, new_stmt, false);
5644 return true;
5650 /* Check for indirect calls that became direct calls, and then
5651 no longer require a static chain. */
5652 if (gimple_call_chain (stmt))
5654 tree fn = gimple_call_fndecl (stmt);
5655 if (fn && !DECL_STATIC_CHAIN (fn))
5657 gimple_call_set_chain (stmt, NULL);
5658 changed = true;
5662 if (inplace)
5663 return changed;
5665 /* Check for builtins that CCP can handle using information not
5666 available in the generic fold routines. */
5667 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5669 if (gimple_fold_builtin (gsi))
5670 changed = true;
5672 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5674 changed |= targetm.gimple_fold_builtin (gsi);
5676 else if (gimple_call_internal_p (stmt))
5678 enum tree_code subcode = ERROR_MARK;
5679 tree result = NULL_TREE;
5680 bool cplx_result = false;
5681 bool uaddc_usubc = false;
5682 tree overflow = NULL_TREE;
5683 switch (gimple_call_internal_fn (stmt))
5685 case IFN_BUILTIN_EXPECT:
5686 result = fold_builtin_expect (gimple_location (stmt),
5687 gimple_call_arg (stmt, 0),
5688 gimple_call_arg (stmt, 1),
5689 gimple_call_arg (stmt, 2),
5690 NULL_TREE);
5691 break;
5692 case IFN_UBSAN_OBJECT_SIZE:
5694 tree offset = gimple_call_arg (stmt, 1);
5695 tree objsize = gimple_call_arg (stmt, 2);
5696 if (integer_all_onesp (objsize)
5697 || (TREE_CODE (offset) == INTEGER_CST
5698 && TREE_CODE (objsize) == INTEGER_CST
5699 && tree_int_cst_le (offset, objsize)))
5701 replace_call_with_value (gsi, NULL_TREE);
5702 return true;
5705 break;
5706 case IFN_UBSAN_PTR:
5707 if (integer_zerop (gimple_call_arg (stmt, 1)))
5709 replace_call_with_value (gsi, NULL_TREE);
5710 return true;
5712 break;
5713 case IFN_UBSAN_BOUNDS:
5715 tree index = gimple_call_arg (stmt, 1);
5716 tree bound = gimple_call_arg (stmt, 2);
5717 if (TREE_CODE (index) == INTEGER_CST
5718 && TREE_CODE (bound) == INTEGER_CST)
5720 index = fold_convert (TREE_TYPE (bound), index);
5721 if (TREE_CODE (index) == INTEGER_CST
5722 && tree_int_cst_lt (index, bound))
5724 replace_call_with_value (gsi, NULL_TREE);
5725 return true;
5729 break;
5730 case IFN_GOACC_DIM_SIZE:
5731 case IFN_GOACC_DIM_POS:
5732 result = fold_internal_goacc_dim (stmt);
5733 break;
5734 case IFN_UBSAN_CHECK_ADD:
5735 subcode = PLUS_EXPR;
5736 break;
5737 case IFN_UBSAN_CHECK_SUB:
5738 subcode = MINUS_EXPR;
5739 break;
5740 case IFN_UBSAN_CHECK_MUL:
5741 subcode = MULT_EXPR;
5742 break;
5743 case IFN_ADD_OVERFLOW:
5744 subcode = PLUS_EXPR;
5745 cplx_result = true;
5746 break;
5747 case IFN_SUB_OVERFLOW:
5748 subcode = MINUS_EXPR;
5749 cplx_result = true;
5750 break;
5751 case IFN_MUL_OVERFLOW:
5752 subcode = MULT_EXPR;
5753 cplx_result = true;
5754 break;
5755 case IFN_UADDC:
5756 subcode = PLUS_EXPR;
5757 cplx_result = true;
5758 uaddc_usubc = true;
5759 break;
5760 case IFN_USUBC:
5761 subcode = MINUS_EXPR;
5762 cplx_result = true;
5763 uaddc_usubc = true;
5764 break;
5765 case IFN_MASK_LOAD:
5766 changed |= gimple_fold_partial_load (gsi, stmt, true);
5767 break;
5768 case IFN_MASK_STORE:
5769 changed |= gimple_fold_partial_store (gsi, stmt, true);
5770 break;
5771 case IFN_LEN_LOAD:
5772 case IFN_MASK_LEN_LOAD:
5773 changed |= gimple_fold_partial_load (gsi, stmt, false);
5774 break;
5775 case IFN_LEN_STORE:
5776 case IFN_MASK_LEN_STORE:
5777 changed |= gimple_fold_partial_store (gsi, stmt, false);
5778 break;
5779 default:
5780 break;
5782 if (subcode != ERROR_MARK)
5784 tree arg0 = gimple_call_arg (stmt, 0);
5785 tree arg1 = gimple_call_arg (stmt, 1);
5786 tree arg2 = NULL_TREE;
5787 tree type = TREE_TYPE (arg0);
5788 if (cplx_result)
5790 tree lhs = gimple_call_lhs (stmt);
5791 if (lhs == NULL_TREE)
5792 type = NULL_TREE;
5793 else
5794 type = TREE_TYPE (TREE_TYPE (lhs));
5795 if (uaddc_usubc)
5796 arg2 = gimple_call_arg (stmt, 2);
5798 if (type == NULL_TREE)
5800 else if (uaddc_usubc)
5802 if (!integer_zerop (arg2))
5804 /* x = y + 0 + 0; x = y - 0 - 0; */
5805 else if (integer_zerop (arg1))
5806 result = arg0;
5807 /* x = 0 + y + 0; */
5808 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5809 result = arg1;
5810 /* x = y - y - 0; */
5811 else if (subcode == MINUS_EXPR
5812 && operand_equal_p (arg0, arg1, 0))
5813 result = integer_zero_node;
5815 /* x = y + 0; x = y - 0; x = y * 0; */
5816 else if (integer_zerop (arg1))
5817 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5818 /* x = 0 + y; x = 0 * y; */
5819 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5820 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5821 /* x = y - y; */
5822 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5823 result = integer_zero_node;
5824 /* x = y * 1; x = 1 * y; */
5825 else if (subcode == MULT_EXPR && integer_onep (arg1))
5826 result = arg0;
5827 else if (subcode == MULT_EXPR && integer_onep (arg0))
5828 result = arg1;
5829 if (result)
5831 if (result == integer_zero_node)
5832 result = build_zero_cst (type);
5833 else if (cplx_result && TREE_TYPE (result) != type)
5835 if (TREE_CODE (result) == INTEGER_CST)
5837 if (arith_overflowed_p (PLUS_EXPR, type, result,
5838 integer_zero_node))
5839 overflow = build_one_cst (type);
5841 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5842 && TYPE_UNSIGNED (type))
5843 || (TYPE_PRECISION (type)
5844 < (TYPE_PRECISION (TREE_TYPE (result))
5845 + (TYPE_UNSIGNED (TREE_TYPE (result))
5846 && !TYPE_UNSIGNED (type)))))
5847 result = NULL_TREE;
5848 if (result)
5849 result = fold_convert (type, result);
5854 if (result)
5856 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5857 result = drop_tree_overflow (result);
5858 if (cplx_result)
5860 if (overflow == NULL_TREE)
5861 overflow = build_zero_cst (TREE_TYPE (result));
5862 tree ctype = build_complex_type (TREE_TYPE (result));
5863 if (TREE_CODE (result) == INTEGER_CST
5864 && TREE_CODE (overflow) == INTEGER_CST)
5865 result = build_complex (ctype, result, overflow);
5866 else
5867 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5868 ctype, result, overflow);
5870 gimplify_and_update_call_from_tree (gsi, result);
5871 changed = true;
5875 return changed;
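/* As an example of the internal-fn folding above: with a used complex
   lhs,

     _1 = .ADD_OVERFLOW (x_2, 0);

   takes the result = arg0 path since adding zero never overflows, and
   is replaced by  _1 = COMPLEX_EXPR <x_2, 0>;  whose REALPART_EXPR and
   IMAGPART_EXPR uses can then fold away.  */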
5879 /* Return true if NAME has a use on STMT. Note this can return
5880 false even though there's a use on STMT if SSA operands are not
5881 up-to-date. */
5883 static bool
5884 has_use_on_stmt (tree name, gimple *stmt)
5886 ssa_op_iter iter;
5887 tree op;
5888 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5889 if (op == name)
5890 return true;
5891 return false;
5894 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5895 gimple_simplify.
5897 Replaces *GSI with the simplification result in RES_OP
5898 and the associated statements in *SEQ. Does the replacement
5899 according to INPLACE and returns true if the operation succeeded. */
5901 static bool
5902 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5903 gimple_match_op *res_op,
5904 gimple_seq *seq, bool inplace)
5906 gimple *stmt = gsi_stmt (*gsi);
5907 tree *ops = res_op->ops;
5908 unsigned int num_ops = res_op->num_ops;
5910 /* Play safe and do not allow abnormals to be mentioned in
5911 newly created statements. See also maybe_push_res_to_seq.
5912 As an exception allow such uses if there was a use of the
5913 same SSA name on the old stmt. */
5914 for (unsigned int i = 0; i < num_ops; ++i)
5915 if (TREE_CODE (ops[i]) == SSA_NAME
5916 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5917 && !has_use_on_stmt (ops[i], stmt))
5918 return false;
5920 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5921 for (unsigned int i = 0; i < 2; ++i)
5922 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5923 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5924 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5925 return false;
5927 /* Don't insert new statements when INPLACE is true, even if we could
5928 reuse STMT for the final statement. */
5929 if (inplace && !gimple_seq_empty_p (*seq))
5930 return false;
5932 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5934 gcc_assert (res_op->code.is_tree_code ());
5935 auto code = tree_code (res_op->code);
5936 if (TREE_CODE_CLASS (code) == tcc_comparison
5937 /* A GIMPLE_COND's condition may not throw. */
5938 && (!flag_exceptions
5939 || !cfun->can_throw_non_call_exceptions
5940 || !operation_could_trap_p (code,
5941 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5942 false, NULL_TREE)))
5943 gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5944 else if (code == SSA_NAME)
5945 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5946 build_zero_cst (TREE_TYPE (ops[0])));
5947 else if (code == INTEGER_CST)
5949 if (integer_zerop (ops[0]))
5950 gimple_cond_make_false (cond_stmt);
5951 else
5952 gimple_cond_make_true (cond_stmt);
5954 else if (!inplace)
5956 tree res = maybe_push_res_to_seq (res_op, seq);
5957 if (!res)
5958 return false;
5959 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5960 build_zero_cst (TREE_TYPE (res)));
5962 else
5963 return false;
5964 if (dump_file && (dump_flags & TDF_DETAILS))
5966 fprintf (dump_file, "gimple_simplified to ");
5967 if (!gimple_seq_empty_p (*seq))
5968 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5969 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5970 0, TDF_SLIM);
5972 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5973 return true;
5975 else if (is_gimple_assign (stmt)
5976 && res_op->code.is_tree_code ())
5978 auto code = tree_code (res_op->code);
5979 if (!inplace
5980 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5982 maybe_build_generic_op (res_op);
5983 gimple_assign_set_rhs_with_ops (gsi, code,
5984 res_op->op_or_null (0),
5985 res_op->op_or_null (1),
5986 res_op->op_or_null (2));
5987 if (dump_file && (dump_flags & TDF_DETAILS))
5989 fprintf (dump_file, "gimple_simplified to ");
5990 if (!gimple_seq_empty_p (*seq))
5991 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5992 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5993 0, TDF_SLIM);
5995 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5996 return true;
5999 else if (res_op->code.is_fn_code ()
6000 && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
6002 gcc_assert (num_ops == gimple_call_num_args (stmt));
6003 for (unsigned int i = 0; i < num_ops; ++i)
6004 gimple_call_set_arg (stmt, i, ops[i]);
6005 if (dump_file && (dump_flags & TDF_DETAILS))
6007 fprintf (dump_file, "gimple_simplified to ");
6008 if (!gimple_seq_empty_p (*seq))
6009 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
6010 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
6012 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
6013 return true;
6015 else if (!inplace)
6017 if (gimple_has_lhs (stmt))
6019 tree lhs = gimple_get_lhs (stmt);
6020 if (!maybe_push_res_to_seq (res_op, seq, lhs))
6021 return false;
6022 if (dump_file && (dump_flags & TDF_DETAILS))
6024 fprintf (dump_file, "gimple_simplified to ");
6025 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
6027 gsi_replace_with_seq_vops (gsi, *seq);
6028 return true;
6030 else
6031 gcc_unreachable ();
6034 return false;
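/* E.g. if gimple_simplify reduces the condition of  if (a_1 != a_1)
   (integral a_1) to the integer constant 0, the code above rewrites
   the GIMPLE_COND in place via gimple_cond_make_false rather than
   materializing any new statement.  */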
6037 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
6039 static bool
6040 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
6042 bool res = false;
6043 tree *orig_t = t;
6045 if (TREE_CODE (*t) == ADDR_EXPR)
6046 t = &TREE_OPERAND (*t, 0);
6048 /* The C and C++ frontends use an ARRAY_REF for indexing with their
6049 generic vector extension. The actual vector referenced is
6050 view-converted to an array type for this purpose. If the index
6051 is constant the canonical representation in the middle-end is a
6052 BIT_FIELD_REF so re-write the former to the latter here. */
6053 if (TREE_CODE (*t) == ARRAY_REF
6054 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
6055 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
6056 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
6058 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
6059 if (VECTOR_TYPE_P (vtype))
6061 tree low = array_ref_low_bound (*t);
6062 if (TREE_CODE (low) == INTEGER_CST)
6064 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
6066 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
6067 wi::to_widest (low));
6068 idx = wi::mul (idx, wi::to_widest
6069 (TYPE_SIZE (TREE_TYPE (*t))));
6070 widest_int ext
6071 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
6072 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
6074 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
6075 TREE_TYPE (*t),
6076 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
6077 TYPE_SIZE (TREE_TYPE (*t)),
6078 wide_int_to_tree (bitsizetype, idx));
6079 res = true;
6086 while (handled_component_p (*t))
6087 t = &TREE_OPERAND (*t, 0);
6089 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
6090 of invariant addresses into an SSA name MEM_REF address. */
6091 if (TREE_CODE (*t) == MEM_REF
6092 || TREE_CODE (*t) == TARGET_MEM_REF)
6094 tree addr = TREE_OPERAND (*t, 0);
6095 if (TREE_CODE (addr) == ADDR_EXPR
6096 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
6097 || handled_component_p (TREE_OPERAND (addr, 0))))
6099 tree base;
6100 poly_int64 coffset;
6101 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
6102 &coffset);
6103 if (!base)
6105 if (is_debug)
6106 return false;
6107 gcc_unreachable ();
6110 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
6111 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
6112 TREE_OPERAND (*t, 1),
6113 size_int (coffset));
6114 res = true;
6116 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
6117 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
6120 /* Canonicalize back MEM_REFs to plain reference trees if the object
6121 accessed is a decl that has the same access semantics as the MEM_REF. */
6122 if (TREE_CODE (*t) == MEM_REF
6123 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
6124 && integer_zerop (TREE_OPERAND (*t, 1))
6125 && MR_DEPENDENCE_CLIQUE (*t) == 0)
6127 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6128 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6129 if (/* Same volatile qualification. */
6130 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6131 /* Same TBAA behavior with -fstrict-aliasing. */
6132 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6133 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6134 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6135 /* Same alignment. */
6136 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6137 /* We have to look out here to not drop a required conversion
6138 from the rhs to the lhs if *t appears on the lhs or vice-versa
6139 if it appears on the rhs. Thus require strict type
6140 compatibility. */
6141 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6143 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6144 res = true;
6148 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6149 && TREE_CODE (*t) == MEM_REF
6150 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6152 tree base;
6153 poly_int64 coffset;
6154 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6155 &coffset);
6156 if (base)
6158 gcc_assert (TREE_CODE (base) == MEM_REF);
6159 poly_int64 moffset;
6160 if (mem_ref_offset (base).to_shwi (&moffset))
6162 coffset += moffset;
6163 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6165 coffset += moffset;
6166 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6167 return true;
6173 /* Canonicalize TARGET_MEM_REF in particular with respect to
6174 the indexes becoming constant. */
6175 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6177 tree tem = maybe_fold_tmr (*t);
6178 if (tem)
6180 *t = tem;
6181 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6182 recompute_tree_invariant_for_addr_expr (*orig_t);
6183 res = true;
6187 return res;
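/* For illustration: after propagating an invariant address like &s.f
   into a MEM_REF, the code above rebases  MEM[&s.f, CST]  via
   get_addr_base_and_unit_offset so the component offset is folded
   into the constant offset operand; and  MEM[&decl, 0]  is turned
   back into plain  decl  when volatility, TBAA, alignment and type
   compatibility all agree.  */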
6190 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6191 distinguishes both cases. */
6193 static bool
6194 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6196 bool changed = false;
6197 gimple *stmt = gsi_stmt (*gsi);
6198 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6199 unsigned i;
6200 fold_defer_overflow_warnings ();
6202 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6203 after propagation.
6204 ??? This shouldn't be done in generic folding but in the
6205 propagation helpers which also know whether an address was
6206 propagated.
6207 Also canonicalize operand order. */
6208 switch (gimple_code (stmt))
6210 case GIMPLE_ASSIGN:
6211 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6213 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6214 if ((REFERENCE_CLASS_P (*rhs)
6215 || TREE_CODE (*rhs) == ADDR_EXPR)
6216 && maybe_canonicalize_mem_ref_addr (rhs))
6217 changed = true;
6218 tree *lhs = gimple_assign_lhs_ptr (stmt);
6219 if (REFERENCE_CLASS_P (*lhs)
6220 && maybe_canonicalize_mem_ref_addr (lhs))
6221 changed = true;
6222 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6223 This cannot be done in maybe_canonicalize_mem_ref_addr
6224 as the resulting gimple has two operands rather than one,
6225 and for the same reason it cannot be done inplace. */
6228 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6230 tree inner = TREE_OPERAND (*rhs, 0);
6231 if (TREE_CODE (inner) == MEM_REF
6232 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6233 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6235 tree ptr = TREE_OPERAND (inner, 0);
6236 tree addon = TREE_OPERAND (inner, 1);
6237 addon = fold_convert (sizetype, addon);
6238 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6239 ptr, addon);
6240 changed = true;
6241 stmt = gsi_stmt (*gsi);
6245 else
6247 /* Canonicalize operand order. */
6248 enum tree_code code = gimple_assign_rhs_code (stmt);
6249 if (TREE_CODE_CLASS (code) == tcc_comparison
6250 || commutative_tree_code (code)
6251 || commutative_ternary_tree_code (code))
6253 tree rhs1 = gimple_assign_rhs1 (stmt);
6254 tree rhs2 = gimple_assign_rhs2 (stmt);
6255 if (tree_swap_operands_p (rhs1, rhs2))
6257 gimple_assign_set_rhs1 (stmt, rhs2);
6258 gimple_assign_set_rhs2 (stmt, rhs1);
6259 if (TREE_CODE_CLASS (code) == tcc_comparison)
6260 gimple_assign_set_rhs_code (stmt,
6261 swap_tree_comparison (code));
6262 changed = true;
6266 break;
6267 case GIMPLE_CALL:
6269 gcall *call = as_a<gcall *> (stmt);
6270 for (i = 0; i < gimple_call_num_args (call); ++i)
6272 tree *arg = gimple_call_arg_ptr (call, i);
6273 if (REFERENCE_CLASS_P (*arg)
6274 && maybe_canonicalize_mem_ref_addr (arg))
6275 changed = true;
6277 tree *lhs = gimple_call_lhs_ptr (call);
6278 if (*lhs
6279 && REFERENCE_CLASS_P (*lhs)
6280 && maybe_canonicalize_mem_ref_addr (lhs))
6281 changed = true;
6282 if (*lhs)
6284 combined_fn cfn = gimple_call_combined_fn (call);
6285 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6286 int opno = first_commutative_argument (ifn);
6287 if (opno >= 0)
6289 tree arg1 = gimple_call_arg (call, opno);
6290 tree arg2 = gimple_call_arg (call, opno + 1);
6291 if (tree_swap_operands_p (arg1, arg2))
6293 gimple_call_set_arg (call, opno, arg2);
6294 gimple_call_set_arg (call, opno + 1, arg1);
6295 changed = true;
6299 break;
6301 case GIMPLE_ASM:
6303 gasm *asm_stmt = as_a <gasm *> (stmt);
6304 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6306 tree link = gimple_asm_output_op (asm_stmt, i);
6307 tree op = TREE_VALUE (link);
6308 if (REFERENCE_CLASS_P (op)
6309 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6310 changed = true;
6312 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6314 tree link = gimple_asm_input_op (asm_stmt, i);
6315 tree op = TREE_VALUE (link);
6316 if ((REFERENCE_CLASS_P (op)
6317 || TREE_CODE (op) == ADDR_EXPR)
6318 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6319 changed = true;
6322 break;
6323 case GIMPLE_DEBUG:
6324 if (gimple_debug_bind_p (stmt))
6326 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6327 if (*val
6328 && (REFERENCE_CLASS_P (*val)
6329 || TREE_CODE (*val) == ADDR_EXPR)
6330 && maybe_canonicalize_mem_ref_addr (val, true))
6331 changed = true;
6333 break;
6334 case GIMPLE_COND:
6336 /* Canonicalize operand order. */
6337 tree lhs = gimple_cond_lhs (stmt);
6338 tree rhs = gimple_cond_rhs (stmt);
6339 if (tree_swap_operands_p (lhs, rhs))
6341 gcond *gc = as_a <gcond *> (stmt);
6342 gimple_cond_set_lhs (gc, rhs);
6343 gimple_cond_set_rhs (gc, lhs);
6344 gimple_cond_set_code (gc,
6345 swap_tree_comparison (gimple_cond_code (gc)));
6346 changed = true;
6349 default:;
6352 /* Dispatch to pattern-based folding. */
6353 if (!inplace
6354 || is_gimple_assign (stmt)
6355 || gimple_code (stmt) == GIMPLE_COND)
6357 gimple_seq seq = NULL;
6358 gimple_match_op res_op;
6359 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6360 valueize, valueize))
6362 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6363 changed = true;
6364 else
6365 gimple_seq_discard (seq);
6369 stmt = gsi_stmt (*gsi);
6371 /* Fold the main computation performed by the statement. */
6372 switch (gimple_code (stmt))
6374 case GIMPLE_ASSIGN:
6376 /* Try to canonicalize for boolean-typed X the comparisons
6377 X == 0, X == 1, X != 0, and X != 1. */
6378 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6379 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6381 tree lhs = gimple_assign_lhs (stmt);
6382 tree op1 = gimple_assign_rhs1 (stmt);
6383 tree op2 = gimple_assign_rhs2 (stmt);
6384 tree type = TREE_TYPE (op1);
6386 /* Check whether the comparison operands are of the same boolean
6387 type as the result type.
6388 Check that the second operand is an integer constant with value
6389 one or zero. */
6390 if (TREE_CODE (op2) == INTEGER_CST
6391 && (integer_zerop (op2) || integer_onep (op2))
6392 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6394 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6395 bool is_logical_not = false;
6397 /* X == 0 and X != 1 is a logical-not of X;
6398 X == 1 and X != 0 is X. */
6399 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6400 || (cmp_code == NE_EXPR && integer_onep (op2)))
6401 is_logical_not = true;
6403 if (is_logical_not == false)
6404 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6405 /* Only for one-bit precision typed X is the transformation
6406 !X -> ~X valid. */
6407 else if (TYPE_PRECISION (type) == 1)
6408 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6409 /* Otherwise we use !X -> X ^ 1. */
6410 else
6411 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6412 build_int_cst (type, 1));
6413 changed = true;
6414 break;
6418 unsigned old_num_ops = gimple_num_ops (stmt);
6419 tree lhs = gimple_assign_lhs (stmt);
6420 tree new_rhs = fold_gimple_assign (gsi);
6421 if (new_rhs
6422 && !useless_type_conversion_p (TREE_TYPE (lhs),
6423 TREE_TYPE (new_rhs)))
6424 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6425 if (new_rhs
6426 && (!inplace
6427 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6429 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6430 changed = true;
6432 break;
6435 case GIMPLE_CALL:
6436 changed |= gimple_fold_call (gsi, inplace);
6437 break;
6439 case GIMPLE_DEBUG:
6440 if (gimple_debug_bind_p (stmt))
6442 tree val = gimple_debug_bind_get_value (stmt);
6443 if (val && REFERENCE_CLASS_P (val))
6445 tree tem = maybe_fold_reference (val);
6446 if (tem)
6448 gimple_debug_bind_set_value (stmt, tem);
6449 changed = true;
6453 break;
6455 case GIMPLE_RETURN:
6457 greturn *ret_stmt = as_a<greturn *> (stmt);
6458 tree ret = gimple_return_retval (ret_stmt);
6460 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6462 tree val = valueize (ret);
6463 if (val && val != ret
6464 && may_propagate_copy (ret, val))
6466 gimple_return_set_retval (ret_stmt, val);
6467 changed = true;
6471 break;
6473 default:;
6476 stmt = gsi_stmt (*gsi);
6478 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6479 return changed;
6482 /* Valueization callback that ends up not following SSA edges. */
6484 tree
6485 no_follow_ssa_edges (tree)
6487 return NULL_TREE;
6490 /* Valueization callback that ends up following single-use SSA edges only. */
6492 tree
6493 follow_single_use_edges (tree val)
6495 if (TREE_CODE (val) == SSA_NAME
6496 && !has_single_use (val))
6497 return NULL_TREE;
6498 return val;
6501 /* Valueization callback that follows all SSA edges. */
6503 tree
6504 follow_all_ssa_edges (tree val)
6506 return val;
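/* These callbacks let fold_stmt users control how far simplification
   may look through SSA definitions, e.g. (usage sketch)

     changed = fold_stmt (&gsi, follow_single_use_edges);

   only follows defs of single-use SSA names, which avoids growing
   expressions whose operands remain live elsewhere.  */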
6509 /* Fold the statement pointed to by GSI. In some cases, this function may
6510 replace the whole statement with a new one. Returns true iff folding
6511 makes any changes.
6512 The statement pointed to by GSI should be in valid gimple form but may
6513 be in an unfolded state resulting from, for example, constant
6514 propagation, which can produce *&x = 0. */
6516 bool
6517 fold_stmt (gimple_stmt_iterator *gsi)
6519 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6522 bool
6523 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6525 return fold_stmt_1 (gsi, false, valueize);
6528 /* Perform the minimal folding on statement *GSI. Only operations like
6529 *&x created by constant propagation are handled. The statement cannot
6530 be replaced with a new one. Return true if the statement was
6531 changed, false otherwise.
6532 The statement *GSI should be in valid gimple form but may
6533 be in an unfolded state resulting from, for example, constant
6534 propagation, which can produce *&x = 0. */
6536 bool
6537 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6539 gimple *stmt = gsi_stmt (*gsi);
6540 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6541 gcc_assert (gsi_stmt (*gsi) == stmt);
6542 return changed;
6545 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6546 if EXPR is null or we don't know how.
6547 If non-null, the result always has boolean type. */
6549 static tree
6550 canonicalize_bool (tree expr, bool invert)
6552 if (!expr)
6553 return NULL_TREE;
6554 else if (invert)
6556 if (integer_nonzerop (expr))
6557 return boolean_false_node;
6558 else if (integer_zerop (expr))
6559 return boolean_true_node;
6560 else if (TREE_CODE (expr) == SSA_NAME)
6561 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6562 build_int_cst (TREE_TYPE (expr), 0));
6563 else if (COMPARISON_CLASS_P (expr))
6564 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6565 boolean_type_node,
6566 TREE_OPERAND (expr, 0),
6567 TREE_OPERAND (expr, 1));
6568 else
6569 return NULL_TREE;
6571 else
6573 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6574 return expr;
6575 if (integer_nonzerop (expr))
6576 return boolean_true_node;
6577 else if (integer_zerop (expr))
6578 return boolean_false_node;
6579 else if (TREE_CODE (expr) == SSA_NAME)
6580 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6581 build_int_cst (TREE_TYPE (expr), 0));
6582 else if (COMPARISON_CLASS_P (expr))
6583 return fold_build2 (TREE_CODE (expr),
6584 boolean_type_node,
6585 TREE_OPERAND (expr, 0),
6586 TREE_OPERAND (expr, 1));
6587 else
6588 return NULL_TREE;
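/* For example, canonicalize_bool (x_1, true) on a non-boolean SSA
   name yields the boolean-typed comparison  x_1 == 0,  and on a
   comparison tree the INVERT case applies invert_tree_comparison, so
   a < b  becomes  a >= b  (exact for non-floating-point operands).  */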
6592 /* Check to see if a boolean expression EXPR is logically equivalent to the
6593 comparison (OP1 CODE OP2). Check for various identities involving
6594 SSA_NAMEs. */
6596 static bool
6597 same_bool_comparison_p (const_tree expr, enum tree_code code,
6598 const_tree op1, const_tree op2)
6600 gimple *s;
6602 /* The obvious case. */
6603 if (TREE_CODE (expr) == code
6604 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6605 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6606 return true;
6608 /* Check for comparing (name, name != 0) and the case where expr
6609 is an SSA_NAME with a definition matching the comparison. */
6610 if (TREE_CODE (expr) == SSA_NAME
6611 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6613 if (operand_equal_p (expr, op1, 0))
6614 return ((code == NE_EXPR && integer_zerop (op2))
6615 || (code == EQ_EXPR && integer_nonzerop (op2)));
6616 s = SSA_NAME_DEF_STMT (expr);
6617 if (is_gimple_assign (s)
6618 && gimple_assign_rhs_code (s) == code
6619 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6620 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6621 return true;
6624 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6625 of name is a comparison, recurse. */
6626 if (TREE_CODE (op1) == SSA_NAME
6627 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6629 s = SSA_NAME_DEF_STMT (op1);
6630 if (is_gimple_assign (s)
6631 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6633 enum tree_code c = gimple_assign_rhs_code (s);
6634 if ((c == NE_EXPR && integer_zerop (op2))
6635 || (c == EQ_EXPR && integer_nonzerop (op2)))
6636 return same_bool_comparison_p (expr, c,
6637 gimple_assign_rhs1 (s),
6638 gimple_assign_rhs2 (s));
6639 if ((c == EQ_EXPR && integer_zerop (op2))
6640 || (c == NE_EXPR && integer_nonzerop (op2)))
6641 return same_bool_comparison_p (expr,
6642 invert_tree_comparison (c, false),
6643 gimple_assign_rhs1 (s),
6644 gimple_assign_rhs2 (s));
6647 return false;
6650 /* Check to see if two boolean expressions OP1 and OP2 are logically
6651 equivalent. */
6653 static bool
6654 same_bool_result_p (const_tree op1, const_tree op2)
6656 /* Simple cases first. */
6657 if (operand_equal_p (op1, op2, 0))
6658 return true;
6660 /* Check the cases where at least one of the operands is a comparison.
6661 These are a bit smarter than operand_equal_p in that they apply some
6662 identities on SSA_NAMEs. */
6663 if (COMPARISON_CLASS_P (op2)
6664 && same_bool_comparison_p (op1, TREE_CODE (op2),
6665 TREE_OPERAND (op2, 0),
6666 TREE_OPERAND (op2, 1)))
6667 return true;
6668 if (COMPARISON_CLASS_P (op1)
6669 && same_bool_comparison_p (op2, TREE_CODE (op1),
6670 TREE_OPERAND (op1, 0),
6671 TREE_OPERAND (op1, 1)))
6672 return true;
6674 /* Default case. */
6675 return false;
6678 /* Forward declarations for some mutually recursive functions. */
6680 static tree
6681 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6682 enum tree_code code2, tree op2a, tree op2b, basic_block);
6683 static tree
6684 and_var_with_comparison (tree type, tree var, bool invert,
6685 enum tree_code code2, tree op2a, tree op2b,
6686 basic_block);
6687 static tree
6688 and_var_with_comparison_1 (tree type, gimple *stmt,
6689 enum tree_code code2, tree op2a, tree op2b,
6690 basic_block);
6691 static tree
6692 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6693 enum tree_code code2, tree op2a, tree op2b,
6694 basic_block);
6695 static tree
6696 or_var_with_comparison (tree, tree var, bool invert,
6697 enum tree_code code2, tree op2a, tree op2b,
6698 basic_block);
6699 static tree
6700 or_var_with_comparison_1 (tree, gimple *stmt,
6701 enum tree_code code2, tree op2a, tree op2b,
6702 basic_block);
6704 /* Helper function for and_comparisons_1: try to simplify the AND of the
6705 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6706 If INVERT is true, invert the value of VAR before doing the AND.
6707 Return NULL_TREE if we can't simplify this to a single expression. */
6709 static tree
6710 and_var_with_comparison (tree type, tree var, bool invert,
6711 enum tree_code code2, tree op2a, tree op2b,
6712 basic_block outer_cond_bb)
6714 tree t;
6715 gimple *stmt = SSA_NAME_DEF_STMT (var);
6717 /* We can only deal with variables whose definitions are assignments. */
6718 if (!is_gimple_assign (stmt))
6719 return NULL_TREE;
6721 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6722 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6723 Then we only have to consider the simpler non-inverted cases. */
6724 if (invert)
6725 t = or_var_with_comparison_1 (type, stmt,
6726 invert_tree_comparison (code2, false),
6727 op2a, op2b, outer_cond_bb);
6728 else
6729 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6730 outer_cond_bb);
6731 return canonicalize_bool (t, invert);
6734 /* Try to simplify the AND of the ssa variable defined by the assignment
6735 STMT with the comparison specified by (OP2A CODE2 OP2B).
6736 Return NULL_TREE if we can't simplify this to a single expression. */
6738 static tree
6739 and_var_with_comparison_1 (tree type, gimple *stmt,
6740 enum tree_code code2, tree op2a, tree op2b,
6741 basic_block outer_cond_bb)
6743 tree var = gimple_assign_lhs (stmt);
6744 tree true_test_var = NULL_TREE;
6745 tree false_test_var = NULL_TREE;
6746 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6748 /* Check for identities like (var AND (var == 0)) => false. */
6749 if (TREE_CODE (op2a) == SSA_NAME
6750 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6752 if ((code2 == NE_EXPR && integer_zerop (op2b))
6753 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6755 true_test_var = op2a;
6756 if (var == true_test_var)
6757 return var;
6759 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6760 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6762 false_test_var = op2a;
6763 if (var == false_test_var)
6764 return boolean_false_node;
6768 /* If the definition is a comparison, recurse on it. */
6769 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6771 tree t = and_comparisons_1 (type, innercode,
6772 gimple_assign_rhs1 (stmt),
6773 gimple_assign_rhs2 (stmt),
6774 code2,
6775 op2a,
6776 op2b, outer_cond_bb);
6777 if (t)
6778 return t;
6781 /* If the definition is an AND or OR expression, we may be able to
6782 simplify by reassociating. */
6783 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6784 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6786 tree inner1 = gimple_assign_rhs1 (stmt);
6787 tree inner2 = gimple_assign_rhs2 (stmt);
6788 gimple *s;
6789 tree t;
6790 tree partial = NULL_TREE;
6791 bool is_and = (innercode == BIT_AND_EXPR);
6793 /* Check for boolean identities that don't require recursive examination
6794 of inner1/inner2:
6795 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6796 inner1 AND (inner1 OR inner2) => inner1
6797 !inner1 AND (inner1 AND inner2) => false
6798 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6799 Likewise for similar cases involving inner2. */
6800 if (inner1 == true_test_var)
6801 return (is_and ? var : inner1);
6802 else if (inner2 == true_test_var)
6803 return (is_and ? var : inner2);
6804 else if (inner1 == false_test_var)
6805 return (is_and
6806 ? boolean_false_node
6807 : and_var_with_comparison (type, inner2, false, code2, op2a,
6808 op2b, outer_cond_bb));
6809 else if (inner2 == false_test_var)
6810 return (is_and
6811 ? boolean_false_node
6812 : and_var_with_comparison (type, inner1, false, code2, op2a,
6813 op2b, outer_cond_bb));
6815 /* Next, redistribute/reassociate the AND across the inner tests.
6816 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6817 if (TREE_CODE (inner1) == SSA_NAME
6818 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6819 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6820 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6821 gimple_assign_rhs1 (s),
6822 gimple_assign_rhs2 (s),
6823 code2, op2a, op2b,
6824 outer_cond_bb)))
6826 /* Handle the AND case, where we are reassociating:
6827 (inner1 AND inner2) AND (op2a code2 op2b)
6828 => (t AND inner2)
6829 If the partial result t is a constant, we win. Otherwise
6830 continue on to try reassociating with the other inner test. */
6831 if (is_and)
6833 if (integer_onep (t))
6834 return inner2;
6835 else if (integer_zerop (t))
6836 return boolean_false_node;
6839 /* Handle the OR case, where we are redistributing:
6840 (inner1 OR inner2) AND (op2a code2 op2b)
6841 => (t OR (inner2 AND (op2a code2 op2b))) */
6842 else if (integer_onep (t))
6843 return boolean_true_node;
6845 /* Save partial result for later. */
6846 partial = t;
6849 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6850 if (TREE_CODE (inner2) == SSA_NAME
6851 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6852 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6853 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6854 gimple_assign_rhs1 (s),
6855 gimple_assign_rhs2 (s),
6856 code2, op2a, op2b,
6857 outer_cond_bb)))
6859 /* Handle the AND case, where we are reassociating:
6860 (inner1 AND inner2) AND (op2a code2 op2b)
6861 => (inner1 AND t) */
6862 if (is_and)
6864 if (integer_onep (t))
6865 return inner1;
6866 else if (integer_zerop (t))
6867 return boolean_false_node;
6868 /* If both are the same, we can apply the identity
6869 (x AND x) == x. */
6870 else if (partial && same_bool_result_p (t, partial))
6871 return t;
6874 /* Handle the OR case, where we are redistributing:
6875 (inner1 OR inner2) AND (op2a code2 op2b)
6876 => (t OR (inner1 AND (op2a code2 op2b)))
6877 => (t OR partial) */
6878 else
6880 if (integer_onep (t))
6881 return boolean_true_node;
6882 else if (partial)
6884 /* We already got a simplification for the other
6885 operand to the redistributed OR expression. The
6886 interesting case is when at least one is false.
6887 Or, if both are the same, we can apply the identity
6888 (x OR x) == x. */
6889 if (integer_zerop (partial))
6890 return t;
6891 else if (integer_zerop (t))
6892 return partial;
6893 else if (same_bool_result_p (t, partial))
6894 return t;
6899 return NULL_TREE;
6902 /* Try to simplify the AND of two comparisons defined by
6903 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6904 If this can be done without constructing an intermediate value,
6905 return the resulting tree; otherwise NULL_TREE is returned.
6906 This function is deliberately asymmetric as it recurses on SSA_DEFs
6907 in the first comparison but not the second. */
6909 static tree
6910 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6911 enum tree_code code2, tree op2a, tree op2b,
6912 basic_block outer_cond_bb)
6914 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6916 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6917 if (operand_equal_p (op1a, op2a, 0)
6918 && operand_equal_p (op1b, op2b, 0))
6920 /* Result will be either NULL_TREE, or a combined comparison. */
6921 tree t = combine_comparisons (UNKNOWN_LOCATION,
6922 TRUTH_ANDIF_EXPR, code1, code2,
6923 truth_type, op1a, op1b);
6924 if (t)
6925 return t;
6928 /* Likewise the swapped case of the above. */
6929 if (operand_equal_p (op1a, op2b, 0)
6930 && operand_equal_p (op1b, op2a, 0))
6932 /* Result will be either NULL_TREE, or a combined comparison. */
6933 tree t = combine_comparisons (UNKNOWN_LOCATION,
6934 TRUTH_ANDIF_EXPR, code1,
6935 swap_tree_comparison (code2),
6936 truth_type, op1a, op1b);
6937 if (t)
6938 return t;
6941 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6942 NAME's definition is a truth value. See if there are any simplifications
6943 that can be done against the NAME's definition. */
6944 if (TREE_CODE (op1a) == SSA_NAME
6945 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6946 && (integer_zerop (op1b) || integer_onep (op1b)))
6948 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6949 || (code1 == NE_EXPR && integer_onep (op1b)));
6950 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6951 switch (gimple_code (stmt))
6953 case GIMPLE_ASSIGN:
6954 /* Try to simplify by copy-propagating the definition. */
6955 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6956 op2b, outer_cond_bb);
6958 case GIMPLE_PHI:
6959 /* If every argument to the PHI produces the same result when
6960 ANDed with the second comparison, we win.
6961 Do not do this unless the type is bool since we need a bool
6962 result here anyway. */
6963 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6965 tree result = NULL_TREE;
6966 unsigned i;
6967 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6969 tree arg = gimple_phi_arg_def (stmt, i);
6971 /* If this PHI has itself as an argument, ignore it.
6972 If all the other args produce the same result,
6973 we're still OK. */
6974 if (arg == gimple_phi_result (stmt))
6975 continue;
6976 else if (TREE_CODE (arg) == INTEGER_CST)
6978 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6980 if (!result)
6981 result = boolean_false_node;
6982 else if (!integer_zerop (result))
6983 return NULL_TREE;
6985 else if (!result)
6986 result = fold_build2 (code2, boolean_type_node,
6987 op2a, op2b);
6988 else if (!same_bool_comparison_p (result,
6989 code2, op2a, op2b))
6990 return NULL_TREE;
6992 else if (TREE_CODE (arg) == SSA_NAME
6993 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6995 tree temp;
6996 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6997 /* In simple cases we can look through PHI nodes,
6998 but we have to be careful with loops.
6999 See PR49073. */
7000 if (! dom_info_available_p (CDI_DOMINATORS)
7001 || gimple_bb (def_stmt) == gimple_bb (stmt)
7002 || dominated_by_p (CDI_DOMINATORS,
7003 gimple_bb (def_stmt),
7004 gimple_bb (stmt)))
7005 return NULL_TREE;
7006 temp = and_var_with_comparison (type, arg, invert, code2,
7007 op2a, op2b,
7008 outer_cond_bb);
7009 if (!temp)
7010 return NULL_TREE;
7011 else if (!result)
7012 result = temp;
7013 else if (!same_bool_result_p (result, temp))
7014 return NULL_TREE;
7016 else
7017 return NULL_TREE;
7019 return result;
7022 default:
7023 break;
7026 return NULL_TREE;
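/* Worked example: for (x_1 <= y_2) AND (x_1 >= y_2) with integral
   operands both operand pairs match, so combine_comparisons
   intersects the two codes under TRUTH_ANDIF_EXPR and the whole
   conjunction folds to the single comparison x_1 == y_2.  */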
7029 static basic_block fosa_bb;
7030 static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
7031 static tree
7032 follow_outer_ssa_edges (tree val)
7034 if (TREE_CODE (val) == SSA_NAME
7035 && !SSA_NAME_IS_DEFAULT_DEF (val))
7037 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
7038 if (!def_bb
7039 || def_bb == fosa_bb
7040 || (dom_info_available_p (CDI_DOMINATORS)
7041 && (def_bb == fosa_bb
7042 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
7043 return val;
7044 /* We cannot temporarily rewrite stmts with undefined overflow
7045 behavior, so avoid expanding them. */
7046 if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
7047 || POINTER_TYPE_P (TREE_TYPE (val)))
7048 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
7049 return NULL_TREE;
7050 flow_sensitive_info_storage storage;
7051 storage.save_and_clear (val);
7052 /* If the definition does not dominate fosa_bb, temporarily reset
7053 flow-sensitive info. */
7054 fosa_unwind->safe_push (std::make_pair (val, storage));
7055 return val;
7057 return val;
7060 /* Helper function for maybe_fold_and_comparisons and
7061 maybe_fold_or_comparisons: try to simplify the AND/OR of the
7062 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
7063 Return NULL_TREE if we can't simplify this to a single expression.
7064 To significantly lower the cost of building SSA names / gimple stmts
7065 we allocate them on the stack. This makes the code a bit ugly. */
7067 static tree
7068 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
7069 enum tree_code code1,
7070 tree op1a, tree op1b,
7071 enum tree_code code2, tree op2a,
7072 tree op2b,
7073 basic_block outer_cond_bb)
7075 /* Allocate gimple stmt1 on the stack. */
7076 gassign *stmt1
7077 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7078 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
7079 gimple_assign_set_rhs_code (stmt1, code1);
7080 gimple_assign_set_rhs1 (stmt1, op1a);
7081 gimple_assign_set_rhs2 (stmt1, op1b);
7082 gimple_set_bb (stmt1, NULL);
7084 /* Allocate gimple stmt2 on the stack. */
7085 gassign *stmt2
7086 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7087 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
7088 gimple_assign_set_rhs_code (stmt2, code2);
7089 gimple_assign_set_rhs1 (stmt2, op2a);
7090 gimple_assign_set_rhs2 (stmt2, op2b);
7091 gimple_set_bb (stmt2, NULL);
7093 /* Allocate SSA name (lhs1) on the stack. */
7094 alignas (tree_node) unsigned char lhs1buf[sizeof (tree_ssa_name)];
7095 tree lhs1 = (tree) &lhs1buf[0];
7096 memset (lhs1, 0, sizeof (tree_ssa_name));
7097 TREE_SET_CODE (lhs1, SSA_NAME);
7098 TREE_TYPE (lhs1) = type;
7099 init_ssa_name_imm_use (lhs1);
7101 /* Allocate SSA name (lhs2) on the stack. */
7102 alignas (tree_node) unsigned char lhs2buf[sizeof (tree_ssa_name)];
7103 tree lhs2 = (tree) &lhs2buf[0];
7104 memset (lhs2, 0, sizeof (tree_ssa_name));
7105 TREE_SET_CODE (lhs2, SSA_NAME);
7106 TREE_TYPE (lhs2) = type;
7107 init_ssa_name_imm_use (lhs2);
7109 gimple_assign_set_lhs (stmt1, lhs1);
7110 gimple_assign_set_lhs (stmt2, lhs2);
7112 gimple_match_op op (gimple_match_cond::UNCOND, code,
7113 type, gimple_assign_lhs (stmt1),
7114 gimple_assign_lhs (stmt2));
7115 fosa_bb = outer_cond_bb;
7116 auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
7117 fosa_unwind = &unwind_stack;
7118 if (op.resimplify (NULL, (!outer_cond_bb
7119 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
7121 fosa_unwind = NULL;
7122 for (auto p : unwind_stack)
7123 p.second.restore (p.first);
7124 if (gimple_simplified_result_is_gimple_val (&op))
7126 tree res = op.ops[0];
7127 if (res == lhs1)
7128 return build2 (code1, type, op1a, op1b);
7129 else if (res == lhs2)
7130 return build2 (code2, type, op2a, op2b);
7131 else
7132 return res;
7134 else if (op.code.is_tree_code ()
7135 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
7137 tree op0 = op.ops[0];
7138 tree op1 = op.ops[1];
7139 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
7140 return NULL_TREE; /* not simple */
7142 return build2 ((enum tree_code)op.code, op.type, op0, op1);
7145 fosa_unwind = NULL;
7146 for (auto p : unwind_stack)
7147 p.second.restore (p.first);
7149 return NULL_TREE;
7152 /* Try to simplify the AND of two comparisons, specified by
7153 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7154 If this can be simplified to a single expression (without requiring
7155 introducing more SSA variables to hold intermediate values),
7156 return the resulting tree. Otherwise return NULL_TREE.
7157 If the result expression is non-null, it has boolean type. */
7159 tree
7160 maybe_fold_and_comparisons (tree type,
7161 enum tree_code code1, tree op1a, tree op1b,
7162 enum tree_code code2, tree op2a, tree op2b,
7163 basic_block outer_cond_bb)
7165 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7166 outer_cond_bb))
7167 return t;
7169 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7170 outer_cond_bb))
7171 return t;
7173 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7174 op1a, op1b, code2, op2a,
7175 op2b, outer_cond_bb))
7176 return t;
7178 return NULL_TREE;
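/* Minimal usage sketch (a and b assumed to be integer SSA names):

     tree t = maybe_fold_and_comparisons (boolean_type_node,
                                          LT_EXPR, a, b,
                                          GT_EXPR, a, b, NULL);

   folds to a false constant since a < b && a > b is unsatisfiable,
   while a NULL_TREE result means no single-expression simplification
   was found.  */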
7181 /* Helper function for or_comparisons_1: try to simplify the OR of the
7182 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7183 If INVERT is true, invert the value of VAR before doing the OR.
7184 Return NULL_TREE if we can't simplify this to a single expression. */
7186 static tree
7187 or_var_with_comparison (tree type, tree var, bool invert,
7188 enum tree_code code2, tree op2a, tree op2b,
7189 basic_block outer_cond_bb)
7191 tree t;
7192 gimple *stmt = SSA_NAME_DEF_STMT (var);
7194 /* We can only deal with variables whose definitions are assignments. */
7195 if (!is_gimple_assign (stmt))
7196 return NULL_TREE;
7198 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7199 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7200 Then we only have to consider the simpler non-inverted cases. */
7201 if (invert)
7202 t = and_var_with_comparison_1 (type, stmt,
7203 invert_tree_comparison (code2, false),
7204 op2a, op2b, outer_cond_bb);
7205 else
7206 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7207 outer_cond_bb);
7208 return canonicalize_bool (t, invert);
7211 /* Try to simplify the OR of the ssa variable defined by the assignment
7212 STMT with the comparison specified by (OP2A CODE2 OP2B).
7213 Return NULL_TREE if we can't simplify this to a single expression. */
7215 static tree
7216 or_var_with_comparison_1 (tree type, gimple *stmt,
7217 enum tree_code code2, tree op2a, tree op2b,
7218 basic_block outer_cond_bb)
7220 tree var = gimple_assign_lhs (stmt);
7221 tree true_test_var = NULL_TREE;
7222 tree false_test_var = NULL_TREE;
7223 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7225 /* Check for identities like (var OR (var != 0)) => true . */
7226 if (TREE_CODE (op2a) == SSA_NAME
7227 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7229 if ((code2 == NE_EXPR && integer_zerop (op2b))
7230 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7232 true_test_var = op2a;
7233 if (var == true_test_var)
7234 return var;
7236 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7237 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7239 false_test_var = op2a;
7240 if (var == false_test_var)
7241 return boolean_true_node;
7245 /* If the definition is a comparison, recurse on it. */
7246 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7248 tree t = or_comparisons_1 (type, innercode,
7249 gimple_assign_rhs1 (stmt),
7250 gimple_assign_rhs2 (stmt),
7251 code2, op2a, op2b, outer_cond_bb);
7252 if (t)
7253 return t;
7256 /* If the definition is an AND or OR expression, we may be able to
7257 simplify by reassociating. */
7258 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7259 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7261 tree inner1 = gimple_assign_rhs1 (stmt);
7262 tree inner2 = gimple_assign_rhs2 (stmt);
7263 gimple *s;
7264 tree t;
7265 tree partial = NULL_TREE;
7266 bool is_or = (innercode == BIT_IOR_EXPR);
7268 /* Check for boolean identities that don't require recursive examination
7269 of inner1/inner2:
7270 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7271 inner1 OR (inner1 AND inner2) => inner1
7272 !inner1 OR (inner1 OR inner2) => true
7273 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7274 Likewise for similar cases involving inner2. */
7275 if (inner1 == true_test_var)
7276 return (is_or ? var : inner1);
7277 else if (inner2 == true_test_var)
7278 return (is_or ? var : inner2);
7279 else if (inner1 == false_test_var)
7280 return (is_or
7281 ? boolean_true_node
7282 : or_var_with_comparison (type, inner2, false, code2, op2a,
7283 op2b, outer_cond_bb));
7284 else if (inner2 == false_test_var)
7285 return (is_or
7286 ? boolean_true_node
7287 : or_var_with_comparison (type, inner1, false, code2, op2a,
7288 op2b, outer_cond_bb));
7290 /* Next, redistribute/reassociate the OR across the inner tests.
7291 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7292 if (TREE_CODE (inner1) == SSA_NAME
7293 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7294 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7295 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7296 gimple_assign_rhs1 (s),
7297 gimple_assign_rhs2 (s),
7298 code2, op2a, op2b,
7299 outer_cond_bb)))
7301 /* Handle the OR case, where we are reassociating:
7302 (inner1 OR inner2) OR (op2a code2 op2b)
7303 => (t OR inner2)
7304 If the partial result t is a constant, we win. Otherwise
7305 continue on to try reassociating with the other inner test. */
7306 if (is_or)
7308 if (integer_onep (t))
7309 return boolean_true_node;
7310 else if (integer_zerop (t))
7311 return inner2;
7314 /* Handle the AND case, where we are redistributing:
7315 (inner1 AND inner2) OR (op2a code2 op2b)
7316 => (t AND (inner2 OR (op2a code op2b))) */
7317 else if (integer_zerop (t))
7318 return boolean_false_node;
7320 /* Save partial result for later. */
7321 partial = t;
7324 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7325 if (TREE_CODE (inner2) == SSA_NAME
7326 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7327 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7328 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7329 gimple_assign_rhs1 (s),
7330 gimple_assign_rhs2 (s),
7331 code2, op2a, op2b,
7332 outer_cond_bb)))
7334 /* Handle the OR case, where we are reassociating:
7335 (inner1 OR inner2) OR (op2a code2 op2b)
7336 => (inner1 OR t)
7337 => (t OR partial) */
7338 if (is_or)
7340 if (integer_zerop (t))
7341 return inner1;
7342 else if (integer_onep (t))
7343 return boolean_true_node;
7344 /* If both are the same, we can apply the identity
7345 (x OR x) == x. */
7346 else if (partial && same_bool_result_p (t, partial))
7347 return t;
7350 /* Handle the AND case, where we are redistributing:
7351 (inner1 AND inner2) OR (op2a code2 op2b)
7352 => (t AND (inner1 OR (op2a code2 op2b)))
7353 => (t AND partial) */
7354 else
7356 if (integer_zerop (t))
7357 return boolean_false_node;
7358 else if (partial)
7360 /* We already got a simplification for the other
7361 operand to the redistributed AND expression. The
7362 interesting case is when at least one is true.
7363 Or, if both are the same, we can apply the identity
7364 (x AND x) == x. */
7365 if (integer_onep (partial))
7366 return t;
7367 else if (integer_onep (t))
7368 return partial;
7369 else if (same_bool_result_p (t, partial))
7370 return t;
7375 return NULL_TREE;
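/* Illustrative sketch of the reassociation above (not part of the
   function): given the boolean SSA definitions

     _1 = x_2 < y_3;
     _4 = x_2 == y_3;
     var_5 = _1 | _4;

   simplifying var_5 OR (x_2 >= y_3) first combines the inner test _1
   with the outer comparison; (x_2 < y_3) OR (x_2 >= y_3) is true for
   integer operands, so the whole expression folds to boolean_true_node
   via the first partial result.  */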
7378 /* Try to simplify the OR of two comparisons defined by
7379 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7380 If this can be done without constructing an intermediate value,
7381 return the resulting tree; otherwise NULL_TREE is returned.
7382 This function is deliberately asymmetric as it recurses on SSA_DEFs
7383 in the first comparison but not the second. */
7385 static tree
7386 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7387 enum tree_code code2, tree op2a, tree op2b,
7388 basic_block outer_cond_bb)
7390 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7392 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7393 if (operand_equal_p (op1a, op2a, 0)
7394 && operand_equal_p (op1b, op2b, 0))
7396 /* Result will be either NULL_TREE, or a combined comparison. */
7397 tree t = combine_comparisons (UNKNOWN_LOCATION,
7398 TRUTH_ORIF_EXPR, code1, code2,
7399 truth_type, op1a, op1b);
7400 if (t)
7401 return t;
7404 /* Likewise the swapped case of the above. */
7405 if (operand_equal_p (op1a, op2b, 0)
7406 && operand_equal_p (op1b, op2a, 0))
7408 /* Result will be either NULL_TREE, or a combined comparison. */
7409 tree t = combine_comparisons (UNKNOWN_LOCATION,
7410 TRUTH_ORIF_EXPR, code1,
7411 swap_tree_comparison (code2),
7412 truth_type, op1a, op1b);
7413 if (t)
7414 return t;
7417 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7418 NAME's definition is a truth value. See if there are any simplifications
7419 that can be done against the NAME's definition. */
7420 if (TREE_CODE (op1a) == SSA_NAME
7421 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7422 && (integer_zerop (op1b) || integer_onep (op1b)))
7424 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7425 || (code1 == NE_EXPR && integer_onep (op1b)));
7426 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7427 switch (gimple_code (stmt))
7429 case GIMPLE_ASSIGN:
7430 /* Try to simplify by copy-propagating the definition. */
7431 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7432 op2b, outer_cond_bb);
7434 case GIMPLE_PHI:
7435 /* If every argument to the PHI produces the same result when
7436 ORed with the second comparison, we win.
7437 Do not do this unless the type is bool since we need a bool
7438 result here anyway. */
7439 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7441 tree result = NULL_TREE;
7442 unsigned i;
7443 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7445 tree arg = gimple_phi_arg_def (stmt, i);
7447 /* If this PHI has itself as an argument, ignore it.
7448 If all the other args produce the same result,
7449 we're still OK. */
7450 if (arg == gimple_phi_result (stmt))
7451 continue;
7452 else if (TREE_CODE (arg) == INTEGER_CST)
7454 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7456 if (!result)
7457 result = boolean_true_node;
7458 else if (!integer_onep (result))
7459 return NULL_TREE;
7461 else if (!result)
7462 result = fold_build2 (code2, boolean_type_node,
7463 op2a, op2b);
7464 else if (!same_bool_comparison_p (result,
7465 code2, op2a, op2b))
7466 return NULL_TREE;
7468 else if (TREE_CODE (arg) == SSA_NAME
7469 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7471 tree temp;
7472 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7473 /* In simple cases we can look through PHI nodes,
7474 but we have to be careful with loops.
7475 See PR49073. */
7476 if (! dom_info_available_p (CDI_DOMINATORS)
7477 || gimple_bb (def_stmt) == gimple_bb (stmt)
7478 || dominated_by_p (CDI_DOMINATORS,
7479 gimple_bb (def_stmt),
7480 gimple_bb (stmt)))
7481 return NULL_TREE;
7482 temp = or_var_with_comparison (type, arg, invert, code2,
7483 op2a, op2b, outer_cond_bb);
7484 if (!temp)
7485 return NULL_TREE;
7486 else if (!result)
7487 result = temp;
7488 else if (!same_bool_result_p (result, temp))
7489 return NULL_TREE;
7491 else
7492 return NULL_TREE;
7494 return result;
7497 default:
7498 break;
7501 return NULL_TREE;
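/* For example (illustrative, assuming integer operands so NaNs are
   not a concern):

     (x < y)  OR (x == y)  =>  x <= y
     (x <= y) OR (x == y)  =>  x <= y
     (x < y)  OR (x >= y)  =>  true

   each produced by combine_comparisons with TRUTH_ORIF_EXPR on the
   shared operands.  */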
7504 /* Try to simplify the OR of two comparisons, specified by
7505 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7506 If this can be simplified to a single expression (without requiring
7507 introducing more SSA variables to hold intermediate values),
7508 return the resulting tree. Otherwise return NULL_TREE.
7509 If the result expression is non-null, it has boolean type. */
7511 tree
7512 maybe_fold_or_comparisons (tree type,
7513 enum tree_code code1, tree op1a, tree op1b,
7514 enum tree_code code2, tree op2a, tree op2b,
7515 basic_block outer_cond_bb)
7517 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7518 outer_cond_bb))
7519 return t;
7521 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7522 outer_cond_bb))
7523 return t;
7525 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7526 op1a, op1b, code2, op2a,
7527 op2b, outer_cond_bb))
7528 return t;
7530 return NULL_TREE;
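/* Usage sketch (hypothetical caller; a and b are SSA names of the
   same integral type):

     tree t = maybe_fold_or_comparisons (boolean_type_node,
                                         LT_EXPR, a, b,
                                         EQ_EXPR, a, b, NULL);

   On success t is the tree for a <= b; on failure it is NULL_TREE and
   the caller keeps both comparisons.  Passes such as ifcombine use
   this to merge two conditional jumps into one.  */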
7533 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7535 Returns either NULL_TREE, a simplified but non-constant value,
7536 or a constant.
7538 ??? This should go into a gimple-fold-inline.h file to be eventually
7539 privatized with the single valueize function used in the various TUs
7540 to avoid the indirect function call overhead. */
7542 tree
7543 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7544 tree (*gvalueize) (tree))
7546 gimple_match_op res_op;
7547 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7548 edges if there are intermediate VARYING defs. For this reason
7549 do not follow SSA edges here even though SCCVN can technically
7550 deal with that just fine. */
7551 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7553 tree res = NULL_TREE;
7554 if (gimple_simplified_result_is_gimple_val (&res_op))
7555 res = res_op.ops[0];
7556 else if (mprts_hook)
7557 res = mprts_hook (&res_op);
7558 if (res)
7560 if (dump_file && dump_flags & TDF_DETAILS)
7562 fprintf (dump_file, "Match-and-simplified ");
7563 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7564 fprintf (dump_file, " to ");
7565 print_generic_expr (dump_file, res);
7566 fprintf (dump_file, "\n");
7568 return res;
7572 location_t loc = gimple_location (stmt);
7573 switch (gimple_code (stmt))
7575 case GIMPLE_ASSIGN:
7577 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7579 switch (get_gimple_rhs_class (subcode))
7581 case GIMPLE_SINGLE_RHS:
7583 tree rhs = gimple_assign_rhs1 (stmt);
7584 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7586 if (TREE_CODE (rhs) == SSA_NAME)
7588 /* If the RHS is an SSA_NAME, return its known constant value,
7589 if any. */
7590 return (*valueize) (rhs);
7592 /* Handle propagating invariant addresses into address
7593 operations. */
7594 else if (TREE_CODE (rhs) == ADDR_EXPR
7595 && !is_gimple_min_invariant (rhs))
7597 poly_int64 offset = 0;
7598 tree base;
7599 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7600 &offset,
7601 valueize);
7602 if (base
7603 && (CONSTANT_CLASS_P (base)
7604 || decl_address_invariant_p (base)))
7605 return build_invariant_address (TREE_TYPE (rhs),
7606 base, offset);
7608 else if (TREE_CODE (rhs) == CONSTRUCTOR
7609 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7610 && known_eq (CONSTRUCTOR_NELTS (rhs),
7611 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7613 unsigned i, nelts;
7614 tree val;
7616 nelts = CONSTRUCTOR_NELTS (rhs);
7617 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7618 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7620 val = (*valueize) (val);
7621 if (TREE_CODE (val) == INTEGER_CST
7622 || TREE_CODE (val) == REAL_CST
7623 || TREE_CODE (val) == FIXED_CST)
7624 vec.quick_push (val);
7625 else
7626 return NULL_TREE;
7629 return vec.build ();
7631 if (subcode == OBJ_TYPE_REF)
7633 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7634 /* If callee is constant, we can fold away the wrapper. */
7635 if (is_gimple_min_invariant (val))
7636 return val;
7639 if (kind == tcc_reference)
7641 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7642 || TREE_CODE (rhs) == REALPART_EXPR
7643 || TREE_CODE (rhs) == IMAGPART_EXPR)
7644 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7646 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7647 return fold_unary_loc (EXPR_LOCATION (rhs),
7648 TREE_CODE (rhs),
7649 TREE_TYPE (rhs), val);
7651 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7652 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7654 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7655 return fold_ternary_loc (EXPR_LOCATION (rhs),
7656 TREE_CODE (rhs),
7657 TREE_TYPE (rhs), val,
7658 TREE_OPERAND (rhs, 1),
7659 TREE_OPERAND (rhs, 2));
7661 else if (TREE_CODE (rhs) == MEM_REF
7662 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7664 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7665 if (TREE_CODE (val) == ADDR_EXPR
7666 && is_gimple_min_invariant (val))
7668 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7669 unshare_expr (val),
7670 TREE_OPERAND (rhs, 1));
7671 if (tem)
7672 rhs = tem;
7675 return fold_const_aggregate_ref_1 (rhs, valueize);
7677 else if (kind == tcc_declaration)
7678 return get_symbol_constant_value (rhs);
7679 return rhs;
7682 case GIMPLE_UNARY_RHS:
7683 return NULL_TREE;
7685 case GIMPLE_BINARY_RHS:
7686 /* Translate &x + CST into an invariant form suitable for
7687 further propagation. */
7688 if (subcode == POINTER_PLUS_EXPR)
7690 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7691 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7692 if (TREE_CODE (op0) == ADDR_EXPR
7693 && TREE_CODE (op1) == INTEGER_CST)
7695 tree off = fold_convert (ptr_type_node, op1);
7696 return build1_loc
7697 (loc, ADDR_EXPR, TREE_TYPE (op0),
7698 fold_build2 (MEM_REF,
7699 TREE_TYPE (TREE_TYPE (op0)),
7700 unshare_expr (op0), off));
7703 /* Canonicalize bool != 0 and bool == 0 appearing after
7704 valueization. While gimple_simplify handles this
7705 it can get confused by the ~X == 1 -> X == 0 transform
7706 which we can't reduce to an SSA name or a constant
7707 (and we have no way to tell gimple_simplify to not
7708 consider those transforms in the first place). */
7709 else if (subcode == EQ_EXPR
7710 || subcode == NE_EXPR)
7712 tree lhs = gimple_assign_lhs (stmt);
7713 tree op0 = gimple_assign_rhs1 (stmt);
7714 if (useless_type_conversion_p (TREE_TYPE (lhs),
7715 TREE_TYPE (op0)))
7717 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7718 op0 = (*valueize) (op0);
7719 if (TREE_CODE (op0) == INTEGER_CST)
7720 std::swap (op0, op1);
7721 if (TREE_CODE (op1) == INTEGER_CST
7722 && ((subcode == NE_EXPR && integer_zerop (op1))
7723 || (subcode == EQ_EXPR && integer_onep (op1))))
7724 return op0;
7727 return NULL_TREE;
7729 case GIMPLE_TERNARY_RHS:
7731 /* Handle ternary operators that can appear in GIMPLE form. */
7732 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7733 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7734 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7735 return fold_ternary_loc (loc, subcode,
7736 TREE_TYPE (gimple_assign_lhs (stmt)),
7737 op0, op1, op2);
7740 default:
7741 gcc_unreachable ();
7745 case GIMPLE_CALL:
7747 tree fn;
7748 gcall *call_stmt = as_a <gcall *> (stmt);
7750 if (gimple_call_internal_p (stmt))
7752 enum tree_code subcode = ERROR_MARK;
7753 switch (gimple_call_internal_fn (stmt))
7755 case IFN_UBSAN_CHECK_ADD:
7756 subcode = PLUS_EXPR;
7757 break;
7758 case IFN_UBSAN_CHECK_SUB:
7759 subcode = MINUS_EXPR;
7760 break;
7761 case IFN_UBSAN_CHECK_MUL:
7762 subcode = MULT_EXPR;
7763 break;
7764 case IFN_BUILTIN_EXPECT:
7766 tree arg0 = gimple_call_arg (stmt, 0);
7767 tree op0 = (*valueize) (arg0);
7768 if (TREE_CODE (op0) == INTEGER_CST)
7769 return op0;
7770 return NULL_TREE;
7772 default:
7773 return NULL_TREE;
7775 tree arg0 = gimple_call_arg (stmt, 0);
7776 tree arg1 = gimple_call_arg (stmt, 1);
7777 tree op0 = (*valueize) (arg0);
7778 tree op1 = (*valueize) (arg1);
7780 if (TREE_CODE (op0) != INTEGER_CST
7781 || TREE_CODE (op1) != INTEGER_CST)
7783 switch (subcode)
7785 case MULT_EXPR:
7786 /* x * 0 = 0 * x = 0 without overflow. */
7787 if (integer_zerop (op0) || integer_zerop (op1))
7788 return build_zero_cst (TREE_TYPE (arg0));
7789 break;
7790 case MINUS_EXPR:
7791 /* y - y = 0 without overflow. */
7792 if (operand_equal_p (op0, op1, 0))
7793 return build_zero_cst (TREE_TYPE (arg0));
7794 break;
7795 default:
7796 break;
7799 tree res
7800 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7801 if (res
7802 && TREE_CODE (res) == INTEGER_CST
7803 && !TREE_OVERFLOW (res))
7804 return res;
7805 return NULL_TREE;
7808 fn = (*valueize) (gimple_call_fn (stmt));
7809 if (TREE_CODE (fn) == ADDR_EXPR
7810 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7811 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7812 && gimple_builtin_call_types_compatible_p (stmt,
7813 TREE_OPERAND (fn, 0)))
7815 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7816 tree retval;
7817 unsigned i;
7818 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7819 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7820 retval = fold_builtin_call_array (loc,
7821 gimple_call_return_type (call_stmt),
7822 fn, gimple_call_num_args (stmt), args);
7823 if (retval)
7825 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7826 STRIP_NOPS (retval);
7827 retval = fold_convert (gimple_call_return_type (call_stmt),
7828 retval);
7830 return retval;
7832 return NULL_TREE;
7835 default:
7836 return NULL_TREE;
7840 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7841 Returns NULL_TREE if folding to a constant is not possible, otherwise
7842 returns a constant according to is_gimple_min_invariant. */
7844 tree
7845 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7847 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7848 if (res && is_gimple_min_invariant (res))
7849 return res;
7850 return NULL_TREE;
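/* Example (illustrative): with a VALUEIZE callback mapping x_1 to 3
   and y_2 to 4, the statement

     z_3 = x_1 + y_2;

   folds to the INTEGER_CST 7.  If instead the simplified result were
   another SSA name, gimple_fold_stmt_to_constant would return
   NULL_TREE because the result is not gimple_min_invariant.  */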
7854 /* The following set of functions is supposed to fold references using
7855 their constant initializers. */
7857 /* See if we can find a constructor defining the value of BASE.
7858 When we know the constructor with a constant offset (such as when
7859 BASE is array[40] and we do know the constructor of the array),
7860 BIT_OFFSET is adjusted accordingly.
7862 As a special case, return error_mark_node when the constructor
7863 is not explicitly available but is known to be zero,
7864 such as for 'static const int a;'. */
7865 static tree
7866 get_base_constructor (tree base, poly_int64 *bit_offset,
7867 tree (*valueize)(tree))
7869 poly_int64 bit_offset2, size, max_size;
7870 bool reverse;
7872 if (TREE_CODE (base) == MEM_REF)
7874 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7875 if (!boff.to_shwi (bit_offset))
7876 return NULL_TREE;
7878 if (valueize
7879 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7880 base = valueize (TREE_OPERAND (base, 0));
7881 if (!base || TREE_CODE (base) != ADDR_EXPR)
7882 return NULL_TREE;
7883 base = TREE_OPERAND (base, 0);
7885 else if (valueize
7886 && TREE_CODE (base) == SSA_NAME)
7887 base = valueize (base);
7889 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7890 DECL_INITIAL. If BASE is a nested reference into another
7891 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7892 the inner reference. */
7893 switch (TREE_CODE (base))
7895 case VAR_DECL:
7896 case CONST_DECL:
7898 tree init = ctor_for_folding (base);
7900 /* Our semantics are the exact opposite of ctor_for_folding's:
7901 NULL means unknown, while error_mark_node means 0. */
7902 if (init == error_mark_node)
7903 return NULL_TREE;
7904 if (!init)
7905 return error_mark_node;
7906 return init;
7909 case VIEW_CONVERT_EXPR:
7910 return get_base_constructor (TREE_OPERAND (base, 0),
7911 bit_offset, valueize);
7913 case ARRAY_REF:
7914 case COMPONENT_REF:
7915 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7916 &reverse);
7917 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7918 return NULL_TREE;
7919 *bit_offset += bit_offset2;
7920 return get_base_constructor (base, bit_offset, valueize);
7922 case CONSTRUCTOR:
7923 return base;
7925 default:
7926 if (CONSTANT_CLASS_P (base))
7927 return base;
7929 return NULL_TREE;
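/* For example (illustrative): given

     static const int a[4] = { 1, 2, 3, 4 };

   a BASE of MEM_REF (&a, 8) yields the CONSTRUCTOR of a and bumps
   *BIT_OFFSET by 8 * BITS_PER_UNIT, while 'static const int b;'
   (no explicit initializer) yields error_mark_node, i.e. a known
   zero.  */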
7933 /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
7934 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
7935 the reference; otherwise the type of the referenced element is used instead.
7936 When SIZE is zero, attempt to fold a reference to the entire element OFFSET
7937 refers to. Increment *SUBOFF by the bit offset of the accessed element. */
7939 static tree
7940 fold_array_ctor_reference (tree type, tree ctor,
7941 unsigned HOST_WIDE_INT offset,
7942 unsigned HOST_WIDE_INT size,
7943 tree from_decl,
7944 unsigned HOST_WIDE_INT *suboff)
7946 offset_int low_bound;
7947 offset_int elt_size;
7948 offset_int access_index;
7949 tree domain_type = NULL_TREE;
7950 HOST_WIDE_INT inner_offset;
7952 /* Compute low bound and elt size. */
7953 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7954 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7955 if (domain_type && TYPE_MIN_VALUE (domain_type))
7957 /* Static constructors for variably sized objects make no sense. */
7958 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7959 return NULL_TREE;
7960 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7962 else
7963 low_bound = 0;
7964 /* Static constructors for variably sized objects make no sense. */
7965 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7966 return NULL_TREE;
7967 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7969 /* When TYPE is non-null, verify that it specifies a constant-sized
7970 access of a multiple of the array element size. Avoid division
7971 by zero below when ELT_SIZE is zero, such as with the result of
7972 an initializer for a zero-length array or an empty struct. */
7973 if (elt_size == 0
7974 || (type
7975 && (!TYPE_SIZE_UNIT (type)
7976 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7977 return NULL_TREE;
7979 /* Compute the array index we look for. */
7980 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7981 elt_size);
7982 access_index += low_bound;
7984 /* And offset within the access. */
7985 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7987 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7988 if (size > elt_sz * BITS_PER_UNIT)
7990 /* native_encode_expr constraints. */
7991 if (size > MAX_BITSIZE_MODE_ANY_MODE
7992 || size % BITS_PER_UNIT != 0
7993 || inner_offset % BITS_PER_UNIT != 0
7994 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7995 return NULL_TREE;
7997 unsigned ctor_idx;
7998 tree val = get_array_ctor_element_at_index (ctor, access_index,
7999 &ctor_idx);
8000 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
8001 return build_zero_cst (type);
8003 /* native-encode adjacent ctor elements. */
8004 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8005 unsigned bufoff = 0;
8006 offset_int index = 0;
8007 offset_int max_index = access_index;
8008 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8009 if (!val)
8010 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8011 else if (!CONSTANT_CLASS_P (val))
8012 return NULL_TREE;
8013 if (!elt->index)
8015 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8017 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8018 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8020 else
8021 index = max_index = wi::to_offset (elt->index);
8022 index = wi::umax (index, access_index);
8025 if (bufoff + elt_sz > sizeof (buf))
8026 elt_sz = sizeof (buf) - bufoff;
8027 int len = native_encode_expr (val, buf + bufoff, elt_sz,
8028 inner_offset / BITS_PER_UNIT);
8029 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
8030 return NULL_TREE;
8031 inner_offset = 0;
8032 bufoff += len;
8034 access_index += 1;
8035 if (wi::cmpu (access_index, index) == 0)
8036 val = elt->value;
8037 else if (wi::cmpu (access_index, max_index) > 0)
8039 ctor_idx++;
8040 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
8042 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8043 ++max_index;
8045 else
8047 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8048 index = 0;
8049 max_index = access_index;
8050 if (!elt->index)
8052 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8054 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8055 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8057 else
8058 index = max_index = wi::to_offset (elt->index);
8059 index = wi::umax (index, access_index);
8060 if (wi::cmpu (access_index, index) == 0)
8061 val = elt->value;
8062 else
8063 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8067 while (bufoff < size / BITS_PER_UNIT);
8068 *suboff += size;
8069 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
8072 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
8074 if (!size && TREE_CODE (val) != CONSTRUCTOR)
8076 /* For the final reference to the entire accessed element
8077 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
8078 may be null) in favor of the type of the element, and set
8079 SIZE to the size of the accessed element. */
8080 inner_offset = 0;
8081 type = TREE_TYPE (val);
8082 size = elt_sz * BITS_PER_UNIT;
8084 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
8085 && TREE_CODE (val) == CONSTRUCTOR
8086 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
8087 /* If this isn't the last element in the CTOR, is itself a CTOR,
8088 and does not cover the whole object we are requesting, give up
8089 since we're not set up for combining from multiple CTORs. */
8090 return NULL_TREE;
8092 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
8093 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
8094 suboff);
8097 /* Memory not explicitly mentioned in constructor is 0 (or
8098 the reference is out of range). */
8099 return type ? build_zero_cst (type) : NULL_TREE;
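/* Worked example for the index computation above (illustrative):
   for 'int a[8]' on a target with BITS_PER_UNIT == 8 and 4-byte int,
   an access at bit OFFSET 96 with SIZE 32 gives

     access_index = (96 / 8) / 4 = 3
     inner_offset = 96 % (4 * 8) = 0

   i.e. the read is exactly element a[3].  */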
8102 /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
8103 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
8104 the reference; otherwise the type of the referenced member is used instead.
8105 When SIZE is zero, attempt to fold a reference to the entire member OFFSET
8106 refers to. Increment *SUBOFF by the bit offset of the accessed member. */
8108 static tree
8109 fold_nonarray_ctor_reference (tree type, tree ctor,
8110 unsigned HOST_WIDE_INT offset,
8111 unsigned HOST_WIDE_INT size,
8112 tree from_decl,
8113 unsigned HOST_WIDE_INT *suboff)
8115 unsigned HOST_WIDE_INT cnt;
8116 tree cfield, cval;
8118 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
8120 tree byte_offset = DECL_FIELD_OFFSET (cfield);
8121 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
8122 tree field_size = DECL_SIZE (cfield);
8124 if (!field_size)
8126 /* Determine the size of the flexible array member from
8127 the size of the initializer provided for it. */
8128 field_size = TYPE_SIZE (TREE_TYPE (cval));
8131 /* Variable-sized objects in static constructors make no sense,
8132 but field_size can be NULL for flexible array members. */
8133 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
8134 && TREE_CODE (byte_offset) == INTEGER_CST
8135 && (field_size != NULL_TREE
8136 ? TREE_CODE (field_size) == INTEGER_CST
8137 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
8139 /* Compute bit offset of the field. */
8140 offset_int bitoffset
8141 = (wi::to_offset (field_offset)
8142 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
8143 /* Compute bit offset where the field ends. */
8144 offset_int bitoffset_end;
8145 if (field_size != NULL_TREE)
8146 bitoffset_end = bitoffset + wi::to_offset (field_size);
8147 else
8148 bitoffset_end = 0;
8150 /* Compute the bit offset of the end of the desired access.
8151 As a special case, if the size of the desired access is
8152 zero, assume the access is to the entire field (and let
8153 the caller make any necessary adjustments by storing
8154 the actual bounds of the field in FIELDBOUNDS). */
8155 offset_int access_end = offset_int (offset);
8156 if (size)
8157 access_end += size;
8158 else
8159 access_end = bitoffset_end;
8161 /* Is there any overlap between the desired access at
8162 [OFFSET, OFFSET+SIZE) and the offset of the field within
8163 the object at [BITOFFSET, BITOFFSET_END)? */
8164 if (wi::cmps (access_end, bitoffset) > 0
8165 && (field_size == NULL_TREE
8166 || wi::lts_p (offset, bitoffset_end)))
8168 *suboff += bitoffset.to_uhwi ();
8170 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8172 /* For the final reference to the entire accessed member
8173 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8174 be null) in favor of the type of the member, and set
8175 SIZE to the size of the accessed member. */
8176 offset = bitoffset.to_uhwi ();
8177 type = TREE_TYPE (cval);
8178 size = (bitoffset_end - bitoffset).to_uhwi ();
8181 /* We do have overlap. Now see if the field is large enough
8182 to cover the access. Give up for accesses that extend
8183 beyond the end of the object or that span multiple fields. */
8184 if (wi::cmps (access_end, bitoffset_end) > 0)
8185 return NULL_TREE;
8186 if (offset < bitoffset)
8187 return NULL_TREE;
8189 offset_int inner_offset = offset_int (offset) - bitoffset;
8191 /* Integral bit-fields are left-justified on big-endian targets, so
8192 we must arrange for native_encode_int to start at their MSB. */
8193 if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
8195 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8196 return NULL_TREE;
8197 const unsigned int encoding_size
8198 = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
8199 if (BYTES_BIG_ENDIAN)
8200 inner_offset += encoding_size - wi::to_offset (field_size);
8203 return fold_ctor_reference (type, cval,
8204 inner_offset.to_uhwi (), size,
8205 from_decl, suboff);
8209 if (!type)
8210 return NULL_TREE;
8212 return build_zero_cst (type);
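/* Worked example for the bit-offset computation above (illustrative):
   for 'struct { int i; short s; } x' on a typical 32/64-bit target
   the field s sits 32 bits into the struct; whether the front end
   records that as DECL_FIELD_OFFSET 4 with DECL_FIELD_BIT_OFFSET 0
   or as offset 0 with bit offset 32,

     bitoffset     = 32
     bitoffset_end = 32 + 16 = 48

   so an access at [32, 48) overlaps exactly that field.  */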
8215 /* CTOR is a value initializing memory. Fold a reference of TYPE and
8216 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8217 is zero, attempt to fold a reference to the entire subobject
8218 which OFFSET refers to. This is used when folding accesses to
8219 string members of aggregates. When non-null, set *SUBOFF to
8220 the bit offset of the accessed subobject. */
8222 tree
8223 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8224 const poly_uint64 &poly_size, tree from_decl,
8225 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8227 tree ret;
8229 /* We found the field with exact match. */
8230 if (type
8231 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8232 && known_eq (poly_offset, 0U))
8233 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8235 /* The remaining optimizations need a constant size and offset. */
8236 unsigned HOST_WIDE_INT size, offset;
8237 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8238 return NULL_TREE;
8240 /* We are at the end of walk, see if we can view convert the
8241 result. */
8242 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8243 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8244 && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
8245 && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
8247 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8248 if (ret)
8250 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8251 if (ret)
8252 STRIP_USELESS_TYPE_CONVERSION (ret);
8254 return ret;
8257 /* For constants and byte-aligned/sized reads, try to go through
8258 native_encode/interpret. */
8259 if (CONSTANT_CLASS_P (ctor)
8260 && BITS_PER_UNIT == 8
8261 && offset % BITS_PER_UNIT == 0
8262 && offset / BITS_PER_UNIT <= INT_MAX
8263 && size % BITS_PER_UNIT == 0
8264 && size <= MAX_BITSIZE_MODE_ANY_MODE
8265 && can_native_interpret_type_p (type))
8267 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8268 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8269 offset / BITS_PER_UNIT);
8270 if (len > 0)
8271 return native_interpret_expr (type, buf, len);
8274 /* For constructors, try first a recursive local processing, but in any case
8275 this requires the native storage order. */
8276 if (TREE_CODE (ctor) == CONSTRUCTOR
8277 && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8278 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
8280 unsigned HOST_WIDE_INT dummy = 0;
8281 if (!suboff)
8282 suboff = &dummy;
8284 tree ret;
8285 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8286 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8287 ret = fold_array_ctor_reference (type, ctor, offset, size,
8288 from_decl, suboff);
8289 else
8290 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8291 from_decl, suboff);
8293 /* Otherwise fall back to native_encode_initializer. This may be done
8294 only from the outermost fold_ctor_reference call (because it itself
8295 recurses into CONSTRUCTORs and doesn't update suboff). */
8296 if (ret == NULL_TREE
8297 && suboff == &dummy
8298 && BITS_PER_UNIT == 8
8299 && offset % BITS_PER_UNIT == 0
8300 && offset / BITS_PER_UNIT <= INT_MAX
8301 && size % BITS_PER_UNIT == 0
8302 && size <= MAX_BITSIZE_MODE_ANY_MODE
8303 && can_native_interpret_type_p (type))
8305 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8306 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8307 offset / BITS_PER_UNIT);
8308 if (len > 0)
8309 return native_interpret_expr (type, buf, len);
8312 return ret;
8315 return NULL_TREE;
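/* The native_encode/native_interpret path above is conceptually a
   byte-level reinterpretation of a constant in target memory order.
   A minimal host-side analogue (illustrative only, not GCC code;
   standard C, assumes a 32-bit float):

     #include <string.h>
     #include <stdint.h>

     static uint32_t
     view_float_as_u32 (float f)
     {
       unsigned char buf[4];
       memcpy (buf, &f, 4);    // "encode" the constant into raw bytes
       uint32_t u;
       memcpy (&u, buf, 4);    // "interpret" the bytes in the new type
       return u;
     }

   GCC works through buffers bounded by MAX_BITSIZE_MODE_ANY_MODE and
   honors the target's byte order rather than the host's.  */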
8318 /* Return the tree representing the element referenced by T if T is an
8319 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8320 names using VALUEIZE. Return NULL_TREE otherwise. */
8322 tree
8323 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8325 tree ctor, idx, base;
8326 poly_int64 offset, size, max_size;
8327 tree tem;
8328 bool reverse;
8330 if (TREE_THIS_VOLATILE (t))
8331 return NULL_TREE;
8333 if (DECL_P (t))
8334 return get_symbol_constant_value (t);
8336 tem = fold_read_from_constant_string (t);
8337 if (tem)
8338 return tem;
8340 switch (TREE_CODE (t))
8342 case ARRAY_REF:
8343 case ARRAY_RANGE_REF:
8344 /* Constant indexes are handled well by get_base_constructor.
8345 Only special case variable offsets.
8346 FIXME: This code can't handle nested references with variable indexes
8347 (they will be handled only by iteration of ccp). Perhaps we can bring
8348 get_ref_base_and_extent here and make it use a valueize callback. */
8349 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8350 && valueize
8351 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8352 && poly_int_tree_p (idx))
8354 tree low_bound, unit_size;
8356 /* If the resulting bit-offset is constant, track it. */
8357 if ((low_bound = array_ref_low_bound (t),
8358 poly_int_tree_p (low_bound))
8359 && (unit_size = array_ref_element_size (t),
8360 tree_fits_uhwi_p (unit_size)))
8362 poly_offset_int woffset
8363 = wi::sext (wi::to_poly_offset (idx)
8364 - wi::to_poly_offset (low_bound),
8365 TYPE_PRECISION (sizetype));
8366 woffset *= tree_to_uhwi (unit_size);
8367 woffset *= BITS_PER_UNIT;
8368 if (woffset.to_shwi (&offset))
8370 base = TREE_OPERAND (t, 0);
8371 ctor = get_base_constructor (base, &offset, valueize);
8372 /* Empty constructor. Always fold to 0. */
8373 if (ctor == error_mark_node)
8374 return build_zero_cst (TREE_TYPE (t));
8375 /* Out-of-bounds array access. Value is undefined,
8376 but don't fold. */
8377 if (maybe_lt (offset, 0))
8378 return NULL_TREE;
8379 /* We cannot determine ctor. */
8380 if (!ctor)
8381 return NULL_TREE;
8382 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8383 tree_to_uhwi (unit_size)
8384 * BITS_PER_UNIT,
8385 base);
8389 /* Fallthru. */
8391 case COMPONENT_REF:
8392 case BIT_FIELD_REF:
8393 case TARGET_MEM_REF:
8394 case MEM_REF:
8395 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8396 ctor = get_base_constructor (base, &offset, valueize);
8398 /* Empty constructor. Always fold to 0. */
8399 if (ctor == error_mark_node)
8400 return build_zero_cst (TREE_TYPE (t));
8401 /* We do not know the precise address. */
8402 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8403 return NULL_TREE;
8404 /* We cannot determine ctor. */
8405 if (!ctor)
8406 return NULL_TREE;
8408 /* Out-of-bounds array access. Value is undefined, but don't fold. */
8409 if (maybe_lt (offset, 0))
8410 return NULL_TREE;
8412 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8413 if (tem)
8414 return tem;
8416 /* For bit field reads try to read the representative and
8417 adjust. */
8418 if (TREE_CODE (t) == COMPONENT_REF
8419 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8420 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8422 HOST_WIDE_INT csize, coffset;
8423 tree field = TREE_OPERAND (t, 1);
8424 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8425 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8426 && size.is_constant (&csize)
8427 && offset.is_constant (&coffset)
8428 && (coffset % BITS_PER_UNIT != 0
8429 || csize % BITS_PER_UNIT != 0)
8430 && !reverse
8431 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8433 poly_int64 bitoffset;
8434 poly_uint64 field_offset, repr_offset;
8435 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8436 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8437 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8438 else
8439 bitoffset = 0;
8440 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8441 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8442 HOST_WIDE_INT bitoff;
8443 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8444 - TYPE_PRECISION (TREE_TYPE (field)));
8445 if (bitoffset.is_constant (&bitoff)
8446 && bitoff >= 0
8447 && bitoff <= diff)
8449 offset -= bitoff;
8450 size = tree_to_uhwi (DECL_SIZE (repr));
8452 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8453 size, base);
8454 if (tem && TREE_CODE (tem) == INTEGER_CST)
8456 if (!BYTES_BIG_ENDIAN)
8457 tem = wide_int_to_tree (TREE_TYPE (field),
8458 wi::lrshift (wi::to_wide (tem),
8459 bitoff));
8460 else
8461 tem = wide_int_to_tree (TREE_TYPE (field),
8462 wi::lrshift (wi::to_wide (tem),
8463 diff - bitoff));
8464 return tem;
8469 break;
8471 case REALPART_EXPR:
8472 case IMAGPART_EXPR:
8474 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8475 if (c && TREE_CODE (c) == COMPLEX_CST)
8476 return fold_build1_loc (EXPR_LOCATION (t),
8477 TREE_CODE (t), TREE_TYPE (t), c);
8478 break;
8481 default:
8482 break;
8485 return NULL_TREE;
8488 tree
8489 fold_const_aggregate_ref (tree t)
8491 return fold_const_aggregate_ref_1 (t, NULL);
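/* Example (illustrative): for

     static const int a[2] = { 42, 13 };

   fold_const_aggregate_ref on the tree a[1] returns the INTEGER_CST
   13.  With a variable index it returns NULL_TREE unless a caller of
   fold_const_aggregate_ref_1 supplies a valueize callback that maps
   the index to a constant.  */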
8494 /* Lookup virtual method with index TOKEN in a virtual table V
8495 at OFFSET.
8496 If non-NULL, set *CAN_REFER to false if the method is not
8497 referable or if the virtual table is ill-formed (such as rewritten
8498 by a non-C++-produced symbol); just return NULL in that case. */
8500 tree
8501 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8502 tree v,
8503 unsigned HOST_WIDE_INT offset,
8504 bool *can_refer)
8506 tree vtable = v, init, fn;
8507 unsigned HOST_WIDE_INT size;
8508 unsigned HOST_WIDE_INT elt_size, access_index;
8509 tree domain_type;
8511 if (can_refer)
8512 *can_refer = true;
8514 /* First of all, double-check that we have a virtual table. */
8515 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8517 /* Pass down that we lost track of the target. */
8518 if (can_refer)
8519 *can_refer = false;
8520 return NULL_TREE;
8523 init = ctor_for_folding (v);
8525 /* The virtual tables should always be born with constructors
8526 and we should always assume that they are available for
8527 folding. At the moment we do not stream them in all cases,
8528 but it should never happen that the ctor seems unreachable. */
8529 gcc_assert (init);
8530 if (init == error_mark_node)
8532 /* Pass down that we lost track of the target. */
8533 if (can_refer)
8534 *can_refer = false;
8535 return NULL_TREE;
8537 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8538 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8539 offset *= BITS_PER_UNIT;
8540 offset += token * size;
8542 /* Look up the value in the constructor, which is assumed to be an array.
8543 This is equivalent to
8544 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8545 offset, size, NULL);
8546 but in constant time. We expect that the frontend produced a simple
8547 array without indexed initializers. */
8549 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8550 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8551 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8552 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8554 access_index = offset / BITS_PER_UNIT / elt_size;
8555 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8557 /* The C++ FE can now produce indexed fields, and we check if the indexes
8558 match. */
8559 if (access_index < CONSTRUCTOR_NELTS (init))
8561 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8562 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8563 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8564 STRIP_NOPS (fn);
8566 else
8567 fn = NULL;
8569 /* For a type-inconsistent program we may end up looking up a virtual
8570 method in a virtual table that does not contain TOKEN entries. We may overrun
8571 the virtual table and pick up a constant or RTTI info pointer.
8572 In any case the call is undefined. */
8573 if (!fn
8574 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8575 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8576 fn = builtin_decl_unreachable ();
8577 else
8579 fn = TREE_OPERAND (fn, 0);
8581 /* When the cgraph node is missing and the function is not public, we
8582 cannot devirtualize. This can happen in WHOPR when the actual method
8583 ends up in another partition, because we found the devirtualization
8584 possibility too late. */
8585 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8587 if (can_refer)
8589 *can_refer = false;
8590 return fn;
8592 return NULL_TREE;
8596 /* Make sure we create a cgraph node for functions we'll reference.
8597 They can be non-existent if the reference comes from an entry
8598 of an external vtable for example. */
8599 cgraph_node::get_create (fn);
8601 return fn;
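/* Worked example (illustrative): on a 64-bit target a vtable slot is
   8 bytes, so elt_size == 8 and size == 64.  For TOKEN 2 and a byte
   OFFSET of 16 into the vtable:

     offset       = 16 * 8 + 2 * 64 = 256 bits
     access_index = 256 / 8 / 8     = 4

   i.e. the method pointer is the fifth element of the constructor.  */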
8604 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8605 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8606 KNOWN_BINFO carries the binfo describing the true type of
8607 OBJ_TYPE_REF_OBJECT(REF).
8608 If non-NULL, set *CAN_REFER to false if the method is not
8609 referable or if the virtual table is ill-formed (such as rewritten
8610 by a non-C++-produced symbol); just return NULL in that case. */
8612 tree
8613 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8614 bool *can_refer)
8616 unsigned HOST_WIDE_INT offset;
8617 tree v;
8619 v = BINFO_VTABLE (known_binfo);
8620 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8621 if (!v)
8622 return NULL_TREE;
8624 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8626 if (can_refer)
8627 *can_refer = false;
8628 return NULL_TREE;
8630 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8633 /* Given a pointer value T, return a simplified version of an
8634 indirection through T, or NULL_TREE if no simplification is
8635 possible. Note that the resulting type may be different from
8636 the type pointed to in the sense that it is still compatible
8637 from the langhooks point of view. */
8639 tree
8640 gimple_fold_indirect_ref (tree t)
8642 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8643 tree sub = t;
8644 tree subtype;
8646 STRIP_NOPS (sub);
8647 subtype = TREE_TYPE (sub);
8648 if (!POINTER_TYPE_P (subtype)
8649 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8650 return NULL_TREE;
8652 if (TREE_CODE (sub) == ADDR_EXPR)
8654 tree op = TREE_OPERAND (sub, 0);
8655 tree optype = TREE_TYPE (op);
8656 /* *&p => p */
8657 if (useless_type_conversion_p (type, optype))
8658 return op;
8660 /* *(foo *)&fooarray => fooarray[0] */
8661 if (TREE_CODE (optype) == ARRAY_TYPE
8662 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8663 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8665 tree type_domain = TYPE_DOMAIN (optype);
8666 tree min_val = size_zero_node;
8667 if (type_domain && TYPE_MIN_VALUE (type_domain))
8668 min_val = TYPE_MIN_VALUE (type_domain);
8669 if (TREE_CODE (min_val) == INTEGER_CST)
8670 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8672 /* *(foo *)&complexfoo => __real__ complexfoo */
8673 else if (TREE_CODE (optype) == COMPLEX_TYPE
8674 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8675 return fold_build1 (REALPART_EXPR, type, op);
8676 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8677 else if (TREE_CODE (optype) == VECTOR_TYPE
8678 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8680 tree part_width = TYPE_SIZE (type);
8681 tree index = bitsize_int (0);
8682 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8686 /* *(p + CST) -> ... */
8687 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8688 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8690 tree addr = TREE_OPERAND (sub, 0);
8691 tree off = TREE_OPERAND (sub, 1);
8692 tree addrtype;
8694 STRIP_NOPS (addr);
8695 addrtype = TREE_TYPE (addr);
8697 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8698 if (TREE_CODE (addr) == ADDR_EXPR
8699 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8700 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8701 && tree_fits_uhwi_p (off))
8703 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8704 tree part_width = TYPE_SIZE (type);
8705 unsigned HOST_WIDE_INT part_widthi
8706 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8707 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8708 tree index = bitsize_int (indexi);
8709 if (known_lt (offset / part_widthi,
8710 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8711 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8712 part_width, index);
8715 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8716 if (TREE_CODE (addr) == ADDR_EXPR
8717 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8718 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8720 tree size = TYPE_SIZE_UNIT (type);
8721 if (tree_int_cst_equal (size, off))
8722 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8725 /* *(p + CST) -> MEM_REF <p, CST>. */
8726 if (TREE_CODE (addr) != ADDR_EXPR
8727 || DECL_P (TREE_OPERAND (addr, 0)))
8728 return fold_build2 (MEM_REF, type,
8729 addr,
8730 wide_int_to_tree (ptype, wi::to_wide (off)));
8733 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8734 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8735 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8736 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8738 tree type_domain;
8739 tree min_val = size_zero_node;
8740 tree osub = sub;
8741 sub = gimple_fold_indirect_ref (sub);
8742 if (! sub)
8743 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8744 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8745 if (type_domain && TYPE_MIN_VALUE (type_domain))
8746 min_val = TYPE_MIN_VALUE (type_domain);
8747 if (TREE_CODE (min_val) == INTEGER_CST)
8748 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8751 return NULL_TREE;
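/* Summary examples (illustrative; each case is handled above and is
   guarded by useless_type_conversion_p on the accessed type):

     float a[4];        *(float *)&a  =>  a[0]
     _Complex float c;  *(float *)&c  =>  __real__ c
     float *p;          *(p + 8)      =>  MEM_REF <p, 8>  */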
8754 /* Return true if CODE is an operation that when operating on signed
8755 integer types involves undefined behavior on overflow and the
8756 operation can be expressed with unsigned arithmetic. */
8758 bool
8759 arith_code_with_undefined_signed_overflow (tree_code code)
8761 switch (code)
8763 case ABS_EXPR:
8764 case PLUS_EXPR:
8765 case MINUS_EXPR:
8766 case MULT_EXPR:
8767 case NEGATE_EXPR:
8768 case POINTER_PLUS_EXPR:
8769 return true;
8770 default:
8771 return false;
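/* The point of the rewrite below (illustrative, plain C): signed
   overflow is undefined, but the same bits can be computed with
   well-defined wrapping unsigned arithmetic and converted back:

     int
     add_wrapped (int a, int b)
     {
       // The unsigned addition wraps modulo 2^N instead of invoking
       // undefined behavior; converting back to int recovers the
       // two's complement value (GCC defines that conversion).
       return (int) ((unsigned) a + (unsigned) b);
     }
*/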
8775 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8776 operation that can be transformed to unsigned arithmetic by converting
8777 its operand, carrying out the operation in the corresponding unsigned
8778 type and converting the result back to the original type.
8780 If IN_PLACE is true, *GSI points to STMT, adjust the stmt in place and
8781 return NULL.
8782 Otherwise returns a sequence of statements that replace STMT and also
8783 contain a modified form of STMT itself. */
8785 static gimple_seq
8786 rewrite_to_defined_overflow (gimple_stmt_iterator *gsi, gimple *stmt,
8787 bool in_place)
8789 if (dump_file && (dump_flags & TDF_DETAILS))
8791 fprintf (dump_file, "rewriting stmt with undefined signed "
8792 "overflow ");
8793 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8796 tree lhs = gimple_assign_lhs (stmt);
8797 tree type = unsigned_type_for (TREE_TYPE (lhs));
8798 gimple_seq stmts = NULL;
8799 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8800 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8801 else
8802 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8804 tree op = gimple_op (stmt, i);
8805 op = gimple_convert (&stmts, type, op);
8806 gimple_set_op (stmt, i, op);
8808 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8809 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8810 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8811 gimple_set_modified (stmt, true);
8812 if (in_place)
8814 if (stmts)
8815 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
8816 stmts = NULL;
8818 else
8819 gimple_seq_add_stmt (&stmts, stmt);
8820 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8821 if (in_place)
8823 gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
8824 update_stmt (stmt);
8826 else
8827 gimple_seq_add_stmt (&stmts, cvt);
8829 return stmts;
8832 void
8833 rewrite_to_defined_overflow (gimple_stmt_iterator *gsi)
8835 rewrite_to_defined_overflow (gsi, gsi_stmt (*gsi), true);
8838 gimple_seq
8839 rewrite_to_defined_overflow (gimple *stmt)
8841 return rewrite_to_defined_overflow (nullptr, stmt, false);
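/* Before/after sketch (illustrative GIMPLE; SSA names made up):

     x_3 = a_1 + b_2;                // signed int, UB on overflow

   becomes

     a.0_4 = (unsigned int) a_1;
     b.1_5 = (unsigned int) b_2;
     _6 = a.0_4 + b.1_5;             // wraps, well-defined
     x_3 = (int) _6;

   POINTER_PLUS_EXPR is retagged to a plain PLUS_EXPR on the unsigned
   type, and ABS_EXPR becomes ABSU_EXPR.  */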
8844 /* The valueization hook we use for the gimple_build API simplification.
8845 This makes us match fold_buildN behavior by only combining with
8846 statements in the sequence(s) we are currently building. */
8848 static tree
8849 gimple_build_valueize (tree op)
8851 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8852 return op;
8853 return NULL_TREE;
8856 /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8858 static inline void
8859 gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8860 bool before, gsi_iterator_update update,
8861 gimple_seq seq)
8863 if (before)
8865 if (gsi->bb)
8866 gsi_insert_seq_before (gsi, seq, update);
8867 else
8868 gsi_insert_seq_before_without_update (gsi, seq, update);
8870 else
8872 if (gsi->bb)
8873 gsi_insert_seq_after (gsi, seq, update);
8874 else
8875 gsi_insert_seq_after_without_update (gsi, seq, update);
8879 /* Build the expression CODE OP0 of type TYPE with location LOC,
8880 simplifying it first if possible. Returns the built
8881 expression value and inserts statements possibly defining it
8882 before GSI if BEFORE is true or after GSI if false and advance
8883 the iterator accordingly.
8884 If gsi refers to a basic block simplifying is allowed to look
8885 at all SSA defs while when it does not it is restricted to
8886 SSA defs that are not associated with a basic block yet,
8887 indicating they belong to the currently building sequence. */
8889 tree
8890 gimple_build (gimple_stmt_iterator *gsi,
8891 bool before, gsi_iterator_update update,
8892 location_t loc, enum tree_code code, tree type, tree op0)
8894 gimple_seq seq = NULL;
8895 tree res
8896 = gimple_simplify (code, type, op0, &seq,
8897 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8898 if (!res)
8900 res = create_tmp_reg_or_ssa_name (type);
8901 gimple *stmt;
8902 if (code == REALPART_EXPR
8903 || code == IMAGPART_EXPR
8904 || code == VIEW_CONVERT_EXPR)
8905 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8906 else
8907 stmt = gimple_build_assign (res, code, op0);
8908 gimple_set_location (stmt, loc);
8909 gimple_seq_add_stmt_without_update (&seq, stmt);
8911 gimple_build_insert_seq (gsi, before, update, seq);
8912 return res;
8915 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8916 simplifying it first if possible. Returns the built
8917 expression value inserting any new statements at GSI honoring BEFORE
8918 and UPDATE. */
8920 tree
8921 gimple_build (gimple_stmt_iterator *gsi,
8922 bool before, gsi_iterator_update update,
8923 location_t loc, enum tree_code code, tree type,
8924 tree op0, tree op1)
8926 gimple_seq seq = NULL;
8927 tree res
8928 = gimple_simplify (code, type, op0, op1, &seq,
8929 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8930 if (!res)
8932 res = create_tmp_reg_or_ssa_name (type);
8933 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8934 gimple_set_location (stmt, loc);
8935 gimple_seq_add_stmt_without_update (&seq, stmt);
8937 gimple_build_insert_seq (gsi, before, update, seq);
8938 return res;
8941 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8942 simplifying it first if possible. Returns the built
8943 expression value inserting any new statements at GSI honoring BEFORE
8944 and UPDATE. */
8946 tree
8947 gimple_build (gimple_stmt_iterator *gsi,
8948 bool before, gsi_iterator_update update,
8949 location_t loc, enum tree_code code, tree type,
8950 tree op0, tree op1, tree op2)
8953 gimple_seq seq = NULL;
8954 tree res
8955 = gimple_simplify (code, type, op0, op1, op2, &seq,
8956 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8957 if (!res)
8959 res = create_tmp_reg_or_ssa_name (type);
8960 gimple *stmt;
8961 if (code == BIT_FIELD_REF)
8962 stmt = gimple_build_assign (res, code,
8963 build3 (code, type, op0, op1, op2));
8964 else
8965 stmt = gimple_build_assign (res, code, op0, op1, op2);
8966 gimple_set_location (stmt, loc);
8967 gimple_seq_add_stmt_without_update (&seq, stmt);
8969 gimple_build_insert_seq (gsi, before, update, seq);
8970 return res;
8973 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8974 void) with a location LOC. Returns the built expression value (or NULL_TREE
8975 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8976 and UPDATE. */
8978 tree
8979 gimple_build (gimple_stmt_iterator *gsi,
8980 bool before, gsi_iterator_update update,
8981 location_t loc, combined_fn fn, tree type)
8983 tree res = NULL_TREE;
8984 gimple_seq seq = NULL;
8985 gcall *stmt;
8986 if (internal_fn_p (fn))
8987 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8988 else
8990 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8991 stmt = gimple_build_call (decl, 0);
8993 if (!VOID_TYPE_P (type))
8995 res = create_tmp_reg_or_ssa_name (type);
8996 gimple_call_set_lhs (stmt, res);
8998 gimple_set_location (stmt, loc);
8999 gimple_seq_add_stmt_without_update (&seq, stmt);
9000 gimple_build_insert_seq (gsi, before, update, seq);
9001 return res;
9004 /* Build the call FN (ARG0) with a result of type TYPE
9005 (or no result if TYPE is void) with location LOC,
9006 simplifying it first if possible. Returns the built
9007 expression value (or NULL_TREE if TYPE is void) inserting any new
9008 statements at GSI honoring BEFORE and UPDATE. */
9010 tree
9011 gimple_build (gimple_stmt_iterator *gsi,
9012 bool before, gsi_iterator_update update,
9013 location_t loc, combined_fn fn,
9014 tree type, tree arg0)
9016 gimple_seq seq = NULL;
9017 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
9018 if (!res)
9020 gcall *stmt;
9021 if (internal_fn_p (fn))
9022 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
9023 else
9025 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9026 stmt = gimple_build_call (decl, 1, arg0);
9028 if (!VOID_TYPE_P (type))
9030 res = create_tmp_reg_or_ssa_name (type);
9031 gimple_call_set_lhs (stmt, res);
9033 gimple_set_location (stmt, loc);
9034 gimple_seq_add_stmt_without_update (&seq, stmt);
9036 gimple_build_insert_seq (gsi, before, update, seq);
9037 return res;
9040 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
9041 (or no result if TYPE is void) with location LOC,
9042 simplifying it first if possible. Returns the built
9043 expression value (or NULL_TREE if TYPE is void) inserting any new
9044 statements at GSI honoring BEFORE and UPDATE. */
9046 tree
9047 gimple_build (gimple_stmt_iterator *gsi,
9048 bool before, gsi_iterator_update update,
9049 location_t loc, combined_fn fn,
9050 tree type, tree arg0, tree arg1)
9052 gimple_seq seq = NULL;
9053 tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
9054 gimple_build_valueize);
9055 if (!res)
9057 gcall *stmt;
9058 if (internal_fn_p (fn))
9059 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
9060 else
9062 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9063 stmt = gimple_build_call (decl, 2, arg0, arg1);
9065 if (!VOID_TYPE_P (type))
9067 res = create_tmp_reg_or_ssa_name (type);
9068 gimple_call_set_lhs (stmt, res);
9070 gimple_set_location (stmt, loc);
9071 gimple_seq_add_stmt_without_update (&seq, stmt);
9073 gimple_build_insert_seq (gsi, before, update, seq);
9074 return res;
9077 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
9078 (or no result if TYPE is void) with location LOC,
9079 simplifying it first if possible. Returns the built
9080 expression value (or NULL_TREE if TYPE is void) inserting any new
9081 statements at GSI honoring BEFORE and UPDATE. */
9083 tree
9084 gimple_build (gimple_stmt_iterator *gsi,
9085 bool before, gsi_iterator_update update,
9086 location_t loc, combined_fn fn,
9087 tree type, tree arg0, tree arg1, tree arg2)
9089 gimple_seq seq = NULL;
9090 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
9091 &seq, gimple_build_valueize);
9092 if (!res)
9094 gcall *stmt;
9095 if (internal_fn_p (fn))
9096 stmt = gimple_build_call_internal (as_internal_fn (fn),
9097 3, arg0, arg1, arg2);
9098 else
9100 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9101 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
9103 if (!VOID_TYPE_P (type))
9105 res = create_tmp_reg_or_ssa_name (type);
9106 gimple_call_set_lhs (stmt, res);
9108 gimple_set_location (stmt, loc);
9109 gimple_seq_add_stmt_without_update (&seq, stmt);
9111 gimple_build_insert_seq (gsi, before, update, seq);
9112 return res;
/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
  return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
}
/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0, tree op1)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1);
}
/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
   is void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code,
	      tree type, tree op0, tree op1, tree op2)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1, op2);
}
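/* Note on the code_helper overloads above: code_helper erases the
   distinction between tree codes and combined functions, so generic
   callers need not special-case the two.  A hypothetical sketch:

     code_helper ch = MULT_EXPR;   // or, e.g., CFN_MULH
     tree prod = gimple_build (gsi, true, GSI_SAME_STMT, loc, ch,
			       type, a, b);

   dispatches to the tree_code overload for MULT_EXPR and to the
   combined_fn overload for CFN_MULH.  */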
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert (gimple_stmt_iterator *gsi,
		bool before, gsi_iterator_update update,
		location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such a conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (gsi, before, update, loc, sizetype, op);
}
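/* A minimal usage sketch (hypothetical helper): compute PTR + OFF where
   OFF has an arbitrary integral type.  POINTER_PLUS_EXPR requires its
   offset in ptrofftype, so the conversion helper above supplies the
   cast, folding it away when OFF already has a suitable type.  */

static tree ATTRIBUTE_UNUSED
example_pointer_plus (gimple_stmt_iterator *gsi, location_t loc,
		      tree ptr, tree off)
{
  off = gimple_convert_to_ptrofftype (gsi, true, GSI_SAME_STMT, loc, off);
  return gimple_build (gsi, true, GSI_SAME_STMT, loc,
		       POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
}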
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
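/* A minimal usage sketch (hypothetical helper): splat SCALAR across
   vector type VECTYPE.  A constant SCALAR folds to a VECTOR_CST with no
   statements emitted; a variable SCALAR becomes a VEC_DUPLICATE_EXPR for
   variable-length vectors, or a CONSTRUCTOR assigned to a fresh register
   for fixed-length ones.  */

static tree ATTRIBUTE_UNUSED
example_splat (gimple_stmt_iterator *gsi, location_t loc,
	       tree vectype, tree scalar)
{
  return gimple_build_vector_from_val (gsi, true, GSI_SAME_STMT, loc,
				       vectype, scalar);
}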
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new statements at GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  return builder->build ();
}
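/* A minimal usage sketch (hypothetical helper): build the fixed-length
   vector { X, 0, ..., 0 } of type VECTYPE.  With a non-constant X the
   function above takes the CONSTRUCTOR fallback; with a constant X the
   builder produces a VECTOR_CST directly.  */

static tree ATTRIBUTE_UNUSED
example_vector_first_lane (gimple_stmt_iterator *gsi, location_t loc,
			   tree vectype, tree x)
{
  unsigned int nunits = TYPE_VECTOR_SUBPARTS (vectype).to_constant ();
  /* One pattern per element keeps the encoding unstepped, as the
     function above requires.  */
  tree_vector_builder builder (vectype, nunits, 1);
  builder.quick_push (x);
  for (unsigned int i = 1; i < nunits; ++i)
    builder.quick_push (build_zero_cst (TREE_TYPE (vectype)));
  return gimple_build_vector (gsi, true, GSI_SAME_STMT, loc, &builder);
}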
/* Emit gimple statements at GSI (honoring BEFORE and UPDATE) that take
   the value OLD_SIZE and compute a value guaranteed to be rounded upwards
   to ALIGN, which must be a power of two.

   Return the tree node representing this size; it is of type TYPE.  */

tree
gimple_build_round_up (gimple_stmt_iterator *gsi,
		       bool before, gsi_iterator_update update,
		       location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
}
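/* A minimal usage sketch (hypothetical helper): round SIZE, of type
   sizetype, up to a 16-byte boundary, e.g. 10 -> (10 + 15) & -16 == 16.
   Both emitted statements fold to constants when SIZE is constant.  */

static tree ATTRIBUTE_UNUSED
example_round_up_16 (gimple_stmt_iterator *gsi, location_t loc, tree size)
{
  return gimple_build_round_up (gsi, true, GSI_SAME_STMT, loc,
				sizetype, size, 16);
}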
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0
	      ? gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1
	      ? gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree type = gimple_range_type (stmt);
  if (type && frange::supports_p (type))
    {
      frange r;
      bool sign;
      if (get_global_range_query ()->range_of_stmt (r, stmt)
	  && r.signbit_p (sign))
	return !sign;
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
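/* A minimal usage sketch (hypothetical helper): a caller that must not
   rely on undefined signed overflow can reject answers that set the
   strict-overflow flag.  */

static bool ATTRIBUTE_UNUSED
example_known_nonnegative (gimple *stmt)
{
  bool strict_overflow = false;
  return (gimple_stmt_nonnegative_warnv_p (stmt, &strict_overflow, 0)
	  && !strict_overflow);
}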
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}
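/* A minimal usage sketch (hypothetical helper): trunc (x) -> x is a
   valid simplification whenever x is known to be integer-valued, which
   the predicate above can establish from x's defining statement.  */

static bool ATTRIBUTE_UNUSED
example_trunc_redundant_p (tree arg)
{
  return (TREE_CODE (arg) == SSA_NAME
	  && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (arg), 0));
}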