/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"
#include "gimple-range.h"
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
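/* As an illustration of the canonicalization above (a sketch, not an
   exhaustive list of cases): an initializer such as the POINTER_PLUS_EXPR
   (char *) &a + 4 is rewritten into the equivalent address of a MEM_REF,
   &MEM[(char *)&a + 4], which is the form is_gimple_min_invariant
   accepts for invariant addresses.  */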
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
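/* For example, fold-const.cc may produce an ADDR_EXPR such as &a[i + 1]
   whose array index i + 1 is not a GIMPLE value; the ADDR_EXPR case
   above rejects it, so the caller keeps the original statement rather
   than substituting an invalid RHS.  */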
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				       "resolving virtual function address "
				       "reference to function %s\n",
				       targets.length () == 1
				       ? targets[0]->name ()
				       : "NULL");
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
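/* E.g. for the OBJ_TYPE_REF case above: when the polymorphic call has a
   single possible target (FINAL and targets.length () == 1), the load of
   the virtual function address folds to the address of that target; with
   no possible target the address folds to a null constant, since
   __builtin_unreachable cannot have its address taken.  */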
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
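/* A sketch of the virtual operand threading done above, assuming the
   original statement had VUSE .MEM_1 and VDEF .MEM_2 and the replacement
   sequence contains two stores:

       store A: VUSE .MEM_1, VDEF .MEM_3   (fresh SSA name)
       store B: VUSE .MEM_3, VDEF .MEM_2   (reuses the original VDEF)

   so downstream uses of .MEM_2 stay valid without any SSA renaming.  */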
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN
   with NARGS arguments, where the arguments in GIMPLE form
   follow the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
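/* Example usage (assuming FNDECL, ARG0 and ARG1 are already valid GIMPLE
   values): rewrite the call at GSI to call FNDECL with two arguments:

       update_gimple_call (&gsi, fndecl, 2, arg0, arg1);  */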
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
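/* Example usage: fold away a call whose result is known to be zero,
   keeping the lhs assignment when one exists:

       replace_call_with_value (gsi, integer_zero_node);  */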
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
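/* For instance, with a 64-bit size_t the valid range is [0, 2^63 - 1]
   (sizes greater than SSIZE_MAX are invalid).  If the ranger proves
   SIZE is either 0 or at least 2^63 (e.g. a negative value converted
   to size_t), the intersection with the valid range is exactly [0, 0]
   and the function returns true.  */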
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
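/* Two examples of the foldings above: with int variables a and b,
   memcpy (&a, &b, sizeof (int)) becomes the plain assignment a = b
   (a single load/store in a register-sized integer type), and for
   mempcpy the lhs is replaced by the value DEST + LEN after the copy.  */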
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if ((!INTEGRAL_TYPE_P (etype)
       && !POINTER_TYPE_P (etype))
      || TREE_CODE (etype) == BITINT_TYPE)
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      /* Replicate the low byte of C across the width of the store.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      /* Shift by 31 then 1 to avoid an out-of-range shift count when
	 HOST_WIDE_INT is 32 bits wide.  */
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
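/* For example, with a suitably aligned 4-byte int i on an 8-bit-byte
   target, memset (&i, 0x41, 4) is folded above to the single store
   i = 0x41414141 by the byte replication of C shown in the code.  */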
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);
      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when the exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL, but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_ref_flexible_size_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
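/* For example, for a declared array char a[4] whose contents are not
   known, SRK_LENRANGE computes PDATA->MINLEN 0 and an (optimistic)
   upper bound of 3: the array size minus one for the terminating
   nul.  */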
1842 /* For an ARG referencing one or more strings, try to obtain the range
1843 of their lengths, or the size of the largest array ARG referes to if
1844 the range of lengths cannot be determined, and store all in *PDATA.
1845 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1846 the maximum constant value.
1847 If ARG is an SSA_NAME, follow its use-def chains.  If RKIND ==
1848 SRK_STRLEN and PDATA->MAXLEN is not equal to the determined
1849 length, or if the length cannot be determined at all, return false.
1850 VISITED is a bitmap of visited variables.
1851 RKIND determines the kind of value or range to obtain (see
1852 strlen_range_kind).
1853 Set PDATA->DECL if ARG refers to an unterminated constant array.
1854 On input, set ELTSIZE to 1 for normal single byte character strings,
1855 and either 2 or 4 for wide character strings (the size of wchar_t).
1856 Return true if *PDATA was successfully populated and false otherwise. */
1858 static bool
1859 get_range_strlen (tree arg, bitmap visited,
1860 strlen_range_kind rkind,
1861 c_strlen_data *pdata, unsigned eltsize)
1864 if (TREE_CODE (arg) != SSA_NAME)
1865 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1867 /* If ARG is registered for SSA update we cannot look at its defining
1868 statement. */
1869 if (name_registered_for_update_p (arg))
1870 return false;
1872 /* If we were already here, break the infinite cycle. */
1873 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1874 return true;
1876 tree var = arg;
1877 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1879 switch (gimple_code (def_stmt))
1881 case GIMPLE_ASSIGN:
1882 /* The RHS of the statement defining VAR must either have a
1883 constant length or come from another SSA_NAME with a constant
1884 length. */
1885 if (gimple_assign_single_p (def_stmt)
1886 || gimple_assign_unary_nop_p (def_stmt))
1888 tree rhs = gimple_assign_rhs1 (def_stmt);
1889 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1891 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1893 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1894 gimple_assign_rhs3 (def_stmt) };
1896 for (unsigned int i = 0; i < 2; i++)
1897 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1899 if (rkind != SRK_LENRANGE)
1900 return false;
1901 /* Set the upper bound to the maximum to prevent
1902 it from being adjusted in the next iteration but
1903 leave MINLEN and the more conservative MAXBOUND
1904 determined so far alone (or leave them null if
1905 they haven't been set yet).  That MINLEN is in
1906 fact zero can be inferred from MAXLEN being
1907 unbounded, but the minimum discovered so far is
1908 still useful for diagnostics.  */
1909 pdata->maxlen = build_all_ones_cst (size_type_node);
1911 return true;
1913 return false;
1915 case GIMPLE_PHI:
1916 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1917 must have a constant length. */
1918 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1920 tree arg = gimple_phi_arg (def_stmt, i)->def;
1922 /* If this PHI has itself as an argument, we cannot
1923 determine the string length of this argument. However,
1924 if we can find a constant string length for the other
1925 PHI args then we can still be sure that this is a
1926 constant string length. So be optimistic and just
1927 continue with the next argument. */
1928 if (arg == gimple_phi_result (def_stmt))
1929 continue;
1931 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1933 if (rkind != SRK_LENRANGE)
1934 return false;
1935 /* Set the upper bound to the maximum to prevent
1936 it from being adjusted in the next iteration but
1937 leave MINLEN and the more conservative MAXBOUND
1938 determined so far alone (or leave them null if
1939 they haven't been set yet).  That MINLEN is in
1940 fact zero can be inferred from MAXLEN being
1941 unbounded, but the minimum discovered so far is
1942 still useful for diagnostics.  */
1943 pdata->maxlen = build_all_ones_cst (size_type_node);
1946 return true;
1948 default:
1949 return false;
1953 /* Try to obtain the range of the lengths of the string(s) referenced
1954 by ARG, or the size of the largest array ARG refers to if the range
1955 of lengths cannot be determined, and store all in *PDATA which must
1956 be zero-initialized on input except PDATA->MAXBOUND may be set to
1957 a non-null tree node other than INTEGER_CST to request that it be
1958 set to the length of the longest string in a PHI.  ELTSIZE is
1959 the expected size of the string element in bytes: 1 for char and
1960 some power of 2 for wide characters.
1961 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1962 for optimization. Returning false means that a nonzero PDATA->MINLEN
1963 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1964 is -1 (in that case, the actual range is indeterminate, i.e.,
1965 [0, PTRDIFF_MAX - 2]).  */
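/* Editor's illustration (hypothetical caller, not part of the original
   source).  For ARG defined as PHI <&"abc"[0], &"1234567"[0]> and an
   ELTSIZE of 1, a successful call yields the range [3, 7]:

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ;  // here lendata.minlen == 3 and lendata.maxlen == 7
*/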
1967 bool
1968 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1970 auto_bitmap visited;
1971 tree maxbound = pdata->maxbound;
1973 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1975 /* On failure extend the length range to an impossible maximum
1976 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1977 members can stay unchanged regardless. */
1978 pdata->minlen = ssize_int (0);
1979 pdata->maxlen = build_all_ones_cst (size_type_node);
1981 else if (!pdata->minlen)
1982 pdata->minlen = ssize_int (0);
1984 /* If it's unchanged from its initial non-null value, set the conservative
1985 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1986 if (maxbound && pdata->maxbound == maxbound)
1987 pdata->maxbound = build_all_ones_cst (size_type_node);
1989 return !integer_all_onesp (pdata->maxlen);
1992 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1993 For ARG of pointer types, NONSTR indicates if the caller is prepared
1994 to handle unterminated strings. For integer ARG and when RKIND ==
1995 SRK_INT_VALUE, NONSTR must be null.
1997 If an unterminated array is discovered and our caller handles
1998 unterminated arrays, then bubble up the offending DECL and
1999 return the maximum size. Otherwise return NULL. */
2001 static tree
2002 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2004 /* A non-null NONSTR is meaningless when determining the maximum
2005 value of an integer ARG. */
2006 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2007 /* ARG must have an integral type when RKIND says so. */
2008 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2010 auto_bitmap visited;
2012 /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
2013 is unbounded.  */
2014 c_strlen_data lendata = { };
2015 if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2016 lendata.maxlen = NULL_TREE;
2017 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2018 lendata.maxlen = NULL_TREE;
2020 if (nonstr)
2022 /* For callers prepared to handle unterminated arrays set
2023 *NONSTR to point to the declaration of the array and return
2024 the maximum length/size. */
2025 *nonstr = lendata.decl;
2026 return lendata.maxlen;
2029 /* Fail if the constant array isn't nul-terminated. */
2030 return lendata.decl ? NULL_TREE : lendata.maxlen;
2033 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2034 true, strictly less than) the lower bound of SIZE at compile time and false
2035 otherwise. */
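/* Editor's worked example (not from the original source): with the
   range of LEN being [0, 4] and the range of SIZE being [8, 16],
   known_lower (stmt, len, size) is true because the largest possible
   LEN (4) is below the smallest possible SIZE (8).  With LEN in
   [0, 8] the non-strict form still holds (8 <= 8) but the STRICT
   form fails, since 8 < 8 is false.  */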
2037 static bool
2038 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2040 if (len == NULL_TREE)
2041 return false;
2043 wide_int size_range[2];
2044 wide_int len_range[2];
2045 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2047 if (strict)
2048 return wi::ltu_p (len_range[1], size_range[0]);
2049 else
2050 return wi::leu_p (len_range[1], size_range[0]);
2053 return false;
2056 /* Fold a call to the strcpy builtin with arguments DEST and SRC.
2057 Return true if the call was simplified into a more efficient form
2058 and false if no simplification can be made.  */
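/* Editor's illustration of the fold (hypothetical input, with
   strlen (SRC) known to be 3 at compile time):

     strcpy (d, s);

   becomes

     memcpy (d, s, 4);   // 3 + 1 for the terminating nul  */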
2060 static bool
2061 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2062 tree dest, tree src)
2064 gimple *stmt = gsi_stmt (*gsi);
2065 location_t loc = gimple_location (stmt);
2066 tree fn;
2068 /* If SRC and DEST are the same (and not volatile), return DEST. */
2069 if (operand_equal_p (src, dest, 0))
2071 /* Issue -Wrestrict unless the pointers are null (those do
2072 not point to objects and so do not indicate an overlap;
2073 such calls could be the result of sanitization and jump
2074 threading). */
2075 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2077 tree func = gimple_call_fndecl (stmt);
2079 warning_at (loc, OPT_Wrestrict,
2080 "%qD source argument is the same as destination",
2081 func);
2084 replace_call_with_value (gsi, dest);
2085 return true;
2088 if (optimize_function_for_size_p (cfun))
2089 return false;
2091 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2092 if (!fn)
2093 return false;
2095 /* Set to non-null if ARG refers to an unterminated array. */
2096 tree nonstr = NULL;
2097 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2099 if (nonstr)
2101 /* Avoid folding calls with unterminated arrays. */
2102 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2103 warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2104 suppress_warning (stmt, OPT_Wstringop_overread);
2105 return false;
2108 if (!len)
2109 return false;
2111 len = fold_convert_loc (loc, size_type_node, len);
2112 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2113 len = force_gimple_operand_gsi (gsi, len, true,
2114 NULL_TREE, true, GSI_SAME_STMT);
2115 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2116 replace_call_with_call_and_fold (gsi, repl);
2117 return true;
2120 /* Fold a call to the strncpy builtin with arguments DEST, SRC,
2121 and LEN.  Return true if the call was simplified and false if no
2122 simplification can be made.  */
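/* Editor's illustration (hypothetical input, strlen (SRC) == 3, so
   SSIZE == 4):

     strncpy (d, s, 0);   // folded to d
     strncpy (d, s, 4);   // folded to memcpy (d, s, 4)
     strncpy (d, s, 8);   // not folded: 4 < 8, the nul padding
                          //   would require an extra memset  */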
2124 static bool
2125 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2126 tree dest, tree src, tree len)
2128 gimple *stmt = gsi_stmt (*gsi);
2129 location_t loc = gimple_location (stmt);
2130 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2132 /* If the LEN parameter is zero, return DEST. */
2133 if (integer_zerop (len))
2135 /* Avoid warning if the destination refers to an array/pointer
2136 decorated with attribute nonstring.  */
2137 if (!nonstring)
2139 tree fndecl = gimple_call_fndecl (stmt);
2141 /* Warn about the lack of nul termination: the result is not
2142 a (nul-terminated) string. */
2143 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2144 if (slen && !integer_zerop (slen))
2145 warning_at (loc, OPT_Wstringop_truncation,
2146 "%qD destination unchanged after copying no bytes "
2147 "from a string of length %E",
2148 fndecl, slen);
2149 else
2150 warning_at (loc, OPT_Wstringop_truncation,
2151 "%qD destination unchanged after copying no bytes",
2152 fndecl);
2155 replace_call_with_value (gsi, dest);
2156 return true;
2159 /* We can't compare slen with len as constants below if len is not a
2160 constant. */
2161 if (TREE_CODE (len) != INTEGER_CST)
2162 return false;
2164 /* Now, we must be passed a constant src ptr parameter. */
2165 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2166 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2167 return false;
2169 /* The size of the source string including the terminating nul. */
2170 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2172 /* We do not support simplification of this case, though we do
2173 support it when expanding trees into RTL. */
2174 /* FIXME: generate a call to __builtin_memset. */
2175 if (tree_int_cst_lt (ssize, len))
2176 return false;
2178 /* Diagnose truncation that leaves the copy unterminated. */
2179 maybe_diag_stxncpy_trunc (*gsi, src, len);
2181 /* OK, transform into builtin memcpy.  */
2182 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2183 if (!fn)
2184 return false;
2186 len = fold_convert_loc (loc, size_type_node, len);
2187 len = force_gimple_operand_gsi (gsi, len, true,
2188 NULL_TREE, true, GSI_SAME_STMT);
2189 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2190 replace_call_with_call_and_fold (gsi, repl);
2192 return true;
2195 /* Fold function call to builtin strchr or strrchr.
2196 If both arguments are constant, evaluate and fold the result,
2197 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2198 In general strlen is significantly faster than strchr
2199 due to being a simpler operation. */
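/* Editor's illustration (hypothetical inputs):

     strchr ("hello", 'l');   // folded to "hello" + 2
     strchr ("hello", 'z');   // folded to a null pointer
     strchr (s, 0);           // folded to s + strlen (s)  */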
2200 static bool
2201 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2203 gimple *stmt = gsi_stmt (*gsi);
2204 tree str = gimple_call_arg (stmt, 0);
2205 tree c = gimple_call_arg (stmt, 1);
2206 location_t loc = gimple_location (stmt);
2207 const char *p;
2208 char ch;
2210 if (!gimple_call_lhs (stmt))
2211 return false;
2213 /* Avoid folding if the first argument is not a nul-terminated array.
2214 Defer warning until later. */
2215 if (!check_nul_terminated_array (NULL_TREE, str))
2216 return false;
2218 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2220 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2222 if (p1 == NULL)
2224 replace_call_with_value (gsi, integer_zero_node);
2225 return true;
2228 tree len = build_int_cst (size_type_node, p1 - p);
2229 gimple_seq stmts = NULL;
2230 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2231 POINTER_PLUS_EXPR, str, len);
2232 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2233 gsi_replace_with_seq_vops (gsi, stmts);
2234 return true;
2237 if (!integer_zerop (c))
2238 return false;
2240 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2241 if (is_strrchr && optimize_function_for_size_p (cfun))
2243 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2245 if (strchr_fn)
2247 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2248 replace_call_with_call_and_fold (gsi, repl);
2249 return true;
2252 return false;
2255 tree len;
2256 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2258 if (!strlen_fn)
2259 return false;
2261 /* Create newstr = strlen (str). */
2262 gimple_seq stmts = NULL;
2263 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2264 gimple_set_location (new_stmt, loc);
2265 len = create_tmp_reg_or_ssa_name (size_type_node);
2266 gimple_call_set_lhs (new_stmt, len);
2267 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2269 /* Create (str p+ strlen (str)). */
2270 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2271 POINTER_PLUS_EXPR, str, len);
2272 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2273 gsi_replace_with_seq_vops (gsi, stmts);
2274 /* gsi now points at the assignment to the lhs, get a
2275 stmt iterator to the strlen.
2276 ??? We can't use gsi_for_stmt as that doesn't work when the
2277 CFG isn't built yet. */
2278 gimple_stmt_iterator gsi2 = *gsi;
2279 gsi_prev (&gsi2);
2280 fold_stmt (&gsi2);
2281 return true;
2284 /* Fold function call to builtin strstr.
2285 If both arguments are constant, evaluate and fold the result,
2286 additionally fold strstr (x, "") into x and strstr (x, "c")
2287 into strchr (x, 'c'). */
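/* Editor's illustration (hypothetical inputs):

     strstr ("needle in hay", "in");   // folded to "needle in hay" + 7
     strstr (x, "");                   // folded to x
     strstr (x, "c");                  // folded to strchr (x, 'c')  */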
2288 static bool
2289 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2291 gimple *stmt = gsi_stmt (*gsi);
2292 if (!gimple_call_lhs (stmt))
2293 return false;
2295 tree haystack = gimple_call_arg (stmt, 0);
2296 tree needle = gimple_call_arg (stmt, 1);
2298 /* Avoid folding if either argument is not a nul-terminated array.
2299 Defer warning until later. */
2300 if (!check_nul_terminated_array (NULL_TREE, haystack)
2301 || !check_nul_terminated_array (NULL_TREE, needle))
2302 return false;
2304 const char *q = c_getstr (needle);
2305 if (q == NULL)
2306 return false;
2308 if (const char *p = c_getstr (haystack))
2310 const char *r = strstr (p, q);
2312 if (r == NULL)
2314 replace_call_with_value (gsi, integer_zero_node);
2315 return true;
2318 tree len = build_int_cst (size_type_node, r - p);
2319 gimple_seq stmts = NULL;
2320 gimple *new_stmt
2321 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2322 haystack, len);
2323 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2324 gsi_replace_with_seq_vops (gsi, stmts);
2325 return true;
2328 /* For strstr (x, "") return x. */
2329 if (q[0] == '\0')
2331 replace_call_with_value (gsi, haystack);
2332 return true;
2335 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2336 if (q[1] == '\0')
2338 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2339 if (strchr_fn)
2341 tree c = build_int_cst (integer_type_node, q[0]);
2342 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2343 replace_call_with_call_and_fold (gsi, repl);
2344 return true;
2348 return false;
2351 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2352 to the call.
2354 Return true if the call was simplified and false if no
2355 simplification was possible.
2357 The simplified form replaces the call with code that computes the
2358 same value in a more efficient manner, possibly using calls to
2359 other builtin functions such as strlen and memcpy.  */
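/* Editor's illustration (hypothetical input, strlen (SRC) known to
   be 3):

     strcat (d, s);

   becomes, when optimizing for speed,

     tmp = strlen (d);
     memcpy (d + tmp, s, 4);   // 3 + 1 for the terminating nul  */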
2369 static bool
2370 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2372 gimple *stmt = gsi_stmt (*gsi);
2373 location_t loc = gimple_location (stmt);
2375 const char *p = c_getstr (src);
2377 /* If the string length is zero, return the dst parameter. */
2378 if (p && *p == '\0')
2380 replace_call_with_value (gsi, dst);
2381 return true;
2384 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2385 return false;
2387 /* See if we can store by pieces into (dst + strlen(dst)). */
2388 tree newdst;
2389 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2390 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2392 if (!strlen_fn || !memcpy_fn)
2393 return false;
2395 /* If the length of the source string isn't computable don't
2396 split strcat into strlen and memcpy. */
2397 tree len = get_maxval_strlen (src, SRK_STRLEN);
2398 if (! len)
2399 return false;
2401 /* Create strlen (dst). */
2402 gimple_seq stmts = NULL, stmts2;
2403 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2404 gimple_set_location (repl, loc);
2405 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2406 gimple_call_set_lhs (repl, newdst);
2407 gimple_seq_add_stmt_without_update (&stmts, repl);
2409 /* Create (dst p+ strlen (dst)). */
2410 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2411 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2412 gimple_seq_add_seq_without_update (&stmts, stmts2);
2414 len = fold_convert_loc (loc, size_type_node, len);
2415 len = size_binop_loc (loc, PLUS_EXPR, len,
2416 build_int_cst (size_type_node, 1));
2417 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2418 gimple_seq_add_seq_without_update (&stmts, stmts2);
2420 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2421 gimple_seq_add_stmt_without_update (&stmts, repl);
2422 if (gimple_call_lhs (stmt))
2424 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2425 gimple_seq_add_stmt_without_update (&stmts, repl);
2426 gsi_replace_with_seq_vops (gsi, stmts);
2427 /* gsi now points at the assignment to the lhs, get a
2428 stmt iterator to the memcpy call.
2429 ??? We can't use gsi_for_stmt as that doesn't work when the
2430 CFG isn't built yet. */
2431 gimple_stmt_iterator gsi2 = *gsi;
2432 gsi_prev (&gsi2);
2433 fold_stmt (&gsi2);
2435 else
2437 gsi_replace_with_seq_vops (gsi, stmts);
2438 fold_stmt (gsi);
2440 return true;
2443 /* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
2444 are the arguments to the call.  */
2446 static bool
2447 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2449 gimple *stmt = gsi_stmt (*gsi);
2450 tree dest = gimple_call_arg (stmt, 0);
2451 tree src = gimple_call_arg (stmt, 1);
2452 tree size = gimple_call_arg (stmt, 2);
2453 tree fn;
2454 const char *p;
2457 p = c_getstr (src);
2458 /* If the SRC parameter is "", return DEST. */
2459 if (p && *p == '\0')
2461 replace_call_with_value (gsi, dest);
2462 return true;
2465 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2466 return false;
2468 /* If __builtin_strcat_chk is used, assume strcat is available. */
2469 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2470 if (!fn)
2471 return false;
2473 gimple *repl = gimple_build_call (fn, 2, dest, src);
2474 replace_call_with_call_and_fold (gsi, repl);
2475 return true;
2478 /* Simplify a call to the strncat builtin. */
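/* Editor's illustration (hypothetical inputs, strlen (SRC) == 3):

     strncat (d, s, 0);   // folded to d
     strncat (d, s, 8);   // folded to strcat (d, s): 3 < 8
     strncat (d, s, 2);   // not folded: the bound truncates the copy  */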
2480 static bool
2481 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2483 gimple *stmt = gsi_stmt (*gsi);
2484 tree dst = gimple_call_arg (stmt, 0);
2485 tree src = gimple_call_arg (stmt, 1);
2486 tree len = gimple_call_arg (stmt, 2);
2487 tree src_len = c_strlen (src, 1);
2489 /* If the requested length is zero, or the src parameter string
2490 length is zero, return the dst parameter. */
2491 if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2493 replace_call_with_value (gsi, dst);
2494 return true;
2497 /* Return early if the requested len is less than the string length.
2498 Warnings will be issued elsewhere later. */
2499 if (!src_len || known_lower (stmt, len, src_len, true))
2500 return false;
2502 /* Warn on constant LEN. */
2503 if (TREE_CODE (len) == INTEGER_CST)
2505 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2506 tree dstsize;
2508 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2509 && TREE_CODE (dstsize) == INTEGER_CST)
2511 int cmpdst = tree_int_cst_compare (len, dstsize);
2513 if (cmpdst >= 0)
2515 tree fndecl = gimple_call_fndecl (stmt);
2517 /* Strncat copies (at most) LEN bytes and always appends
2518 the terminating NUL so the specified bound should never
2519 be equal to (or greater than) the size of the destination.
2520 If it is, the copy could overflow. */
2521 location_t loc = gimple_location (stmt);
2522 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2523 cmpdst == 0
2524 ? G_("%qD specified bound %E equals "
2525 "destination size")
2526 : G_("%qD specified bound %E exceeds "
2527 "destination size %E"),
2528 fndecl, len, dstsize);
2529 if (nowarn)
2530 suppress_warning (stmt, OPT_Wstringop_overflow_);
2534 if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2535 && tree_int_cst_compare (src_len, len) == 0)
2537 tree fndecl = gimple_call_fndecl (stmt);
2538 location_t loc = gimple_location (stmt);
2540 /* To avoid possible overflow the specified bound should also
2541 not be equal to the length of the source, even when the size
2542 of the destination is unknown (it's not an uncommon mistake
2543 to pass the length of the source as the bound to strncat).  */
2544 if (warning_at (loc, OPT_Wstringop_overflow_,
2545 "%qD specified bound %E equals source length",
2546 fndecl, len))
2547 suppress_warning (stmt, OPT_Wstringop_overflow_);
2551 if (!known_lower (stmt, src_len, len))
2552 return false;
2554 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2556 /* If the replacement function decl isn't initialized, don't do the
2557 transformation.  */
2558 if (!fn)
2559 return false;
2561 /* Otherwise, emit a call to strcat. */
2562 gcall *repl = gimple_build_call (fn, 2, dst, src);
2563 replace_call_with_call_and_fold (gsi, repl);
2564 return true;
2567 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2568 LEN, and SIZE. */
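/* Editor's illustration (hypothetical inputs, OS a known object size):

     __strncat_chk (d, "", n, os);   // folded to d
     __strncat_chk (d, s, n, os);    // strlen (s) <= n known:
                                     //   __strcat_chk (d, s, os)
     __strncat_chk (d, s, n, -1);    // folded to strncat (d, s, n)  */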
2570 static bool
2571 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2573 gimple *stmt = gsi_stmt (*gsi);
2574 tree dest = gimple_call_arg (stmt, 0);
2575 tree src = gimple_call_arg (stmt, 1);
2576 tree len = gimple_call_arg (stmt, 2);
2577 tree size = gimple_call_arg (stmt, 3);
2578 tree fn;
2579 const char *p;
2581 p = c_getstr (src);
2582 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2583 if ((p && *p == '\0')
2584 || integer_zerop (len))
2586 replace_call_with_value (gsi, dest);
2587 return true;
2590 if (! integer_all_onesp (size))
2592 tree src_len = c_strlen (src, 1);
2593 if (known_lower (stmt, src_len, len))
2595 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2596 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2597 if (!fn)
2598 return false;
2600 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2601 replace_call_with_call_and_fold (gsi, repl);
2602 return true;
2604 return false;
2607 /* If __builtin_strncat_chk is used, assume strncat is available. */
2608 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2609 if (!fn)
2610 return false;
2612 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2613 replace_call_with_call_and_fold (gsi, repl);
2614 return true;
2617 /* Build and append gimple statements to STMTS that load the first
2618 character of the memory location identified by STR.  LOC is the
2619 location of the statement.  */
2621 static tree
2622 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2624 tree var;
2626 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2627 tree cst_uchar_ptr_node
2628 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2629 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2631 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2632 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2633 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2635 gimple_assign_set_lhs (stmt, var);
2636 gimple_seq_add_stmt_without_update (stmts, stmt);
2638 return var;
2641 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator.  */
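/* Editor's illustration (hypothetical inputs):

     strcmp (s, s);          // folded to 0
     strncmp (s1, s2, 0);    // folded to 0
     strcmp ("ab", "ac");    // folded to a negative constant
     strcmp (s, "");         // folded to *(const unsigned char *) s
     strncmp (s1, s2, 1);    // folded to the difference of the first
                             //   characters, read as unsigned char  */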
2643 static bool
2644 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2646 gimple *stmt = gsi_stmt (*gsi);
2647 tree callee = gimple_call_fndecl (stmt);
2648 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2650 tree type = integer_type_node;
2651 tree str1 = gimple_call_arg (stmt, 0);
2652 tree str2 = gimple_call_arg (stmt, 1);
2653 tree lhs = gimple_call_lhs (stmt);
2655 tree bound_node = NULL_TREE;
2656 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2658 /* Handle strncmp and strncasecmp functions. */
2659 if (gimple_call_num_args (stmt) == 3)
2661 bound_node = gimple_call_arg (stmt, 2);
2662 if (tree_fits_uhwi_p (bound_node))
2663 bound = tree_to_uhwi (bound_node);
2666 /* If the BOUND parameter is zero, return zero. */
2667 if (bound == 0)
2669 replace_call_with_value (gsi, integer_zero_node);
2670 return true;
2673 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2674 if (operand_equal_p (str1, str2, 0))
2676 replace_call_with_value (gsi, integer_zero_node);
2677 return true;
2680 /* Initially set to the number of characters, including the terminating
2681 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2682 the array Sx is not terminated by a nul.
2683 For nul-terminated strings, LENx is then adjusted down to the
2684 string length so that LENx == NULPOSx holds.  */
2685 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2686 const char *p1 = getbyterep (str1, &len1);
2687 const char *p2 = getbyterep (str2, &len2);
2689 /* The position of the terminating nul character if one exists, otherwise
2690 a value greater than LENx. */
2691 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2693 if (p1)
2695 size_t n = strnlen (p1, len1);
2696 if (n < len1)
2697 len1 = nulpos1 = n;
2700 if (p2)
2702 size_t n = strnlen (p2, len2);
2703 if (n < len2)
2704 len2 = nulpos2 = n;
2707 /* For known strings, return an immediate value. */
2708 if (p1 && p2)
2710 int r = 0;
2711 bool known_result = false;
2713 switch (fcode)
2715 case BUILT_IN_STRCMP:
2716 case BUILT_IN_STRCMP_EQ:
2717 if (len1 != nulpos1 || len2 != nulpos2)
2718 break;
2720 r = strcmp (p1, p2);
2721 known_result = true;
2722 break;
2724 case BUILT_IN_STRNCMP:
2725 case BUILT_IN_STRNCMP_EQ:
2727 if (bound == HOST_WIDE_INT_M1U)
2728 break;
2730 /* Reduce the bound to be no more than the length
2731 of the shorter of the two strings, or the sizes
2732 of the unterminated arrays. */
2733 unsigned HOST_WIDE_INT n = bound;
2735 if (len1 == nulpos1 && len1 < n)
2736 n = len1 + 1;
2737 if (len2 == nulpos2 && len2 < n)
2738 n = len2 + 1;
2740 if (MIN (nulpos1, nulpos2) + 1 < n)
2741 break;
2743 r = strncmp (p1, p2, n);
2744 known_result = true;
2745 break;
2747 /* The only case we can handle is when the strings are equal (result
2748 0), which is already covered by the operand_equal_p check above.  */
2749 case BUILT_IN_STRCASECMP:
2750 break;
2751 case BUILT_IN_STRNCASECMP:
2753 if (bound == HOST_WIDE_INT_M1U)
2754 break;
2755 r = strncmp (p1, p2, bound);
2756 if (r == 0)
2757 known_result = true;
2758 break;
2760 default:
2761 gcc_unreachable ();
2764 if (known_result)
2766 replace_call_with_value (gsi, build_cmp_result (type, r));
2767 return true;
2771 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2772 || fcode == BUILT_IN_STRCMP
2773 || fcode == BUILT_IN_STRCMP_EQ
2774 || fcode == BUILT_IN_STRCASECMP;
2776 location_t loc = gimple_location (stmt);
2778 /* If the second arg is "", return *(const unsigned char*)arg1. */
2779 if (p2 && *p2 == '\0' && nonzero_bound)
2781 gimple_seq stmts = NULL;
2782 tree var = gimple_load_first_char (loc, str1, &stmts);
2783 if (lhs)
2785 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2786 gimple_seq_add_stmt_without_update (&stmts, stmt);
2789 gsi_replace_with_seq_vops (gsi, stmts);
2790 return true;
2793 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2794 if (p1 && *p1 == '\0' && nonzero_bound)
2796 gimple_seq stmts = NULL;
2797 tree var = gimple_load_first_char (loc, str2, &stmts);
2799 if (lhs)
2801 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2802 stmt = gimple_build_assign (c, NOP_EXPR, var);
2803 gimple_seq_add_stmt_without_update (&stmts, stmt);
2805 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2806 gimple_seq_add_stmt_without_update (&stmts, stmt);
2809 gsi_replace_with_seq_vops (gsi, stmts);
2810 return true;
2813 /* If BOUND is one, return an expression corresponding to
2814 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2815 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2817 gimple_seq stmts = NULL;
2818 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2819 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2821 if (lhs)
2823 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2824 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2825 gimple_seq_add_stmt_without_update (&stmts, convert1);
2827 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2828 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2829 gimple_seq_add_stmt_without_update (&stmts, convert2);
2831 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2832 gimple_seq_add_stmt_without_update (&stmts, stmt);
2835 gsi_replace_with_seq_vops (gsi, stmts);
2836 return true;
2839 /* If BOUND is greater than the length of one constant string,
2840 and the other argument is also a nul-terminated string, replace
2841 strncmp with strcmp. */
2842 if (fcode == BUILT_IN_STRNCMP
2843 && bound > 0 && bound < HOST_WIDE_INT_M1U
2844 && ((p2 && len2 < bound && len2 == nulpos2)
2845 || (p1 && len1 < bound && len1 == nulpos1)))
2847 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2848 if (!fn)
2849 return false;
2850 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2851 replace_call_with_call_and_fold (gsi, repl);
2852 return true;
2855 return false;
2858 /* Fold a call to the memchr builtin pointed to by the GSI iterator.  */
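/* Editor's illustration (hypothetical inputs):

     memchr (p, c, 0);          // folded to a null pointer
     memchr ("abc", 'b', 3);    // folded to "abc" + 1
     memchr ("abc", 'z', 3);    // folded to a null pointer  */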
2860 static bool
2861 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2863 gimple *stmt = gsi_stmt (*gsi);
2864 tree lhs = gimple_call_lhs (stmt);
2865 tree arg1 = gimple_call_arg (stmt, 0);
2866 tree arg2 = gimple_call_arg (stmt, 1);
2867 tree len = gimple_call_arg (stmt, 2);
2869 /* If the LEN parameter is zero, return zero. */
2870 if (integer_zerop (len))
2872 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2873 return true;
2876 char c;
2877 if (TREE_CODE (arg2) != INTEGER_CST
2878 || !tree_fits_uhwi_p (len)
2879 || !target_char_cst_p (arg2, &c))
2880 return false;
2882 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2883 unsigned HOST_WIDE_INT string_length;
2884 const char *p1 = getbyterep (arg1, &string_length);
2886 if (p1)
2888 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2889 if (r == NULL)
2891 tree mem_size, offset_node;
2892 byte_representation (arg1, &offset_node, &mem_size, NULL);
2893 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2894 ? 0 : tree_to_uhwi (offset_node);
2895 /* MEM_SIZE is the size of the array the string literal
2896 is stored in. */
2897 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2898 gcc_checking_assert (string_length <= string_size);
2899 if (length <= string_size)
2901 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2902 return true;
2905 else
2907 unsigned HOST_WIDE_INT offset = r - p1;
2908 gimple_seq stmts = NULL;
2909 if (lhs != NULL_TREE)
2911 tree offset_cst = build_int_cst (sizetype, offset);
2912 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2913 arg1, offset_cst);
2914 gimple_seq_add_stmt_without_update (&stmts, stmt);
2916 else
2917 gimple_seq_add_stmt_without_update (&stmts,
2918 gimple_build_nop ());
2920 gsi_replace_with_seq_vops (gsi, stmts);
2921 return true;
2925 return false;
2928 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
2929 to the call.  UNLOCKED is true if this is actually a call to
2930 fputs_unlocked.  The transformation is only done when the return
2931 value of the call is unused.  Return true if the call was
2932 simplified and false if no simplification was possible.  */
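/* Editor's illustration (hypothetical inputs, return value unused):

     fputs ("", f);     // call removed entirely
     fputs ("x", f);    // folded to fputc ('x', f)
     fputs ("xy", f);   // folded to fwrite ("xy", 1, 2, f),
                        //   unless optimizing for size  */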
2935 static bool
2936 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2937 tree arg0, tree arg1,
2938 bool unlocked)
2940 gimple *stmt = gsi_stmt (*gsi);
2942 /* If we're using an unlocked function, assume the other unlocked
2943 functions exist explicitly. */
2944 tree const fn_fputc = (unlocked
2945 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2946 : builtin_decl_implicit (BUILT_IN_FPUTC));
2947 tree const fn_fwrite = (unlocked
2948 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2949 : builtin_decl_implicit (BUILT_IN_FWRITE));
2951 /* If the return value is used, don't do the transformation. */
2952 if (gimple_call_lhs (stmt))
2953 return false;
2955 /* Get the length of the string passed to fputs. If the length
2956 can't be determined, punt. */
2957 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2958 if (!len || TREE_CODE (len) != INTEGER_CST)
2959 return false;
2961 switch (compare_tree_int (len, 1))
2963 case -1: /* length is 0, delete the call entirely.  */
2964 replace_call_with_value (gsi, integer_zero_node);
2965 return true;
2967 case 0: /* length is 1, call fputc. */
2969 const char *p = c_getstr (arg0);
2970 if (p != NULL)
2972 if (!fn_fputc)
2973 return false;
2975 gimple *repl
2976 = gimple_build_call (fn_fputc, 2,
2977 build_int_cst (integer_type_node, p[0]),
2978 arg1);
2979 replace_call_with_call_and_fold (gsi, repl);
2980 return true;
2983 /* FALLTHROUGH */
2984 case 1: /* length is greater than 1, call fwrite. */
2986 /* If optimizing for size keep fputs. */
2987 if (optimize_function_for_size_p (cfun))
2988 return false;
2989 /* New argument list transforming fputs(string, stream) to
2990 fwrite(string, 1, len, stream). */
2991 if (!fn_fwrite)
2992 return false;
2994 gimple *repl
2995 = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
2996 fold_convert (size_type_node, len), arg1);
2997 replace_call_with_call_and_fold (gsi, repl);
2998 return true;
3000 default:
3001 gcc_unreachable ();
3005 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3006 DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
3007 the BUILT_IN_* code of the builtin.  Return true if the call was
3008 simplified and false otherwise.  */
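/* Editor's illustration (hypothetical inputs):

     __memcpy_chk (d, s, n, -1);    // folded to memcpy (d, s, n)
     __memcpy_chk (d, s, 8, 16);    // folded to memcpy: 8 <= 16
     __memset_chk (d, 0, n, 16);    // kept if n may exceed 16  */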
3011 static bool
3012 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
3013 tree dest, tree src, tree len, tree size,
3014 enum built_in_function fcode)
3016 gimple *stmt = gsi_stmt (*gsi);
3017 location_t loc = gimple_location (stmt);
3018 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3019 tree fn;
3021 /* If SRC and DEST are the same (and not volatile), return DEST
3022 (resp. DEST+LEN for __mempcpy_chk). */
3023 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3025 if (fcode != BUILT_IN_MEMPCPY_CHK)
3027 replace_call_with_value (gsi, dest);
3028 return true;
3030 else
3032 gimple_seq stmts = NULL;
3033 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3034 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3035 TREE_TYPE (dest), dest, len);
3036 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3037 replace_call_with_value (gsi, temp);
3038 return true;
3042 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3043 if (! integer_all_onesp (size)
3044 && !known_lower (stmt, len, size)
3045 && !known_lower (stmt, maxlen, size))
3047 /* Neither MAXLEN nor LEN can be proved to be less than SIZE; at
3048 least try to optimize (void) __mempcpy_chk () into
3049 (void) __memcpy_chk ().  */
3050 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3052 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3053 if (!fn)
3054 return false;
3056 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3057 replace_call_with_call_and_fold (gsi, repl);
3058 return true;
3060 return false;
3063 fn = NULL_TREE;
3064 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3065 mem{cpy,pcpy,move,set} is available. */
3066 switch (fcode)
3068 case BUILT_IN_MEMCPY_CHK:
3069 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3070 break;
3071 case BUILT_IN_MEMPCPY_CHK:
3072 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3073 break;
3074 case BUILT_IN_MEMMOVE_CHK:
3075 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3076 break;
3077 case BUILT_IN_MEMSET_CHK:
3078 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3079 break;
3080 default:
3081 break;
3084 if (!fn)
3085 return false;
3087 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3088 replace_call_with_call_and_fold (gsi, repl);
3089 return true;
3092 /* Print a message in the dump file recording transformation of FROM to TO. */
3094 static void
3095 dump_transformation (gcall *from, gcall *to)
3097 if (dump_enabled_p ())
3098 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3099 gimple_call_fn (from), gimple_call_fn (to));
3102 /* Fold a call to the __st[rp]cpy_chk builtin.
3103 DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
3104 BUILT_IN_* code of the builtin.  Return true if the call was
3105 simplified and false otherwise.  */
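/* Editor's illustration (hypothetical inputs, OS a known object size):

     __strcpy_chk (d, s, -1);   // folded to strcpy (d, s)
     __strcpy_chk (d, s, os);   // strlen (s) provably < OS:
                                //   strcpy (d, s)
                                // strlen (s) constant but not < OS:
                                //   __memcpy_chk (d, s, strlen (s) + 1, os)  */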
3108 static bool
3109 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3110 tree dest,
3111 tree src, tree size,
3112 enum built_in_function fcode)
3114 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3115 location_t loc = gimple_location (stmt);
3116 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3117 tree len, fn;
3119 /* If SRC and DEST are the same (and not volatile), return DEST. */
3120 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3122 /* Issue -Wrestrict unless the pointers are null (those do
3123 not point to objects and so do not indicate an overlap;
3124 such calls could be the result of sanitization and jump
3125 threading). */
3126 if (!integer_zerop (dest)
3127 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3129 tree func = gimple_call_fndecl (stmt);
3131 warning_at (loc, OPT_Wrestrict,
3132 "%qD source argument is the same as destination",
3133 func);
3136 replace_call_with_value (gsi, dest);
3137 return true;
3140 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3141 if (! integer_all_onesp (size))
3143 len = c_strlen (src, 1);
3144 if (!known_lower (stmt, len, size, true)
3145 && !known_lower (stmt, maxlen, size, true))
3147 if (fcode == BUILT_IN_STPCPY_CHK)
3149 if (! ignore)
3150 return false;
3152 /* If return value of __stpcpy_chk is ignored,
3153 optimize into __strcpy_chk. */
3154 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3155 if (!fn)
3156 return false;
3158 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3159 replace_call_with_call_and_fold (gsi, repl);
3160 return true;
3163 if (! len || TREE_SIDE_EFFECTS (len))
3164 return false;
3166 /* If c_strlen returned something, but not provably less than size,
3167 transform __strcpy_chk into __memcpy_chk. */
3168 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3169 if (!fn)
3170 return false;
3172 gimple_seq stmts = NULL;
3173 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3174 len = gimple_convert (&stmts, loc, size_type_node, len);
3175 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3176 build_int_cst (size_type_node, 1));
3177 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3178 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3179 replace_call_with_call_and_fold (gsi, repl);
3180 return true;
3184 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3185 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3186 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3187 if (!fn)
3188 return false;
3190 gcall *repl = gimple_build_call (fn, 2, dest, src);
3191 dump_transformation (stmt, repl);
3192 replace_call_with_call_and_fold (gsi, repl);
3193 return true;
3196 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
3197 SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code
3198 of the builtin.  Return true if the call was simplified and false
3199 otherwise.  */
3201 static bool
3202 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3203 tree dest, tree src,
3204 tree len, tree size,
3205 enum built_in_function fcode)
3207 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3208 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3209 tree fn;
3211 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3212 if (! integer_all_onesp (size)
3213 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3215 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3217 /* If return value of __stpncpy_chk is ignored,
3218 optimize into __strncpy_chk. */
3219 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3220 if (fn)
3222 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3223 replace_call_with_call_and_fold (gsi, repl);
3224 return true;
3227 return false;
3230 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3231 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3232 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3233 if (!fn)
3234 return false;
3236 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3237 dump_transformation (stmt, repl);
3238 replace_call_with_call_and_fold (gsi, repl);
3239 return true;
3242 /* Fold a call to the stpcpy builtin with arguments DEST and SRC.
3243 Return true if the call was simplified and false otherwise.  */
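/* Editor's illustration (hypothetical input, strlen (SRC) known to
   be 3):

     p = stpcpy (d, s);

   becomes

     memcpy (d, s, 4);
     p = d + 3;

   while stpcpy with an unused result is simply rewritten as strcpy.  */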
3245 static bool
3246 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3248 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3249 location_t loc = gimple_location (stmt);
3250 tree dest = gimple_call_arg (stmt, 0);
3251 tree src = gimple_call_arg (stmt, 1);
3252 tree fn, lenp1;
3254 /* If the result is unused, replace stpcpy with strcpy. */
3255 if (gimple_call_lhs (stmt) == NULL_TREE)
3257 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3258 if (!fn)
3259 return false;
3260 gimple_call_set_fndecl (stmt, fn);
3261 fold_stmt (gsi);
3262 return true;
3265 /* Set to non-null if ARG refers to an unterminated array. */
3266 c_strlen_data data = { };
3267 /* The size of the unterminated array if SRC refers to one.  */
3268 tree size;
3269 /* True if the size is exact/constant, false if it's the lower bound
3270 of a range. */
3271 bool exact;
3272 tree len = c_strlen (src, 1, &data, 1);
3273 if (!len
3274 || TREE_CODE (len) != INTEGER_CST)
3276 data.decl = unterminated_array (src, &size, &exact);
3277 if (!data.decl)
3278 return false;
3281 if (data.decl)
3283 /* Avoid folding calls with unterminated arrays. */
3284 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3285 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3286 exact);
3287 suppress_warning (stmt, OPT_Wstringop_overread);
3288 return false;
3291 if (optimize_function_for_size_p (cfun)
3292 /* If length is zero it's small enough. */
3293 && !integer_zerop (len))
3294 return false;
3296 /* If the source has a known length replace stpcpy with memcpy. */
3297 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3298 if (!fn)
3299 return false;
3301 gimple_seq stmts = NULL;
3302 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3303 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3304 tem, build_int_cst (size_type_node, 1));
3305 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3306 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3307 gimple_move_vops (repl, stmt);
3308 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3309 /* Replace the result with dest + len. */
3310 stmts = NULL;
3311 tem = gimple_convert (&stmts, loc, sizetype, len);
3312 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3313 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3314 POINTER_PLUS_EXPR, dest, tem);
3315 gsi_replace (gsi, ret, false);
3316 /* Finally fold the memcpy call. */
3317 gimple_stmt_iterator gsi2 = *gsi;
3318 gsi_prev (&gsi2);
3319 fold_stmt (&gsi2);
3320 return true;
3323 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by the
3324 GSI iterator.  Return false if a normal call should be emitted
3325 rather than simplifying the call inline.  FCODE is either
3326 BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */
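/* Editor's illustration (hypothetical input, FLAG of 0 and SIZE of -1
   so both guards are satisfied):

     __snprintf_chk (d, n, 0, -1, "%s: %d", s, i);

   becomes

     snprintf (d, n, "%s: %d", s, i);  */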
3329 static bool
3330 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3331 enum built_in_function fcode)
3333 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3334 tree dest, size, len, fn, fmt, flag;
3335 const char *fmt_str;
3337 /* Verify the required arguments in the original call. */
3338 if (gimple_call_num_args (stmt) < 5)
3339 return false;
3341 dest = gimple_call_arg (stmt, 0);
3342 len = gimple_call_arg (stmt, 1);
3343 flag = gimple_call_arg (stmt, 2);
3344 size = gimple_call_arg (stmt, 3);
3345 fmt = gimple_call_arg (stmt, 4);
3347 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3348 if (! integer_all_onesp (size)
3349 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3350 return false;
3352 if (!init_target_chars ())
3353 return false;
3355 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3356 or if format doesn't contain % chars or is "%s". */
3357 if (! integer_zerop (flag))
3359 fmt_str = c_getstr (fmt);
3360 if (fmt_str == NULL)
3361 return false;
3362 if (strchr (fmt_str, target_percent) != NULL
3363 && strcmp (fmt_str, target_percent_s))
3364 return false;
3367 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3368 available. */
3369 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3370 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3371 if (!fn)
3372 return false;
3374 /* Replace the called function and the first 5 arguments by 3,
3375 retaining the trailing varargs.  */
3376 gimple_call_set_fndecl (stmt, fn);
3377 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3378 gimple_call_set_arg (stmt, 0, dest);
3379 gimple_call_set_arg (stmt, 1, len);
3380 gimple_call_set_arg (stmt, 2, fmt);
3381 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3382 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3383 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3384 fold_stmt (gsi);
3385 return true;
3388 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by the
3389 GSI iterator.  Return false if a normal call should be emitted
3390 rather than simplifying the call inline.  FCODE is either
3391 BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
3393 static bool
3394 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3395 enum built_in_function fcode)
3397 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3398 tree dest, size, len, fn, fmt, flag;
3399 const char *fmt_str;
3400 unsigned nargs = gimple_call_num_args (stmt);
3402 /* Verify the required arguments in the original call. */
3403 if (nargs < 4)
3404 return false;
3405 dest = gimple_call_arg (stmt, 0);
3406 flag = gimple_call_arg (stmt, 1);
3407 size = gimple_call_arg (stmt, 2);
3408 fmt = gimple_call_arg (stmt, 3);
3410 len = NULL_TREE;
3412 if (!init_target_chars ())
3413 return false;
3415 /* Check whether the format is a literal string constant. */
3416 fmt_str = c_getstr (fmt);
3417 if (fmt_str != NULL)
3419 /* If the format doesn't contain % args or %%, we know the size. */
3420 if (strchr (fmt_str, target_percent) == 0)
3422 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3423 len = build_int_cstu (size_type_node, strlen (fmt_str));
3425 /* If the format is "%s" and first ... argument is a string literal,
3426 we know the size too. */
3427 else if (fcode == BUILT_IN_SPRINTF_CHK
3428 && strcmp (fmt_str, target_percent_s) == 0)
3430 tree arg;
3432 if (nargs == 5)
3434 arg = gimple_call_arg (stmt, 4);
3435 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3436 len = c_strlen (arg, 1);
3441 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3442 return false;
3444 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3445 or if format doesn't contain % chars or is "%s". */
3446 if (! integer_zerop (flag))
3448 if (fmt_str == NULL)
3449 return false;
3450 if (strchr (fmt_str, target_percent) != NULL
3451 && strcmp (fmt_str, target_percent_s))
3452 return false;
3455 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3456 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3457 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3458 if (!fn)
3459 return false;
3461 /* Replace the called function and the first 4 arguments by 2,
3462 retaining the trailing varargs.  */
3463 gimple_call_set_fndecl (stmt, fn);
3464 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3465 gimple_call_set_arg (stmt, 0, dest);
3466 gimple_call_set_arg (stmt, 1, fmt);
3467 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3468 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3469 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3470 fold_stmt (gsi);
3471 return true;
3474 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3475 ORIG may be null if this is a 2-argument call. We don't attempt to
3476 simplify calls with more than 3 arguments.
3478 Return true if simplification was possible, otherwise false. */
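/* Editor's illustration (hypothetical inputs):

     sprintf (d, "hello");    // folded to strcpy (d, "hello");
                              //   an lhs, if any, is set to 5
     sprintf (d, "%s", s);    // folded to strcpy (d, s); if the
                              //   result is used, strlen (s) must
                              //   be known so the lhs can be set  */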
3480 bool
3481 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3483 gimple *stmt = gsi_stmt (*gsi);
3485 /* Verify the required arguments in the original call. We deal with two
3486 types of sprintf() calls: 'sprintf (str, fmt)' and
3487 'sprintf (dest, "%s", orig)'. */
3488 if (gimple_call_num_args (stmt) > 3)
3489 return false;
3491 tree orig = NULL_TREE;
3492 if (gimple_call_num_args (stmt) == 3)
3493 orig = gimple_call_arg (stmt, 2);
3495 /* Check whether the format is a literal string constant. */
3496 tree fmt = gimple_call_arg (stmt, 1);
3497 const char *fmt_str = c_getstr (fmt);
3498 if (fmt_str == NULL)
3499 return false;
3501 tree dest = gimple_call_arg (stmt, 0);
3503 if (!init_target_chars ())
3504 return false;
3506 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3507 if (!fn)
3508 return false;
3510 /* If the format doesn't contain % args or %%, use strcpy. */
3511 if (strchr (fmt_str, target_percent) == NULL)
3513 /* Don't optimize sprintf (buf, "abc", ptr++). */
3514 if (orig)
3515 return false;
3517 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3518 'format' is known to contain no % formats. */
3519 gimple_seq stmts = NULL;
3520 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3522 /* Propagate the NO_WARNING bit to avoid issuing the same
3523 warning more than once. */
3524 copy_warning (repl, stmt);
3526 gimple_seq_add_stmt_without_update (&stmts, repl);
3527 if (tree lhs = gimple_call_lhs (stmt))
3529 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3530 strlen (fmt_str)));
3531 gimple_seq_add_stmt_without_update (&stmts, repl);
3532 gsi_replace_with_seq_vops (gsi, stmts);
3533 /* gsi now points at the assignment to the lhs, get a
3534 stmt iterator to the strcpy call.
3535 ??? We can't use gsi_for_stmt as that doesn't work when the
3536 CFG isn't built yet. */
3537 gimple_stmt_iterator gsi2 = *gsi;
3538 gsi_prev (&gsi2);
3539 fold_stmt (&gsi2);
3541 else
3543 gsi_replace_with_seq_vops (gsi, stmts);
3544 fold_stmt (gsi);
3546 return true;
3549 /* If the format is "%s", use strcpy (the source length must be known if the result is used).  */
3550 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3552 /* Don't crash on sprintf (str1, "%s"). */
3553 if (!orig)
3554 return false;
3556 /* Don't fold calls with source arguments of invalid (nonpointer)
3557 types. */
3558 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3559 return false;
3561 tree orig_len = NULL_TREE;
3562 if (gimple_call_lhs (stmt))
3564 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3565 if (!orig_len)
3566 return false;
3569 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3570 gimple_seq stmts = NULL;
3571 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3573 /* Propagate the NO_WARNING bit to avoid issuing the same
3574 warning more than once. */
3575 copy_warning (repl, stmt);
3577 gimple_seq_add_stmt_without_update (&stmts, repl);
3578 if (tree lhs = gimple_call_lhs (stmt))
3580 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3581 TREE_TYPE (orig_len)))
3582 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3583 repl = gimple_build_assign (lhs, orig_len);
3584 gimple_seq_add_stmt_without_update (&stmts, repl);
3585 gsi_replace_with_seq_vops (gsi, stmts);
3586 /* gsi now points at the assignment to the lhs, get a
3587 stmt iterator to the strcpy call.
3588 ??? We can't use gsi_for_stmt as that doesn't work when the
3589 CFG isn't built yet. */
3590 gimple_stmt_iterator gsi2 = *gsi;
3591 gsi_prev (&gsi2);
3592 fold_stmt (&gsi2);
3594 else
3596 gsi_replace_with_seq_vops (gsi, stmts);
3597 fold_stmt (gsi);
3599 return true;
3601 return false;
3604 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3605 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3606 attempt to simplify calls with more than 4 arguments.
3608 Return true if simplification was possible, otherwise false. */
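/* Editor's illustration (hypothetical inputs):

     snprintf (d, 8, "abc");     // folded to strcpy (d, "abc"): 3 < 8
     snprintf (d, 8, "%s", s);   // strlen (s) known < 8:
                                 //   strcpy (d, s)
     snprintf (d, 2, "abc");     // not folded: would truncate  */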
3610 bool
3611 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3613 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3614 tree dest = gimple_call_arg (stmt, 0);
3615 tree destsize = gimple_call_arg (stmt, 1);
3616 tree fmt = gimple_call_arg (stmt, 2);
3617 tree orig = NULL_TREE;
3618 const char *fmt_str = NULL;
3620 if (gimple_call_num_args (stmt) > 4)
3621 return false;
3623 if (gimple_call_num_args (stmt) == 4)
3624 orig = gimple_call_arg (stmt, 3);
3626 /* Check whether the format is a literal string constant. */
3627 fmt_str = c_getstr (fmt);
3628 if (fmt_str == NULL)
3629 return false;
3631 if (!init_target_chars ())
3632 return false;
3634 /* If the format doesn't contain % args or %%, use strcpy. */
3635 if (strchr (fmt_str, target_percent) == NULL)
3637 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3638 if (!fn)
3639 return false;
3641 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3642 if (orig)
3643 return false;
3645 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3647 /* We could expand this as
3648 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3649 or to
3650 memcpy (str, fmt_with_nul_at_cstm1, cst);
3651 but in the former case that might increase code size
3652 and in the latter case grow .rodata section too much.
3653 So punt for now. */
3654 if (!known_lower (stmt, len, destsize, true))
3655 return false;
3657 gimple_seq stmts = NULL;
3658 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3659 gimple_seq_add_stmt_without_update (&stmts, repl);
3660 if (tree lhs = gimple_call_lhs (stmt))
3662 repl = gimple_build_assign (lhs,
3663 fold_convert (TREE_TYPE (lhs), len));
3664 gimple_seq_add_stmt_without_update (&stmts, repl);
3665 gsi_replace_with_seq_vops (gsi, stmts);
3666 /* gsi now points at the assignment to the lhs, get a
3667 stmt iterator to the strcpy call.
3668 ??? We can't use gsi_for_stmt as that doesn't work when the
3669 CFG isn't built yet. */
3670 gimple_stmt_iterator gsi2 = *gsi;
3671 gsi_prev (&gsi2);
3672 fold_stmt (&gsi2);
3674 else
3676 gsi_replace_with_seq_vops (gsi, stmts);
3677 fold_stmt (gsi);
3679 return true;
3682 /* If the format is "%s", use strcpy, and if the result is used, compute it from the known length of the source. */
3683 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3685 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3686 if (!fn)
3687 return false;
3689 /* Don't crash on snprintf (str1, cst, "%s"). */
3690 if (!orig)
3691 return false;
3693 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3695 /* We could expand this as
3696 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3697 or to
3698 memcpy (str1, str2_with_nul_at_cstm1, cst);
3699 but in the former case that might increase code size
3700 and in the latter case grow .rodata section too much.
3701 So punt for now. */
3702 if (!known_lower (stmt, orig_len, destsize, true))
3703 return false;
3705 /* Convert snprintf (str1, cst, "%s", str2) into
3706 strcpy (str1, str2) if strlen (str2) < cst. */
3707 gimple_seq stmts = NULL;
3708 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3709 gimple_seq_add_stmt_without_update (&stmts, repl);
3710 if (tree lhs = gimple_call_lhs (stmt))
3712 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3713 TREE_TYPE (orig_len)))
3714 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3715 repl = gimple_build_assign (lhs, orig_len);
3716 gimple_seq_add_stmt_without_update (&stmts, repl);
3717 gsi_replace_with_seq_vops (gsi, stmts);
3718 /* gsi now points at the assignment to the lhs, get a
3719 stmt iterator to the strcpy call.
3720 ??? We can't use gsi_for_stmt as that doesn't work when the
3721 CFG isn't built yet. */
3722 gimple_stmt_iterator gsi2 = *gsi;
3723 gsi_prev (&gsi2);
3724 fold_stmt (&gsi2);
3726 else
3728 gsi_replace_with_seq_vops (gsi, stmts);
3729 fold_stmt (gsi);
3731 return true;
3733 return false;
3736 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3737 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3738 more than 3 arguments, and ARG may be null in the 2-argument case.
3740 Return true if simplification was possible, otherwise false.
3741 FCODE is the BUILT_IN_* code of the function to be
3742 simplified. */
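/* Illustrative folds performed below; all of them require the result
   of the fprintf call to be unused:
     fprintf (fp, "hello");    =>  fputs ("hello", fp);
     fprintf (fp, "%s", str);  =>  fputs (str, fp);
     fprintf (fp, "%c", ch);   =>  fputc (ch, fp);
     fprintf (fp, "");         =>  (call removed)  */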
3744 static bool
3745 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3746 tree fp, tree fmt, tree arg,
3747 enum built_in_function fcode)
3749 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3750 tree fn_fputc, fn_fputs;
3751 const char *fmt_str = NULL;
3753 /* If the return value is used, don't do the transformation. */
3754 if (gimple_call_lhs (stmt) != NULL_TREE)
3755 return false;
3757 /* Check whether the format is a literal string constant. */
3758 fmt_str = c_getstr (fmt);
3759 if (fmt_str == NULL)
3760 return false;
3762 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3764 /* If we're using an unlocked function, assume the other
3765 unlocked functions exist explicitly. */
3766 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3767 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3769 else
3771 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3772 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3775 if (!init_target_chars ())
3776 return false;
3778 /* If the format doesn't contain % args or %%, use fputs. */
3779 if (strchr (fmt_str, target_percent) == NULL)
3781 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3782 && arg)
3783 return false;
3785 /* If the format specifier was "", fprintf does nothing. */
3786 if (fmt_str[0] == '\0')
3788 replace_call_with_value (gsi, NULL_TREE);
3789 return true;
3792 /* When "string" doesn't contain %, replace all cases of
3793 fprintf (fp, string) with fputs (string, fp). The fputs
3794 builtin will take care of special cases like length == 1. */
3795 if (fn_fputs)
3797 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3798 replace_call_with_call_and_fold (gsi, repl);
3799 return true;
3803 /* The other optimizations can be done only on the non-va_list variants. */
3804 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3805 return false;
3807 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3808 else if (strcmp (fmt_str, target_percent_s) == 0)
3810 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3811 return false;
3812 if (fn_fputs)
3814 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3815 replace_call_with_call_and_fold (gsi, repl);
3816 return true;
3820 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3821 else if (strcmp (fmt_str, target_percent_c) == 0)
3823 if (!arg
3824 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3825 return false;
3826 if (fn_fputc)
3828 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3829 replace_call_with_call_and_fold (gsi, repl);
3830 return true;
3834 return false;
3837 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3838 FMT and ARG are the arguments to the call; we don't fold cases with
3839 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3841 Return true if simplification was possible, otherwise false.
3842 FCODE is the BUILT_IN_* code of the function to be
3843 simplified. */
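/* Illustrative folds performed below; all of them require the result
   of the printf call to be unused:
     printf ("x");        =>  putchar ('x');
     printf ("foo\n");    =>  puts ("foo");
     printf ("%s\n", s);  =>  puts (s);
     printf ("%c", c);    =>  putchar (c);  */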
3845 static bool
3846 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3847 tree arg, enum built_in_function fcode)
3849 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3850 tree fn_putchar, fn_puts, newarg;
3851 const char *fmt_str = NULL;
3853 /* If the return value is used, don't do the transformation. */
3854 if (gimple_call_lhs (stmt) != NULL_TREE)
3855 return false;
3857 /* Check whether the format is a literal string constant. */
3858 fmt_str = c_getstr (fmt);
3859 if (fmt_str == NULL)
3860 return false;
3862 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3864 /* If we're using an unlocked function, assume the other
3865 unlocked functions exist explicitly. */
3866 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3867 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3869 else
3871 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3872 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3875 if (!init_target_chars ())
3876 return false;
3878 if (strcmp (fmt_str, target_percent_s) == 0
3879 || strchr (fmt_str, target_percent) == NULL)
3881 const char *str;
3883 if (strcmp (fmt_str, target_percent_s) == 0)
3885 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3886 return false;
3888 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3889 return false;
3891 str = c_getstr (arg);
3892 if (str == NULL)
3893 return false;
3895 else
3897 /* The format specifier doesn't contain any '%' characters. */
3898 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3899 && arg)
3900 return false;
3901 str = fmt_str;
3904 /* If the string was "", printf does nothing. */
3905 if (str[0] == '\0')
3907 replace_call_with_value (gsi, NULL_TREE);
3908 return true;
3911 /* If the string has length of 1, call putchar. */
3912 if (str[1] == '\0')
3914 /* Given printf ("c"), where c is any one character,
3915 convert "c"[0] to an int and pass that to the replacement
3916 function. */
3917 newarg = build_int_cst (integer_type_node, str[0]);
3918 if (fn_putchar)
3920 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3921 replace_call_with_call_and_fold (gsi, repl);
3922 return true;
3925 else
3927 /* If the string was "string\n", call puts("string"). */
3928 size_t len = strlen (str);
3929 if ((unsigned char)str[len - 1] == target_newline
3930 && (size_t) (int) len == len
3931 && (int) len > 0)
3933 char *newstr;
3935 /* Create a NUL-terminated string that's one char shorter
3936 than the original, stripping off the trailing '\n'. */
3937 newstr = xstrdup (str);
3938 newstr[len - 1] = '\0';
3939 newarg = build_string_literal (len, newstr);
3940 free (newstr);
3941 if (fn_puts)
3943 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3944 replace_call_with_call_and_fold (gsi, repl);
3945 return true;
3948 else
3949 /* We'd like to arrange to call fputs(string,stdout) here,
3950 but we need stdout and don't have a way to get it yet. */
3951 return false;
3955 /* The other optimizations can be done only on the non-va_list variants. */
3956 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3957 return false;
3959 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3960 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3962 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3963 return false;
3964 if (fn_puts)
3966 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3967 replace_call_with_call_and_fold (gsi, repl);
3968 return true;
3972 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3973 else if (strcmp (fmt_str, target_percent_c) == 0)
3975 if (!arg || ! useless_type_conversion_p (integer_type_node,
3976 TREE_TYPE (arg)))
3977 return false;
3978 if (fn_putchar)
3980 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3981 replace_call_with_call_and_fold (gsi, repl);
3982 return true;
3986 return false;
3991 /* Fold a call to __builtin_strlen. Replace the call with a constant when the string length can be computed, otherwise record the range of possible lengths on the call's result. */
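/* E.g. n = strlen ("abc") folds to n = 3. When the length is not a
   single known constant, the call is kept but a range such as
   [0, maximum object size - 2] is recorded for its result via
   set_strlen_range.  */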
3993 static bool
3994 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3996 gimple *stmt = gsi_stmt (*gsi);
3997 tree arg = gimple_call_arg (stmt, 0);
3999 wide_int minlen;
4000 wide_int maxlen;
4002 c_strlen_data lendata = { };
4003 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4004 && !lendata.decl
4005 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4006 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4008 /* The range of lengths refers to either a single constant
4009 string or to the longest and shortest constant string
4010 referenced by the argument of the strlen() call, or to
4011 the strings that can possibly be stored in the arrays
4012 the argument refers to. */
4013 minlen = wi::to_wide (lendata.minlen);
4014 maxlen = wi::to_wide (lendata.maxlen);
4016 else
4018 unsigned prec = TYPE_PRECISION (sizetype);
4020 minlen = wi::shwi (0, prec);
4021 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4024 if (minlen == maxlen)
4026 /* Fold the strlen call to a constant. */
4027 tree type = TREE_TYPE (lendata.minlen);
4028 tree len = force_gimple_operand_gsi (gsi,
4029 wide_int_to_tree (type, minlen),
4030 true, NULL, true, GSI_SAME_STMT);
4031 replace_call_with_value (gsi, len);
4032 return true;
4035 /* Set the strlen() range to [0, MAXLEN]. */
4036 if (tree lhs = gimple_call_lhs (stmt))
4037 set_strlen_range (lhs, minlen, maxlen);
4039 return false;
4042 /* Fold a call to __builtin_acc_on_device. */
4044 static bool
4045 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4047 /* Defer folding until we know which compiler we're in. */
4048 if (symtab->state != EXPANSION)
4049 return false;
4051 unsigned val_host = GOMP_DEVICE_HOST;
4052 unsigned val_dev = GOMP_DEVICE_NONE;
4054 #ifdef ACCEL_COMPILER
4055 val_host = GOMP_DEVICE_NOT_HOST;
4056 val_dev = ACCEL_COMPILER_acc_device;
4057 #endif
4059 location_t loc = gimple_location (gsi_stmt (*gsi));
4061 tree host_eq = make_ssa_name (boolean_type_node);
4062 gimple *host_ass = gimple_build_assign
4063 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4064 gimple_set_location (host_ass, loc);
4065 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4067 tree dev_eq = make_ssa_name (boolean_type_node);
4068 gimple *dev_ass = gimple_build_assign
4069 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4070 gimple_set_location (dev_ass, loc);
4071 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4073 tree result = make_ssa_name (boolean_type_node);
4074 gimple *result_ass = gimple_build_assign
4075 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4076 gimple_set_location (result_ass, loc);
4077 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4079 replace_call_with_value (gsi, result);
4081 return true;
4084 /* Fold realloc (0, n) -> malloc (n). */
4086 static bool
4087 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4089 gimple *stmt = gsi_stmt (*gsi);
4090 tree arg = gimple_call_arg (stmt, 0);
4091 tree size = gimple_call_arg (stmt, 1);
4093 if (operand_equal_p (arg, null_pointer_node, 0))
4095 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4096 if (fn_malloc)
4098 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4099 replace_call_with_call_and_fold (gsi, repl);
4100 return true;
4103 return false;
4106 /* Number of bytes into which any type other than an aggregate,
4107 vector or _BitInt type should fit. */
4108 static constexpr size_t clear_padding_unit
4109 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4110 /* Buffer size on which __builtin_clear_padding folding code works. */
4111 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4113 /* Data passed through __builtin_clear_padding folding. */
4114 struct clear_padding_struct {
4115 location_t loc;
4116 /* 0 during __builtin_clear_padding folding, nonzero during
4117 clear_type_padding_in_mask. In that case, instead of clearing the
4118 non-padding bits in union_ptr array clear the padding bits in there. */
4119 bool clear_in_mask;
4120 tree base;
4121 tree alias_type;
4122 gimple_stmt_iterator *gsi;
4123 /* Alignment of buf->base + 0. */
4124 unsigned align;
4125 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4126 HOST_WIDE_INT off;
4127 /* Number of padding bytes before buf->off that don't have padding clear
4128 code emitted yet. */
4129 HOST_WIDE_INT padding_bytes;
4130 /* The size of the whole object. Never emit code to touch
4131 buf->base + buf->sz or following bytes. */
4132 HOST_WIDE_INT sz;
4133 /* Number of bytes recorded in buf->buf. */
4134 size_t size;
4135 /* When inside a union, instead of emitting code we AND the bits into
4136 the union_ptr array. */
4137 unsigned char *union_ptr;
4138 /* Set bits mean padding bits that need to be cleared by the builtin. */
4139 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4142 /* Emit code to clear the padding requested in BUF->buf; set bits
4143 there stand for padding that should be cleared. FULL is true
4144 if everything from the buffer should be flushed, otherwise
4145 it can leave up to 2 * clear_padding_unit bytes for further
4146 processing. */
4148 static void
4149 clear_padding_flush (clear_padding_struct *buf, bool full)
4151 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4152 if (!full && buf->size < 2 * clear_padding_unit)
4153 return;
4154 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4155 size_t end = buf->size;
4156 if (!full)
4157 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4158 * clear_padding_unit);
4159 size_t padding_bytes = buf->padding_bytes;
4160 if (buf->union_ptr)
4162 if (buf->clear_in_mask)
4164 /* During clear_type_padding_in_mask, clear the padding
4165 bits set in buf->buf in the buf->union_ptr mask. */
4166 for (size_t i = 0; i < end; i++)
4168 if (buf->buf[i] == (unsigned char) ~0)
4169 padding_bytes++;
4170 else
4172 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4173 0, padding_bytes);
4174 padding_bytes = 0;
4175 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4178 if (full)
4180 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4181 0, padding_bytes);
4182 buf->off = 0;
4183 buf->size = 0;
4184 buf->padding_bytes = 0;
4186 else
4188 memmove (buf->buf, buf->buf + end, buf->size - end);
4189 buf->off += end;
4190 buf->size -= end;
4191 buf->padding_bytes = padding_bytes;
4193 return;
4195 /* Inside of a union, instead of emitting any code, clear
4196 all bits in the union_ptr buffer that are clear
4197 in buf. Whole padding bytes don't clear anything. */
4198 for (size_t i = 0; i < end; i++)
4200 if (buf->buf[i] == (unsigned char) ~0)
4201 padding_bytes++;
4202 else
4204 padding_bytes = 0;
4205 buf->union_ptr[buf->off + i] &= buf->buf[i];
4208 if (full)
4210 buf->off = 0;
4211 buf->size = 0;
4212 buf->padding_bytes = 0;
4214 else
4216 memmove (buf->buf, buf->buf + end, buf->size - end);
4217 buf->off += end;
4218 buf->size -= end;
4219 buf->padding_bytes = padding_bytes;
4221 return;
4223 size_t wordsize = UNITS_PER_WORD;
4224 for (size_t i = 0; i < end; i += wordsize)
4226 size_t nonzero_first = wordsize;
4227 size_t nonzero_last = 0;
4228 size_t zero_first = wordsize;
4229 size_t zero_last = 0;
4230 bool all_ones = true, bytes_only = true;
4231 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4232 > (unsigned HOST_WIDE_INT) buf->sz)
4234 gcc_assert (wordsize > 1);
4235 wordsize /= 2;
4236 i -= wordsize;
4237 continue;
4239 for (size_t j = i; j < i + wordsize && j < end; j++)
4241 if (buf->buf[j])
4243 if (nonzero_first == wordsize)
4245 nonzero_first = j - i;
4246 nonzero_last = j - i;
4248 if (nonzero_last != j - i)
4249 all_ones = false;
4250 nonzero_last = j + 1 - i;
4252 else
4254 if (zero_first == wordsize)
4255 zero_first = j - i;
4256 zero_last = j + 1 - i;
4258 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4260 all_ones = false;
4261 bytes_only = false;
4264 size_t padding_end = i;
4265 if (padding_bytes)
4267 if (nonzero_first == 0
4268 && nonzero_last == wordsize
4269 && all_ones)
4271 /* All bits are padding and we had some padding
4272 before too. Just extend it. */
4273 padding_bytes += wordsize;
4274 continue;
4276 if (all_ones && nonzero_first == 0)
4278 padding_bytes += nonzero_last;
4279 padding_end += nonzero_last;
4280 nonzero_first = wordsize;
4281 nonzero_last = 0;
4283 else if (bytes_only && nonzero_first == 0)
4285 gcc_assert (zero_first && zero_first != wordsize);
4286 padding_bytes += zero_first;
4287 padding_end += zero_first;
4289 tree atype, src;
4290 if (padding_bytes == 1)
4292 atype = char_type_node;
4293 src = build_zero_cst (char_type_node);
4295 else
4297 atype = build_array_type_nelts (char_type_node, padding_bytes);
4298 src = build_constructor (atype, NULL);
4300 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4301 build_int_cst (buf->alias_type,
4302 buf->off + padding_end
4303 - padding_bytes));
4304 gimple *g = gimple_build_assign (dst, src);
4305 gimple_set_location (g, buf->loc);
4306 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4307 padding_bytes = 0;
4308 buf->padding_bytes = 0;
4310 if (nonzero_first == wordsize)
4311 /* All bits in a word are 0, there are no padding bits. */
4312 continue;
4313 if (all_ones && nonzero_last == wordsize)
4315 /* All bits between nonzero_first and end of word are padding
4316 bits, start counting padding_bytes. */
4317 padding_bytes = nonzero_last - nonzero_first;
4318 continue;
4320 if (bytes_only)
4322 /* If bitfields aren't involved in this word, prefer storing
4323 individual bytes or groups of them over performing an RMW
4324 operation on the whole word. */
4325 gcc_assert (i + zero_last <= end);
4326 for (size_t j = padding_end; j < i + zero_last; j++)
4328 if (buf->buf[j])
4330 size_t k;
4331 for (k = j; k < i + zero_last; k++)
4332 if (buf->buf[k] == 0)
4333 break;
4334 HOST_WIDE_INT off = buf->off + j;
4335 tree atype, src;
4336 if (k - j == 1)
4338 atype = char_type_node;
4339 src = build_zero_cst (char_type_node);
4341 else
4343 atype = build_array_type_nelts (char_type_node, k - j);
4344 src = build_constructor (atype, NULL);
4346 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4347 buf->base,
4348 build_int_cst (buf->alias_type, off));
4349 gimple *g = gimple_build_assign (dst, src);
4350 gimple_set_location (g, buf->loc);
4351 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4352 j = k;
4355 if (nonzero_last == wordsize)
4356 padding_bytes = nonzero_last - zero_last;
4357 continue;
4359 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4361 if (nonzero_last - nonzero_first <= eltsz
4362 && ((nonzero_first & ~(eltsz - 1))
4363 == ((nonzero_last - 1) & ~(eltsz - 1))))
4365 tree type;
4366 if (eltsz == 1)
4367 type = char_type_node;
4368 else
4369 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4371 size_t start = nonzero_first & ~(eltsz - 1);
4372 HOST_WIDE_INT off = buf->off + i + start;
4373 tree atype = type;
4374 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4375 atype = build_aligned_type (type, buf->align);
4376 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4377 build_int_cst (buf->alias_type, off));
4378 tree src;
4379 gimple *g;
4380 if (all_ones
4381 && nonzero_first == start
4382 && nonzero_last == start + eltsz)
4383 src = build_zero_cst (type);
4384 else
4386 src = make_ssa_name (type);
4387 tree tmp_dst = unshare_expr (dst);
4388 /* The folding introduces a read from tmp_dst; we should
4389 prevent the uninitialized-warning analysis from issuing a
4390 warning for such a fake read. In order to suppress the
4391 warning only for this expr, we set the location of tmp_dst
4392 to UNKNOWN_LOCATION first; suppress_warning will then call
4393 set_no_warning_bit to set the no_warning flag only for
4394 tmp_dst. */
4395 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4396 suppress_warning (tmp_dst, OPT_Wuninitialized);
4397 g = gimple_build_assign (src, tmp_dst);
4398 gimple_set_location (g, buf->loc);
4399 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4400 tree mask = native_interpret_expr (type,
4401 buf->buf + i + start,
4402 eltsz);
4403 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4404 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4405 tree src_masked = make_ssa_name (type);
4406 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4407 src, mask);
4408 gimple_set_location (g, buf->loc);
4409 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4410 src = src_masked;
4412 g = gimple_build_assign (dst, src);
4413 gimple_set_location (g, buf->loc);
4414 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4415 break;
4419 if (full)
4421 if (padding_bytes)
4423 tree atype, src;
4424 if (padding_bytes == 1)
4426 atype = char_type_node;
4427 src = build_zero_cst (char_type_node);
4429 else
4431 atype = build_array_type_nelts (char_type_node, padding_bytes);
4432 src = build_constructor (atype, NULL);
4434 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4435 build_int_cst (buf->alias_type,
4436 buf->off + end
4437 - padding_bytes));
4438 gimple *g = gimple_build_assign (dst, src);
4439 gimple_set_location (g, buf->loc);
4440 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4442 size_t end_rem = end % UNITS_PER_WORD;
4443 buf->off += end - end_rem;
4444 buf->size = end_rem;
4445 memset (buf->buf, 0, buf->size);
4446 buf->padding_bytes = 0;
4448 else
4450 memmove (buf->buf, buf->buf + end, buf->size - end);
4451 buf->off += end;
4452 buf->size -= end;
4453 buf->padding_bytes = padding_bytes;
4457 /* Append PADDING_BYTES padding bytes. */
4459 static void
4460 clear_padding_add_padding (clear_padding_struct *buf,
4461 HOST_WIDE_INT padding_bytes)
4463 if (padding_bytes == 0)
4464 return;
4465 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4466 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4467 clear_padding_flush (buf, false);
4468 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4469 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4471 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4472 padding_bytes -= clear_padding_buf_size - buf->size;
4473 buf->size = clear_padding_buf_size;
4474 clear_padding_flush (buf, false);
4475 gcc_assert (buf->padding_bytes);
4476 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4477 are guaranteed to be all ones. */
4478 padding_bytes += buf->size;
4479 buf->size = padding_bytes % UNITS_PER_WORD;
4480 memset (buf->buf, ~0, buf->size);
4481 buf->off += padding_bytes - buf->size;
4482 buf->padding_bytes += padding_bytes - buf->size;
4484 else
4486 memset (buf->buf + buf->size, ~0, padding_bytes);
4487 buf->size += padding_bytes;
4491 static void clear_padding_type (clear_padding_struct *, tree,
4492 HOST_WIDE_INT, bool);
4494 /* Clear padding bits of union type TYPE. */
4496 static void
4497 clear_padding_union (clear_padding_struct *buf, tree type,
4498 HOST_WIDE_INT sz, bool for_auto_init)
4500 clear_padding_struct *union_buf;
4501 HOST_WIDE_INT start_off = 0, next_off = 0;
4502 size_t start_size = 0;
4503 if (buf->union_ptr)
4505 start_off = buf->off + buf->size;
4506 next_off = start_off + sz;
4507 start_size = start_off % UNITS_PER_WORD;
4508 start_off -= start_size;
4509 clear_padding_flush (buf, true);
4510 union_buf = buf;
4512 else
4514 if (sz + buf->size > clear_padding_buf_size)
4515 clear_padding_flush (buf, false);
4516 union_buf = XALLOCA (clear_padding_struct);
4517 union_buf->loc = buf->loc;
4518 union_buf->clear_in_mask = buf->clear_in_mask;
4519 union_buf->base = NULL_TREE;
4520 union_buf->alias_type = NULL_TREE;
4521 union_buf->gsi = NULL;
4522 union_buf->align = 0;
4523 union_buf->off = 0;
4524 union_buf->padding_bytes = 0;
4525 union_buf->sz = sz;
4526 union_buf->size = 0;
4527 if (sz + buf->size <= clear_padding_buf_size)
4528 union_buf->union_ptr = buf->buf + buf->size;
4529 else
4530 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4531 memset (union_buf->union_ptr, ~0, sz);
4534 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4535 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4537 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4539 if (TREE_TYPE (field) == error_mark_node)
4540 continue;
4541 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4542 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4543 if (!buf->clear_in_mask && !for_auto_init)
4544 error_at (buf->loc, "flexible array member %qD does not have "
4545 "well defined padding bits for %qs",
4546 field, "__builtin_clear_padding");
4547 continue;
4549 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4550 gcc_assert (union_buf->size == 0);
4551 union_buf->off = start_off;
4552 union_buf->size = start_size;
4553 memset (union_buf->buf, ~0, start_size);
4554 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4555 clear_padding_add_padding (union_buf, sz - fldsz);
4556 clear_padding_flush (union_buf, true);
4559 if (buf == union_buf)
4561 buf->off = next_off;
4562 buf->size = next_off % UNITS_PER_WORD;
4563 buf->off -= buf->size;
4564 memset (buf->buf, ~0, buf->size);
4566 else if (sz + buf->size <= clear_padding_buf_size)
4567 buf->size += sz;
4568 else
4570 unsigned char *union_ptr = union_buf->union_ptr;
4571 while (sz)
4573 clear_padding_flush (buf, false);
4574 HOST_WIDE_INT this_sz
4575 = MIN ((unsigned HOST_WIDE_INT) sz,
4576 clear_padding_buf_size - buf->size);
4577 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4578 buf->size += this_sz;
4579 union_ptr += this_sz;
4580 sz -= this_sz;
4582 XDELETE (union_buf->union_ptr);
4586 /* The only known floating point formats with padding bits are the
4587 IEEE extended ones. */
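/* E.g. the x86 80-bit extended format (sign bit at position 79) is
   typically stored in 12 or 16 bytes, so the trailing 2 or 6 bytes
   are padding, which clear_padding_type discovers below via
   native_interpret_real/native_encode_expr.  */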
4589 static bool
4590 clear_padding_real_needs_padding_p (tree type)
4592 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4593 return (fmt->b == 2
4594 && fmt->signbit_ro == fmt->signbit_rw
4595 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4598 /* A _BitInt has padding bits if it isn't extended in the ABI and its
4599 precision is smaller than that of a limb or of the limbs it occupies. */
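/* E.g. with 64-bit limbs and no ABI extension, _BitInt(37) leaves
   bits 37..63 of its single limb as padding, while _BitInt(64) and
   _BitInt(128) have none.  */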
4601 static bool
4602 clear_padding_bitint_needs_padding_p (tree type)
4604 struct bitint_info info;
4605 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4606 gcc_assert (ok);
4607 if (info.extended)
4608 return false;
4609 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
4610 if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4611 return true;
4612 else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
4613 return false;
4614 else
4615 return (((unsigned) TYPE_PRECISION (type))
4616 % GET_MODE_PRECISION (limb_mode)) != 0;
4619 /* Return true if TYPE might contain any padding bits. */
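/* E.g. this is conservatively true for any struct or union type,
   true for x86 long double and for _BitInt(37), and false for plain
   int or int[4].  */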
4621 bool
4622 clear_padding_type_may_have_padding_p (tree type)
4624 switch (TREE_CODE (type))
4626 case RECORD_TYPE:
4627 case UNION_TYPE:
4628 return true;
4629 case ARRAY_TYPE:
4630 case COMPLEX_TYPE:
4631 case VECTOR_TYPE:
4632 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4633 case REAL_TYPE:
4634 return clear_padding_real_needs_padding_p (type);
4635 case BITINT_TYPE:
4636 return clear_padding_bitint_needs_padding_p (type);
4637 default:
4638 return false;
4642 /* Emit a runtime loop:
4643 for (; buf.base != end; buf.base += sz)
4644 __builtin_clear_padding (buf.base); */
4646 static void
4647 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4648 tree end, bool for_auto_init)
4650 tree l1 = create_artificial_label (buf->loc);
4651 tree l2 = create_artificial_label (buf->loc);
4652 tree l3 = create_artificial_label (buf->loc);
4653 gimple *g = gimple_build_goto (l2);
4654 gimple_set_location (g, buf->loc);
4655 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4656 g = gimple_build_label (l1);
4657 gimple_set_location (g, buf->loc);
4658 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4659 clear_padding_type (buf, type, buf->sz, for_auto_init);
4660 clear_padding_flush (buf, true);
4661 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4662 size_int (buf->sz));
4663 gimple_set_location (g, buf->loc);
4664 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4665 g = gimple_build_label (l2);
4666 gimple_set_location (g, buf->loc);
4667 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4668 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4669 gimple_set_location (g, buf->loc);
4670 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4671 g = gimple_build_label (l3);
4672 gimple_set_location (g, buf->loc);
4673 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4676 /* Clear padding bits for TYPE. Called recursively from
4677 gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4678 the __builtin_clear_padding was not written by the end user;
4679 instead, it was inserted by the compiler to initialize the
4680 padding of an automatic variable. In that case we should not
4681 emit error messages about flexible array members, which would
4682 only confuse the end user. */
4684 static void
4685 clear_padding_type (clear_padding_struct *buf, tree type,
4686 HOST_WIDE_INT sz, bool for_auto_init)
4688 switch (TREE_CODE (type))
4690 case RECORD_TYPE:
4691 HOST_WIDE_INT cur_pos;
4692 cur_pos = 0;
4693 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4694 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4696 tree ftype = TREE_TYPE (field);
4697 if (DECL_BIT_FIELD (field))
4699 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4700 if (fldsz == 0)
4701 continue;
4702 HOST_WIDE_INT pos = int_byte_position (field);
4703 if (pos >= sz)
4704 continue;
4705 HOST_WIDE_INT bpos
4706 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4707 bpos %= BITS_PER_UNIT;
4708 HOST_WIDE_INT end
4709 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4710 if (pos + end > cur_pos)
4712 clear_padding_add_padding (buf, pos + end - cur_pos);
4713 cur_pos = pos + end;
4715 gcc_assert (cur_pos > pos
4716 && ((unsigned HOST_WIDE_INT) buf->size
4717 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4718 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4719 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4720 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4721 " in %qs", "__builtin_clear_padding");
4722 else if (BYTES_BIG_ENDIAN)
4724 /* Big endian. */
4725 if (bpos + fldsz <= BITS_PER_UNIT)
4726 *p &= ~(((1 << fldsz) - 1)
4727 << (BITS_PER_UNIT - bpos - fldsz));
4728 else
4730 if (bpos)
4732 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4733 p++;
4734 fldsz -= BITS_PER_UNIT - bpos;
4736 memset (p, 0, fldsz / BITS_PER_UNIT);
4737 p += fldsz / BITS_PER_UNIT;
4738 fldsz %= BITS_PER_UNIT;
4739 if (fldsz)
4740 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4743 else
4745 /* Little endian. */
4746 if (bpos + fldsz <= BITS_PER_UNIT)
4747 *p &= ~(((1 << fldsz) - 1) << bpos);
4748 else
4750 if (bpos)
4752 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4753 p++;
4754 fldsz -= BITS_PER_UNIT - bpos;
4756 memset (p, 0, fldsz / BITS_PER_UNIT);
4757 p += fldsz / BITS_PER_UNIT;
4758 fldsz %= BITS_PER_UNIT;
4759 if (fldsz)
4760 *p &= ~((1 << fldsz) - 1);
4764 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4766 if (ftype == error_mark_node)
4767 continue;
4768 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4769 && !COMPLETE_TYPE_P (ftype));
4770 if (!buf->clear_in_mask && !for_auto_init)
4771 error_at (buf->loc, "flexible array member %qD does not "
4772 "have well defined padding bits for %qs",
4773 field, "__builtin_clear_padding");
4775 else if (is_empty_type (ftype))
4776 continue;
4777 else
4779 HOST_WIDE_INT pos = int_byte_position (field);
4780 if (pos >= sz)
4781 continue;
4782 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4783 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4784 clear_padding_add_padding (buf, pos - cur_pos);
4785 cur_pos = pos;
4786 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4787 ftype = asbase;
4788 clear_padding_type (buf, ftype, fldsz, for_auto_init);
4789 cur_pos += fldsz;
4792 gcc_assert (sz >= cur_pos);
4793 clear_padding_add_padding (buf, sz - cur_pos);
4794 break;
4795 case ARRAY_TYPE:
4796 HOST_WIDE_INT nelts, fldsz;
4797 fldsz = int_size_in_bytes (TREE_TYPE (type));
4798 if (fldsz == 0)
4799 break;
4800 nelts = sz / fldsz;
4801 if (nelts > 1
4802 && sz > 8 * UNITS_PER_WORD
4803 && buf->union_ptr == NULL
4804 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4806 /* For a sufficiently large array of more than one element,
4807 emit a runtime loop to keep code size manageable. */
4808 tree base = buf->base;
4809 unsigned int prev_align = buf->align;
4810 HOST_WIDE_INT off = buf->off + buf->size;
4811 HOST_WIDE_INT prev_sz = buf->sz;
4812 clear_padding_flush (buf, true);
4813 tree elttype = TREE_TYPE (type);
4814 buf->base = create_tmp_var (build_pointer_type (elttype));
4815 tree end = make_ssa_name (TREE_TYPE (buf->base));
4816 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4817 base, size_int (off));
4818 gimple_set_location (g, buf->loc);
4819 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4820 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4821 size_int (sz));
4822 gimple_set_location (g, buf->loc);
4823 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4824 buf->sz = fldsz;
4825 buf->align = TYPE_ALIGN (elttype);
4826 buf->off = 0;
4827 buf->size = 0;
4828 clear_padding_emit_loop (buf, elttype, end, for_auto_init);
4829 buf->base = base;
4830 buf->sz = prev_sz;
4831 buf->align = prev_align;
4832 buf->size = off % UNITS_PER_WORD;
4833 buf->off = off - buf->size;
4834 memset (buf->buf, 0, buf->size);
4835 break;
4837 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4838 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4839 break;
4840 case UNION_TYPE:
4841 clear_padding_union (buf, type, sz, for_auto_init);
4842 break;
4843 case REAL_TYPE:
4844 gcc_assert ((size_t) sz <= clear_padding_unit);
4845 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4846 clear_padding_flush (buf, false);
4847 if (clear_padding_real_needs_padding_p (type))
4849 /* Use native_interpret_real + native_encode_expr to figure out
4850 which bits are padding. */
4851 memset (buf->buf + buf->size, ~0, sz);
4852 tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4853 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4854 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4855 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4856 for (size_t i = 0; i < (size_t) sz; i++)
4857 buf->buf[buf->size + i] ^= ~0;
4859 else
4860 memset (buf->buf + buf->size, 0, sz);
4861 buf->size += sz;
4862 break;
4863 case COMPLEX_TYPE:
4864 fldsz = int_size_in_bytes (TREE_TYPE (type));
4865 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4866 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4867 break;
4868 case VECTOR_TYPE:
4869 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4870 fldsz = int_size_in_bytes (TREE_TYPE (type));
4871 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4872 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4873 break;
4874 case NULLPTR_TYPE:
4875 gcc_assert ((size_t) sz <= clear_padding_unit);
4876 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4877 clear_padding_flush (buf, false);
4878 memset (buf->buf + buf->size, ~0, sz);
4879 buf->size += sz;
4880 break;
4881 case BITINT_TYPE:
4883 struct bitint_info info;
4884 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4885 gcc_assert (ok);
4886 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
4887 if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
4889 gcc_assert ((size_t) sz <= clear_padding_unit);
4890 if ((unsigned HOST_WIDE_INT) sz + buf->size
4891 > clear_padding_buf_size)
4892 clear_padding_flush (buf, false);
4893 if (!info.extended
4894 && TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4896 int tprec = GET_MODE_PRECISION (limb_mode);
4897 int prec = TYPE_PRECISION (type);
4898 tree t = build_nonstandard_integer_type (tprec, 1);
4899 tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
4900 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4901 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4903 else
4904 memset (buf->buf + buf->size, 0, sz);
4905 buf->size += sz;
4906 break;
4908 tree limbtype
4909 = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
4910 fldsz = int_size_in_bytes (limbtype);
4911 nelts = int_size_in_bytes (type) / fldsz;
4912 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4914 if (!info.extended
4915 && i == (info.big_endian ? 0 : nelts - 1)
4916 && (((unsigned) TYPE_PRECISION (type))
4917 % TYPE_PRECISION (limbtype)) != 0)
4919 int tprec = GET_MODE_PRECISION (limb_mode);
4920 int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
4921 tree cst = wide_int_to_tree (limbtype,
4922 wi::mask (prec, true, tprec));
4923 int len = native_encode_expr (cst, buf->buf + buf->size,
4924 fldsz);
4925 gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
4926 buf->size += fldsz;
4928 else
4929 clear_padding_type (buf, limbtype, fldsz, for_auto_init);
4931 break;
4933 default:
4934 gcc_assert ((size_t) sz <= clear_padding_unit);
4935 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4936 clear_padding_flush (buf, false);
4937 memset (buf->buf + buf->size, 0, sz);
4938 buf->size += sz;
4939 break;
4943 /* Clear padding bits of TYPE in MASK. */
4945 void
4946 clear_type_padding_in_mask (tree type, unsigned char *mask)
4948 clear_padding_struct buf;
4949 buf.loc = UNKNOWN_LOCATION;
4950 buf.clear_in_mask = true;
4951 buf.base = NULL_TREE;
4952 buf.alias_type = NULL_TREE;
4953 buf.gsi = NULL;
4954 buf.align = 0;
4955 buf.off = 0;
4956 buf.padding_bytes = 0;
4957 buf.sz = int_size_in_bytes (type);
4958 buf.size = 0;
4959 buf.union_ptr = mask;
4960 clear_padding_type (&buf, type, buf.sz, false);
4961 clear_padding_flush (&buf, true);
4964 /* Fold __builtin_clear_padding builtin. */
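/* An illustrative use (S and s are hypothetical):
     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);
   expands below into stores that zero the bytes between c and i (on a
   typical ABI with 4-byte int alignment) while leaving the values of
   c and i untouched.  */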
4966 static bool
4967 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4969 gimple *stmt = gsi_stmt (*gsi);
4970 gcc_assert (gimple_call_num_args (stmt) == 2);
4971 tree ptr = gimple_call_arg (stmt, 0);
4972 tree typearg = gimple_call_arg (stmt, 1);
4973 /* The value of the 2nd argument of __builtin_clear_padding is used to
4974 distinguish whether this call was made by the user or inserted by the
4975 compiler for automatic variable initialization. */
4976 bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4977 tree type = TREE_TYPE (TREE_TYPE (typearg));
4978 location_t loc = gimple_location (stmt);
4979 clear_padding_struct buf;
4980 gimple_stmt_iterator gsiprev = *gsi;
4981 /* This should be folded during the lower pass. */
4982 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4983 gcc_assert (COMPLETE_TYPE_P (type));
4984 gsi_prev (&gsiprev);
4986 buf.loc = loc;
4987 buf.clear_in_mask = false;
4988 buf.base = ptr;
4989 buf.alias_type = NULL_TREE;
4990 buf.gsi = gsi;
4991 buf.align = get_pointer_alignment (ptr);
4992 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4993 buf.align = MAX (buf.align, talign);
4994 buf.off = 0;
4995 buf.padding_bytes = 0;
4996 buf.size = 0;
4997 buf.sz = int_size_in_bytes (type);
4998 buf.union_ptr = NULL;
4999 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
5000 sorry_at (loc, "%s not supported for variable length aggregates",
5001 "__builtin_clear_padding");
5002 /* The implementation currently assumes 8-bit host and target
5003 chars, which is the case for all currently supported targets and
5004 hosts and is required e.g. for the native_{encode,interpret}* APIs. */
5005 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
5006 sorry_at (loc, "%s not supported on this target",
5007 "__builtin_clear_padding");
5008 else if (!clear_padding_type_may_have_padding_p (type))
5010 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
5012 tree sz = TYPE_SIZE_UNIT (type);
5013 tree elttype = type;
5014 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
5015 while (TREE_CODE (elttype) == ARRAY_TYPE
5016 && int_size_in_bytes (elttype) < 0)
5017 elttype = TREE_TYPE (elttype);
5018 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
5019 gcc_assert (eltsz >= 0);
5020 if (eltsz)
5022 buf.base = create_tmp_var (build_pointer_type (elttype));
5023 tree end = make_ssa_name (TREE_TYPE (buf.base));
5024 gimple *g = gimple_build_assign (buf.base, ptr);
5025 gimple_set_location (g, loc);
5026 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5027 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
5028 gimple_set_location (g, loc);
5029 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5030 buf.sz = eltsz;
5031 buf.align = TYPE_ALIGN (elttype);
5032 buf.alias_type = build_pointer_type (elttype);
5033 clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
5036 else
5038 if (!is_gimple_mem_ref_addr (buf.base))
5040 buf.base = make_ssa_name (TREE_TYPE (ptr));
5041 gimple *g = gimple_build_assign (buf.base, ptr);
5042 gimple_set_location (g, loc);
5043 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5045 buf.alias_type = build_pointer_type (type);
5046 clear_padding_type (&buf, type, buf.sz, for_auto_init);
5047 clear_padding_flush (&buf, true);
5050 gimple_stmt_iterator gsiprev2 = *gsi;
5051 gsi_prev (&gsiprev2);
5052 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
5053 gsi_replace (gsi, gimple_build_nop (), true);
5054 else
5056 gsi_remove (gsi, true);
5057 *gsi = gsiprev2;
5059 return true;
5062 /* Fold the non-target builtin at *GSI and return whether any simplification
5063 was made. */
5065 static bool
5066 gimple_fold_builtin (gimple_stmt_iterator *gsi)
5068 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
5069 tree callee = gimple_call_fndecl (stmt);
5071 /* Give up for always_inline inline builtins until they are
5072 inlined. */
5073 if (avoid_folding_inline_builtin (callee))
5074 return false;
5076 unsigned n = gimple_call_num_args (stmt);
5077 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5078 switch (fcode)
5080 case BUILT_IN_BCMP:
5081 return gimple_fold_builtin_bcmp (gsi);
5082 case BUILT_IN_BCOPY:
5083 return gimple_fold_builtin_bcopy (gsi);
5084 case BUILT_IN_BZERO:
5085 return gimple_fold_builtin_bzero (gsi);
5087 case BUILT_IN_MEMSET:
5088 return gimple_fold_builtin_memset (gsi,
5089 gimple_call_arg (stmt, 1),
5090 gimple_call_arg (stmt, 2));
5091 case BUILT_IN_MEMCPY:
5092 case BUILT_IN_MEMPCPY:
5093 case BUILT_IN_MEMMOVE:
5094 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5095 gimple_call_arg (stmt, 1), fcode);
5096 case BUILT_IN_SPRINTF_CHK:
5097 case BUILT_IN_VSPRINTF_CHK:
5098 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5099 case BUILT_IN_STRCAT_CHK:
5100 return gimple_fold_builtin_strcat_chk (gsi);
5101 case BUILT_IN_STRNCAT_CHK:
5102 return gimple_fold_builtin_strncat_chk (gsi);
5103 case BUILT_IN_STRLEN:
5104 return gimple_fold_builtin_strlen (gsi);
5105 case BUILT_IN_STRCPY:
5106 return gimple_fold_builtin_strcpy (gsi,
5107 gimple_call_arg (stmt, 0),
5108 gimple_call_arg (stmt, 1));
5109 case BUILT_IN_STRNCPY:
5110 return gimple_fold_builtin_strncpy (gsi,
5111 gimple_call_arg (stmt, 0),
5112 gimple_call_arg (stmt, 1),
5113 gimple_call_arg (stmt, 2));
5114 case BUILT_IN_STRCAT:
5115 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5116 gimple_call_arg (stmt, 1));
5117 case BUILT_IN_STRNCAT:
5118 return gimple_fold_builtin_strncat (gsi);
5119 case BUILT_IN_INDEX:
5120 case BUILT_IN_STRCHR:
5121 return gimple_fold_builtin_strchr (gsi, false);
5122 case BUILT_IN_RINDEX:
5123 case BUILT_IN_STRRCHR:
5124 return gimple_fold_builtin_strchr (gsi, true);
5125 case BUILT_IN_STRSTR:
5126 return gimple_fold_builtin_strstr (gsi);
5127 case BUILT_IN_STRCMP:
5128 case BUILT_IN_STRCMP_EQ:
5129 case BUILT_IN_STRCASECMP:
5130 case BUILT_IN_STRNCMP:
5131 case BUILT_IN_STRNCMP_EQ:
5132 case BUILT_IN_STRNCASECMP:
5133 return gimple_fold_builtin_string_compare (gsi);
5134 case BUILT_IN_MEMCHR:
5135 return gimple_fold_builtin_memchr (gsi);
5136 case BUILT_IN_FPUTS:
5137 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5138 gimple_call_arg (stmt, 1), false);
5139 case BUILT_IN_FPUTS_UNLOCKED:
5140 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5141 gimple_call_arg (stmt, 1), true);
5142 case BUILT_IN_MEMCPY_CHK:
5143 case BUILT_IN_MEMPCPY_CHK:
5144 case BUILT_IN_MEMMOVE_CHK:
5145 case BUILT_IN_MEMSET_CHK:
5146 return gimple_fold_builtin_memory_chk (gsi,
5147 gimple_call_arg (stmt, 0),
5148 gimple_call_arg (stmt, 1),
5149 gimple_call_arg (stmt, 2),
5150 gimple_call_arg (stmt, 3),
5151 fcode);
5152 case BUILT_IN_STPCPY:
5153 return gimple_fold_builtin_stpcpy (gsi);
5154 case BUILT_IN_STRCPY_CHK:
5155 case BUILT_IN_STPCPY_CHK:
5156 return gimple_fold_builtin_stxcpy_chk (gsi,
5157 gimple_call_arg (stmt, 0),
5158 gimple_call_arg (stmt, 1),
5159 gimple_call_arg (stmt, 2),
5160 fcode);
5161 case BUILT_IN_STRNCPY_CHK:
5162 case BUILT_IN_STPNCPY_CHK:
5163 return gimple_fold_builtin_stxncpy_chk (gsi,
5164 gimple_call_arg (stmt, 0),
5165 gimple_call_arg (stmt, 1),
5166 gimple_call_arg (stmt, 2),
5167 gimple_call_arg (stmt, 3),
5168 fcode);
5169 case BUILT_IN_SNPRINTF_CHK:
5170 case BUILT_IN_VSNPRINTF_CHK:
5171 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5173 case BUILT_IN_FPRINTF:
5174 case BUILT_IN_FPRINTF_UNLOCKED:
5175 case BUILT_IN_VFPRINTF:
5176 if (n == 2 || n == 3)
5177 return gimple_fold_builtin_fprintf (gsi,
5178 gimple_call_arg (stmt, 0),
5179 gimple_call_arg (stmt, 1),
5180 n == 3
5181 ? gimple_call_arg (stmt, 2)
5182 : NULL_TREE,
5183 fcode);
5184 break;
5185 case BUILT_IN_FPRINTF_CHK:
5186 case BUILT_IN_VFPRINTF_CHK:
5187 if (n == 3 || n == 4)
5188 return gimple_fold_builtin_fprintf (gsi,
5189 gimple_call_arg (stmt, 0),
5190 gimple_call_arg (stmt, 2),
5191 n == 4
5192 ? gimple_call_arg (stmt, 3)
5193 : NULL_TREE,
5194 fcode);
5195 break;
5196 case BUILT_IN_PRINTF:
5197 case BUILT_IN_PRINTF_UNLOCKED:
5198 case BUILT_IN_VPRINTF:
5199 if (n == 1 || n == 2)
5200 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5201 n == 2
5202 ? gimple_call_arg (stmt, 1)
5203 : NULL_TREE, fcode);
5204 break;
5205 case BUILT_IN_PRINTF_CHK:
5206 case BUILT_IN_VPRINTF_CHK:
5207 if (n == 2 || n == 3)
5208 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5209 n == 3
5210 ? gimple_call_arg (stmt, 2)
5211 : NULL_TREE, fcode);
5212 break;
5213 case BUILT_IN_ACC_ON_DEVICE:
5214 return gimple_fold_builtin_acc_on_device (gsi,
5215 gimple_call_arg (stmt, 0));
5216 case BUILT_IN_REALLOC:
5217 return gimple_fold_builtin_realloc (gsi);
5219 case BUILT_IN_CLEAR_PADDING:
5220 return gimple_fold_builtin_clear_padding (gsi);
5222 default:;
5225 /* Try the generic builtin folder. */
5226 bool ignore = (gimple_call_lhs (stmt) == NULL);
5227 tree result = fold_call_stmt (stmt, ignore);
5228 if (result)
5230 if (ignore)
5231 STRIP_NOPS (result);
5232 else
5233 result = fold_convert (gimple_call_return_type (stmt), result);
5234 gimplify_and_update_call_from_tree (gsi, result);
5235 return true;
5238 return false;
5241 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5242 function calls to constants, where possible. */
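/* E.g. if the partitioned axis is known at compile time to have
   size 1, .GOACC_DIM_POS folds to 0 and .GOACC_DIM_SIZE folds
   to 1.  */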
5244 static tree
5245 fold_internal_goacc_dim (const gimple *call)
5247 int axis = oacc_get_ifn_dim_arg (call);
5248 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5249 tree result = NULL_TREE;
5250 tree type = TREE_TYPE (gimple_call_lhs (call));
5252 switch (gimple_call_internal_fn (call))
5254 case IFN_GOACC_DIM_POS:
5255 /* If the size is 1, we know the answer. */
5256 if (size == 1)
5257 result = build_int_cst (type, 0);
5258 break;
5259 case IFN_GOACC_DIM_SIZE:
5260 /* If the size is not dynamic, we know the answer. */
5261 if (size)
5262 result = build_int_cst (type, size);
5263 break;
5264 default:
5265 break;
5268 return result;
5271 /* Return true if STMT is an __atomic_compare_exchange_N call that is
5272 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when its second
5273 argument is &var and var is addressable only because of such calls. */
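/* E.g. for (an illustrative sketch)
     int e = ...;
     bool r = __atomic_compare_exchange_4 (p, &e, d, false, 5, 5);
   where the address of the local e escapes only into such calls, the
   predicate allows the rewrite done by
   fold_builtin_atomic_compare_exchange below, after which e no longer
   needs to live in memory.  */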
5275 bool
5276 optimize_atomic_compare_exchange_p (gimple *stmt)
5278 if (gimple_call_num_args (stmt) != 6
5279 || !flag_inline_atomics
5280 || !optimize
5281 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5282 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5283 || !gimple_vdef (stmt)
5284 || !gimple_vuse (stmt))
5285 return false;
5287 tree fndecl = gimple_call_fndecl (stmt);
5288 switch (DECL_FUNCTION_CODE (fndecl))
5290 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5291 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5292 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5293 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5294 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5295 break;
5296 default:
5297 return false;
5300 tree expected = gimple_call_arg (stmt, 1);
5301 if (TREE_CODE (expected) != ADDR_EXPR
5302 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5303 return false;
5305 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5306 if (!is_gimple_reg_type (etype)
5307 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5308 || TREE_THIS_VOLATILE (etype)
5309 || VECTOR_TYPE_P (etype)
5310 || TREE_CODE (etype) == COMPLEX_TYPE
5311 /* Don't optimize floating point expected vars; VIEW_CONVERT_EXPRs
5312 might not preserve all the bits. See PR71716. */
5313 || SCALAR_FLOAT_TYPE_P (etype)
5314 || maybe_ne (TYPE_PRECISION (etype),
5315 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5316 return false;
5318 tree weak = gimple_call_arg (stmt, 3);
5319 if (!integer_zerop (weak) && !integer_onep (weak))
5320 return false;
5322 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5323 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5324 machine_mode mode = TYPE_MODE (itype);
5326 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5327 == CODE_FOR_nothing
5328 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5329 return false;
5331 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5332 return false;
5334 return true;
5337 /* Fold
5338 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5339 into
5340 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5341 i = IMAGPART_EXPR <t>;
5342 r = (_Bool) i;
5343 e = REALPART_EXPR <t>; */
5345 void
5346 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5348 gimple *stmt = gsi_stmt (*gsi);
5349 tree fndecl = gimple_call_fndecl (stmt);
5350 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5351 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5352 tree ctype = build_complex_type (itype);
5353 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5354 bool throws = false;
5355 edge e = NULL;
5356 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5357 expected);
5358 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5359 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5360 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5362 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5363 build1 (VIEW_CONVERT_EXPR, itype,
5364 gimple_assign_lhs (g)));
5365 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5367 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5368 + int_size_in_bytes (itype);
5369 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5370 gimple_call_arg (stmt, 0),
5371 gimple_assign_lhs (g),
5372 gimple_call_arg (stmt, 2),
5373 build_int_cst (integer_type_node, flag),
5374 gimple_call_arg (stmt, 4),
5375 gimple_call_arg (stmt, 5));
5376 tree lhs = make_ssa_name (ctype);
5377 gimple_call_set_lhs (g, lhs);
5378 gimple_move_vops (g, stmt);
5379 tree oldlhs = gimple_call_lhs (stmt);
5380 if (stmt_can_throw_internal (cfun, stmt))
5382 throws = true;
5383 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5385 gimple_call_set_nothrow (as_a <gcall *> (g),
5386 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5387 gimple_call_set_lhs (stmt, NULL_TREE);
5388 gsi_replace (gsi, g, true);
5389 if (oldlhs)
5391 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5392 build1 (IMAGPART_EXPR, itype, lhs));
5393 if (throws)
5395 gsi_insert_on_edge_immediate (e, g);
5396 *gsi = gsi_for_stmt (g);
5398 else
5399 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5400 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5401 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5403 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5404 build1 (REALPART_EXPR, itype, lhs));
5405 if (throws && oldlhs == NULL_TREE)
5407 gsi_insert_on_edge_immediate (e, g);
5408 *gsi = gsi_for_stmt (g);
5410 else
5411 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5412 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5414 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5415 VIEW_CONVERT_EXPR,
5416 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5417 gimple_assign_lhs (g)));
5418 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5420 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5421 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5422 *gsi = gsiret;
5425 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
5426 signed precision, doesn't fit into TYPE. The overflow test is done
5427 regardless of -fwrapv, and even for unsigned types. */
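/* E.g. for an 8-bit unsigned TYPE, 200 + 100 overflows because 300
   needs 9 bits, and 0 - 1 overflows because the infinite-precision
   result is negative; neither answer depends on -fwrapv.  */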
5429 bool
5430 arith_overflowed_p (enum tree_code code, const_tree type,
5431 const_tree arg0, const_tree arg1)
5433 widest2_int warg0 = widest2_int_cst (arg0);
5434 widest2_int warg1 = widest2_int_cst (arg1);
5435 widest2_int wres;
5436 switch (code)
5438 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5439 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5440 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5441 default: gcc_unreachable ();
5443 signop sign = TYPE_SIGN (type);
5444 if (sign == UNSIGNED && wi::neg_p (wres))
5445 return true;
5446 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5449 /* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
5450 return a MEM_REF for the memory it references, otherwise return null.
5451 VECTYPE is the type of the memory vector. MASK_P indicates it's for
5452 MASK if true, otherwise it's for LEN. */
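/* E.g. an unconditional masked load such as
     lhs_1 = .MASK_LOAD (ptr_2, 32B, { -1, -1, -1, -1 });
   is turned by the callers below into a plain vector load
     lhs_1 = MEM <vector(4) int> [(int *) ptr_2];
   (an illustrative sketch; the exact IL depends on the target).  */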
5454 static tree
5455 gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
5457 tree ptr = gimple_call_arg (call, 0);
5458 tree alias_align = gimple_call_arg (call, 1);
5459 if (!tree_fits_uhwi_p (alias_align))
5460 return NULL_TREE;
5462 if (mask_p)
5464 tree mask = gimple_call_arg (call, 2);
5465 if (!integer_all_onesp (mask))
5466 return NULL_TREE;
5468 else
5470 internal_fn ifn = gimple_call_internal_fn (call);
5471 int len_index = internal_fn_len_index (ifn);
5472 tree basic_len = gimple_call_arg (call, len_index);
5473 if (!poly_int_tree_p (basic_len))
5474 return NULL_TREE;
5475 tree bias = gimple_call_arg (call, len_index + 1);
5476 gcc_assert (TREE_CODE (bias) == INTEGER_CST);
5477 /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
5478 we don't fold when (bias + len) != VF. */
5479 if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
5480 GET_MODE_NUNITS (TYPE_MODE (vectype))))
5481 return NULL_TREE;
5483 /* For MASK_LEN_{LOAD,STORE}, we should also check whether
5484 the mask is an all-ones mask. */
5485 if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
5487 tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
5488 if (!integer_all_onesp (mask))
5489 return NULL_TREE;
5493 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5494 if (TYPE_ALIGN (vectype) != align)
5495 vectype = build_aligned_type (vectype, align);
5496 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5497 return fold_build2 (MEM_REF, vectype, ptr, offset);
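/* Illustrative example (hypothetical GIMPLE): an unconditional masked
   load such as
     _1 = .MASK_LOAD (p_2, 32B, { -1, -1, -1, -1 });
   passes the all-ones mask check above, so this helper hands back the
   equivalent MEM <vector(4) int> [(int *)p_2] for the callers below to
   substitute.  */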
5500 /* Try to fold IFN_{MASK,LEN}_LOAD call CALL. Return true on success.
5501 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5503 static bool
5504 gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
5506 tree lhs = gimple_call_lhs (call);
5507 if (!lhs)
5508 return false;
5510 if (tree rhs
5511 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs), mask_p))
5513 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5514 gimple_set_location (new_stmt, gimple_location (call));
5515 gimple_move_vops (new_stmt, call);
5516 gsi_replace (gsi, new_stmt, false);
5517 return true;
5519 return false;
5522 /* Try to fold IFN_{MASK,LEN}_STORE call CALL. Return true on success.
5523 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5525 static bool
5526 gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
5527 bool mask_p)
5529 internal_fn ifn = gimple_call_internal_fn (call);
5530 tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
5531 if (tree lhs
5532 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
5534 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5535 gimple_set_location (new_stmt, gimple_location (call));
5536 gimple_move_vops (new_stmt, call);
5537 gsi_replace (gsi, new_stmt, false);
5538 return true;
5540 return false;
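/* The store side is symmetric (hypothetical GIMPLE): an unconditional
     .MASK_STORE (p_1, 32B, { -1, -1, -1, -1 }, v_2);
   becomes the plain assignment
     MEM <vector(4) int> [(int *)p_1] = v_2;
   with the virtual operands moved over from the call.  */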
5543 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5544 The statement may be replaced by another statement, e.g., if the call
5545 simplifies to a constant value. Return true if any changes were made.
5546 It is assumed that the operands have been previously folded. */
5548 static bool
5549 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5551 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5552 tree callee;
5553 bool changed = false;
5555 /* Check for virtual calls that became direct calls. */
5556 callee = gimple_call_fn (stmt);
5557 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5559 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5561 if (dump_file && virtual_method_call_p (callee)
5562 && !possible_polymorphic_call_target_p
5563 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5564 (OBJ_TYPE_REF_EXPR (callee)))))
5566 fprintf (dump_file,
5567 "Type inheritance inconsistent devirtualization of ");
5568 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5569 fprintf (dump_file, " to ");
5570 print_generic_expr (dump_file, callee, TDF_SLIM);
5571 fprintf (dump_file, "\n");
5574 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5575 changed = true;
5577 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5579 bool final;
5580 vec <cgraph_node *>targets
5581 = possible_polymorphic_call_targets (callee, stmt, &final);
5582 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5584 tree lhs = gimple_call_lhs (stmt);
5585 if (dump_enabled_p ())
5587 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5588 "folding virtual function call to %s\n",
5589 targets.length () == 1
5590 ? targets[0]->name ()
5591 : "__builtin_unreachable");
5593 if (targets.length () == 1)
5595 tree fndecl = targets[0]->decl;
5596 gimple_call_set_fndecl (stmt, fndecl);
5597 changed = true;
5598 /* If changing the call to __cxa_pure_virtual
5599 or a similar noreturn function, adjust gimple_call_fntype
5600 too. */
5601 if (gimple_call_noreturn_p (stmt)
5602 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5603 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5604 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5605 == void_type_node))
5606 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5607 /* If the call becomes noreturn, remove the lhs. */
5608 if (lhs
5609 && gimple_call_noreturn_p (stmt)
5610 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5611 || should_remove_lhs_p (lhs)))
5613 if (TREE_CODE (lhs) == SSA_NAME)
5615 tree var = create_tmp_var (TREE_TYPE (lhs));
5616 tree def = get_or_create_ssa_default_def (cfun, var);
5617 gimple *new_stmt = gimple_build_assign (lhs, def);
5618 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5620 gimple_call_set_lhs (stmt, NULL_TREE);
5622 maybe_remove_unused_call_args (cfun, stmt);
5624 else
5626 location_t loc = gimple_location (stmt);
5627 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
5628 gimple_call_set_ctrl_altering (new_stmt, false);
5629 /* If the call had an SSA name as lhs, morph that into
5630 an uninitialized value. */
5631 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5633 tree var = create_tmp_var (TREE_TYPE (lhs));
5634 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5635 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5636 set_ssa_default_def (cfun, var, lhs);
5638 gimple_move_vops (new_stmt, stmt);
5639 gsi_replace (gsi, new_stmt, false);
5640 return true;
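/* An illustrative sketch of the two outcomes above: a virtual call such
   as
     OBJ_TYPE_REF (_3; obj_4->0) (obj_4);
   with exactly one possible target is rewritten into a direct call to
   that target's decl, while an empty target set (the branch just
   handled) replaces the call with __builtin_unreachable ().  */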
5646 /* Check for indirect calls that became direct calls, and then
5647 no longer require a static chain. */
5648 if (gimple_call_chain (stmt))
5650 tree fn = gimple_call_fndecl (stmt);
5651 if (fn && !DECL_STATIC_CHAIN (fn))
5653 gimple_call_set_chain (stmt, NULL);
5654 changed = true;
5658 if (inplace)
5659 return changed;
5661 /* Check for builtins that CCP can handle using information not
5662 available in the generic fold routines. */
5663 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5665 if (gimple_fold_builtin (gsi))
5666 changed = true;
5668 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5670 changed |= targetm.gimple_fold_builtin (gsi);
5672 else if (gimple_call_internal_p (stmt))
5674 enum tree_code subcode = ERROR_MARK;
5675 tree result = NULL_TREE;
5676 bool cplx_result = false;
5677 bool uaddc_usubc = false;
5678 tree overflow = NULL_TREE;
5679 switch (gimple_call_internal_fn (stmt))
5681 case IFN_BUILTIN_EXPECT:
5682 result = fold_builtin_expect (gimple_location (stmt),
5683 gimple_call_arg (stmt, 0),
5684 gimple_call_arg (stmt, 1),
5685 gimple_call_arg (stmt, 2),
5686 NULL_TREE);
5687 break;
5688 case IFN_UBSAN_OBJECT_SIZE:
5690 tree offset = gimple_call_arg (stmt, 1);
5691 tree objsize = gimple_call_arg (stmt, 2);
5692 if (integer_all_onesp (objsize)
5693 || (TREE_CODE (offset) == INTEGER_CST
5694 && TREE_CODE (objsize) == INTEGER_CST
5695 && tree_int_cst_le (offset, objsize)))
5697 replace_call_with_value (gsi, NULL_TREE);
5698 return true;
5701 break;
5702 case IFN_UBSAN_PTR:
5703 if (integer_zerop (gimple_call_arg (stmt, 1)))
5705 replace_call_with_value (gsi, NULL_TREE);
5706 return true;
5708 break;
5709 case IFN_UBSAN_BOUNDS:
5711 tree index = gimple_call_arg (stmt, 1);
5712 tree bound = gimple_call_arg (stmt, 2);
5713 if (TREE_CODE (index) == INTEGER_CST
5714 && TREE_CODE (bound) == INTEGER_CST)
5716 index = fold_convert (TREE_TYPE (bound), index);
5717 if (TREE_CODE (index) == INTEGER_CST
5718 && tree_int_cst_lt (index, bound))
5720 replace_call_with_value (gsi, NULL_TREE);
5721 return true;
5725 break;
5726 case IFN_GOACC_DIM_SIZE:
5727 case IFN_GOACC_DIM_POS:
5728 result = fold_internal_goacc_dim (stmt);
5729 break;
5730 case IFN_UBSAN_CHECK_ADD:
5731 subcode = PLUS_EXPR;
5732 break;
5733 case IFN_UBSAN_CHECK_SUB:
5734 subcode = MINUS_EXPR;
5735 break;
5736 case IFN_UBSAN_CHECK_MUL:
5737 subcode = MULT_EXPR;
5738 break;
5739 case IFN_ADD_OVERFLOW:
5740 subcode = PLUS_EXPR;
5741 cplx_result = true;
5742 break;
5743 case IFN_SUB_OVERFLOW:
5744 subcode = MINUS_EXPR;
5745 cplx_result = true;
5746 break;
5747 case IFN_MUL_OVERFLOW:
5748 subcode = MULT_EXPR;
5749 cplx_result = true;
5750 break;
5751 case IFN_UADDC:
5752 subcode = PLUS_EXPR;
5753 cplx_result = true;
5754 uaddc_usubc = true;
5755 break;
5756 case IFN_USUBC:
5757 subcode = MINUS_EXPR;
5758 cplx_result = true;
5759 uaddc_usubc = true;
5760 break;
5761 case IFN_MASK_LOAD:
5762 changed |= gimple_fold_partial_load (gsi, stmt, true);
5763 break;
5764 case IFN_MASK_STORE:
5765 changed |= gimple_fold_partial_store (gsi, stmt, true);
5766 break;
5767 case IFN_LEN_LOAD:
5768 case IFN_MASK_LEN_LOAD:
5769 changed |= gimple_fold_partial_load (gsi, stmt, false);
5770 break;
5771 case IFN_LEN_STORE:
5772 case IFN_MASK_LEN_STORE:
5773 changed |= gimple_fold_partial_store (gsi, stmt, false);
5774 break;
5775 default:
5776 break;
5778 if (subcode != ERROR_MARK)
5780 tree arg0 = gimple_call_arg (stmt, 0);
5781 tree arg1 = gimple_call_arg (stmt, 1);
5782 tree arg2 = NULL_TREE;
5783 tree type = TREE_TYPE (arg0);
5784 if (cplx_result)
5786 tree lhs = gimple_call_lhs (stmt);
5787 if (lhs == NULL_TREE)
5788 type = NULL_TREE;
5789 else
5790 type = TREE_TYPE (TREE_TYPE (lhs));
5791 if (uaddc_usubc)
5792 arg2 = gimple_call_arg (stmt, 2);
5794 if (type == NULL_TREE)
5796 else if (uaddc_usubc)
5798 if (!integer_zerop (arg2))
5800 /* x = y + 0 + 0; x = y - 0 - 0; */
5801 else if (integer_zerop (arg1))
5802 result = arg0;
5803 /* x = 0 + y + 0; */
5804 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5805 result = arg1;
5806 /* x = y - y - 0; */
5807 else if (subcode == MINUS_EXPR
5808 && operand_equal_p (arg0, arg1, 0))
5809 result = integer_zero_node;
5811 /* x = y + 0; x = y - 0; x = y * 0; */
5812 else if (integer_zerop (arg1))
5813 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5814 /* x = 0 + y; x = 0 * y; */
5815 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5816 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5817 /* x = y - y; */
5818 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5819 result = integer_zero_node;
5820 /* x = y * 1; x = 1 * y; */
5821 else if (subcode == MULT_EXPR && integer_onep (arg1))
5822 result = arg0;
5823 else if (subcode == MULT_EXPR && integer_onep (arg0))
5824 result = arg1;
5825 if (result)
5827 if (result == integer_zero_node)
5828 result = build_zero_cst (type);
5829 else if (cplx_result && TREE_TYPE (result) != type)
5831 if (TREE_CODE (result) == INTEGER_CST)
5833 if (arith_overflowed_p (PLUS_EXPR, type, result,
5834 integer_zero_node))
5835 overflow = build_one_cst (type);
5837 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5838 && TYPE_UNSIGNED (type))
5839 || (TYPE_PRECISION (type)
5840 < (TYPE_PRECISION (TREE_TYPE (result))
5841 + (TYPE_UNSIGNED (TREE_TYPE (result))
5842 && !TYPE_UNSIGNED (type)))))
5843 result = NULL_TREE;
5844 if (result)
5845 result = fold_convert (type, result);
5850 if (result)
5852 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5853 result = drop_tree_overflow (result);
5854 if (cplx_result)
5856 if (overflow == NULL_TREE)
5857 overflow = build_zero_cst (TREE_TYPE (result));
5858 tree ctype = build_complex_type (TREE_TYPE (result));
5859 if (TREE_CODE (result) == INTEGER_CST
5860 && TREE_CODE (overflow) == INTEGER_CST)
5861 result = build_complex (ctype, result, overflow);
5862 else
5863 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5864 ctype, result, overflow);
5866 gimplify_and_update_call_from_tree (gsi, result);
5867 changed = true;
5871 return changed;
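/* Illustrative example of the internal-fn folding above: for
     _5 = .ADD_OVERFLOW (x_2, 0);
   the identity x = y + 0 applies and overflow is known to be zero, so
   the call is replaced by
     _5 = COMPLEX_EXPR <x_2, 0>;
   making REALPART_EXPR <_5> equal to x_2 and IMAGPART_EXPR <_5> 0.  */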
5875 /* Return true if NAME has a use on STMT. Note this can return
5876 false even though there's a use on STMT if SSA operands are not
5877 up-to-date. */
5879 static bool
5880 has_use_on_stmt (tree name, gimple *stmt)
5882 ssa_op_iter iter;
5883 tree op;
5884 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5885 if (op == name)
5886 return true;
5887 return false;
5890 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5891 gimple_simplify.
5893 Replaces *GSI with the simplification result in RES_OP
5894 and the associated statements in *SEQ. Does the replacement
5895 according to INPLACE and returns true if the operation succeeded. */
5897 static bool
5898 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5899 gimple_match_op *res_op,
5900 gimple_seq *seq, bool inplace)
5902 gimple *stmt = gsi_stmt (*gsi);
5903 tree *ops = res_op->ops;
5904 unsigned int num_ops = res_op->num_ops;
5906 /* Play safe and do not allow abnormals to be mentioned in
5907 newly created statements. See also maybe_push_res_to_seq.
5908 As an exception allow such uses if there was a use of the
5909 same SSA name on the old stmt. */
5910 for (unsigned int i = 0; i < num_ops; ++i)
5911 if (TREE_CODE (ops[i]) == SSA_NAME
5912 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5913 && !has_use_on_stmt (ops[i], stmt))
5914 return false;
5916 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5917 for (unsigned int i = 0; i < 2; ++i)
5918 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5919 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5920 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5921 return false;
5923 /* Don't insert new statements when INPLACE is true, even if we could
5924 reuse STMT for the final statement. */
5925 if (inplace && !gimple_seq_empty_p (*seq))
5926 return false;
5928 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5930 gcc_assert (res_op->code.is_tree_code ());
5931 auto code = tree_code (res_op->code);
5932 if (TREE_CODE_CLASS (code) == tcc_comparison
5933 /* A GIMPLE_COND's condition may not throw. */
5934 && (!flag_exceptions
5935 || !cfun->can_throw_non_call_exceptions
5936 || !operation_could_trap_p (code,
5937 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5938 false, NULL_TREE)))
5939 gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5940 else if (code == SSA_NAME)
5941 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5942 build_zero_cst (TREE_TYPE (ops[0])));
5943 else if (code == INTEGER_CST)
5945 if (integer_zerop (ops[0]))
5946 gimple_cond_make_false (cond_stmt);
5947 else
5948 gimple_cond_make_true (cond_stmt);
5950 else if (!inplace)
5952 tree res = maybe_push_res_to_seq (res_op, seq);
5953 if (!res)
5954 return false;
5955 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5956 build_zero_cst (TREE_TYPE (res)));
5958 else
5959 return false;
5960 if (dump_file && (dump_flags & TDF_DETAILS))
5962 fprintf (dump_file, "gimple_simplified to ");
5963 if (!gimple_seq_empty_p (*seq))
5964 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5965 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5966 0, TDF_SLIM);
5968 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5969 return true;
5971 else if (is_gimple_assign (stmt)
5972 && res_op->code.is_tree_code ())
5974 auto code = tree_code (res_op->code);
5975 if (!inplace
5976 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5978 maybe_build_generic_op (res_op);
5979 gimple_assign_set_rhs_with_ops (gsi, code,
5980 res_op->op_or_null (0),
5981 res_op->op_or_null (1),
5982 res_op->op_or_null (2));
5983 if (dump_file && (dump_flags & TDF_DETAILS))
5985 fprintf (dump_file, "gimple_simplified to ");
5986 if (!gimple_seq_empty_p (*seq))
5987 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5988 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5989 0, TDF_SLIM);
5991 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5992 return true;
5995 else if (res_op->code.is_fn_code ()
5996 && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
5998 gcc_assert (num_ops == gimple_call_num_args (stmt));
5999 for (unsigned int i = 0; i < num_ops; ++i)
6000 gimple_call_set_arg (stmt, i, ops[i]);
6001 if (dump_file && (dump_flags & TDF_DETAILS))
6003 fprintf (dump_file, "gimple_simplified to ");
6004 if (!gimple_seq_empty_p (*seq))
6005 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
6006 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
6008 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
6009 return true;
6011 else if (!inplace)
6013 if (gimple_has_lhs (stmt))
6015 tree lhs = gimple_get_lhs (stmt);
6016 if (!maybe_push_res_to_seq (res_op, seq, lhs))
6017 return false;
6018 if (dump_file && (dump_flags & TDF_DETAILS))
6020 fprintf (dump_file, "gimple_simplified to ");
6021 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
6023 gsi_replace_with_seq_vops (gsi, *seq);
6024 return true;
6026 else
6027 gcc_unreachable ();
6030 return false;
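/* Illustrative example of the GIMPLE_COND path above: when
   gimple_simplify reduces the condition of
     if (x_1 != x_1)
   (for integral x_1) to the constant 0, the INTEGER_CST branch calls
   gimple_cond_make_false, i.e. the statement becomes if (0 != 0).  */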
6033 /* Canonicalize MEM_REFs invariant address operand after propagation. */
6035 static bool
6036 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
6038 bool res = false;
6039 tree *orig_t = t;
6041 if (TREE_CODE (*t) == ADDR_EXPR)
6042 t = &TREE_OPERAND (*t, 0);
6044 /* The C and C++ frontends use an ARRAY_REF for indexing with their
6045 generic vector extension. The actual vector referenced is
6046 view-converted to an array type for this purpose. If the index
6047 is constant the canonical representation in the middle-end is a
6048 BIT_FIELD_REF so re-write the former to the latter here. */
6049 if (TREE_CODE (*t) == ARRAY_REF
6050 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
6051 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
6052 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
6054 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
6055 if (VECTOR_TYPE_P (vtype))
6057 tree low = array_ref_low_bound (*t);
6058 if (TREE_CODE (low) == INTEGER_CST)
6060 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
6062 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
6063 wi::to_widest (low));
6064 idx = wi::mul (idx, wi::to_widest
6065 (TYPE_SIZE (TREE_TYPE (*t))));
6066 widest_int ext
6067 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
6068 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
6070 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
6071 TREE_TYPE (*t),
6072 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
6073 TYPE_SIZE (TREE_TYPE (*t)),
6074 wide_int_to_tree (bitsizetype, idx));
6075 res = true;
6082 while (handled_component_p (*t))
6083 t = &TREE_OPERAND (*t, 0);
6085 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
6086 of invariant addresses into an SSA name MEM_REF address. */
6087 if (TREE_CODE (*t) == MEM_REF
6088 || TREE_CODE (*t) == TARGET_MEM_REF)
6090 tree addr = TREE_OPERAND (*t, 0);
6091 if (TREE_CODE (addr) == ADDR_EXPR
6092 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
6093 || handled_component_p (TREE_OPERAND (addr, 0))))
6095 tree base;
6096 poly_int64 coffset;
6097 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
6098 &coffset);
6099 if (!base)
6101 if (is_debug)
6102 return false;
6103 gcc_unreachable ();
6106 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
6107 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
6108 TREE_OPERAND (*t, 1),
6109 size_int (coffset));
6110 res = true;
6112 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
6113 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
6116 /* Canonicalize back MEM_REFs to plain reference trees if the object
6117 accessed is a decl that has the same access semantics as the MEM_REF. */
6118 if (TREE_CODE (*t) == MEM_REF
6119 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
6120 && integer_zerop (TREE_OPERAND (*t, 1))
6121 && MR_DEPENDENCE_CLIQUE (*t) == 0)
6123 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6124 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6125 if (/* Same volatile qualification. */
6126 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6127 /* Same TBAA behavior with -fstrict-aliasing. */
6128 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6129 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6130 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6131 /* Same alignment. */
6132 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6133 /* We have to be careful here not to drop a required conversion
6134 from the rhs to the lhs if *t appears on the lhs or vice-versa
6135 if it appears on the rhs. Thus require strict type
6136 compatibility. */
6137 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6139 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6140 res = true;
6144 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6145 && TREE_CODE (*t) == MEM_REF
6146 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6148 tree base;
6149 poly_int64 coffset;
6150 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6151 &coffset);
6152 if (base)
6154 gcc_assert (TREE_CODE (base) == MEM_REF);
6155 poly_int64 moffset;
6156 if (mem_ref_offset (base).to_shwi (&moffset))
6158 coffset += moffset;
6159 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6161 coffset += moffset;
6162 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6163 return true;
6169 /* Canonicalize TARGET_MEM_REF in particular with respect to
6170 the indexes becoming constant. */
6171 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6173 tree tem = maybe_fold_tmr (*t);
6174 if (tem)
6176 *t = tem;
6177 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6178 recompute_tree_invariant_for_addr_expr (*orig_t);
6179 res = true;
6183 return res;
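/* Illustrative example: after propagation of &a into a dereference,
   the code above turns
     MEM[(int *)&a, 0]
   back into the plain reference `a', provided the volatility, TBAA,
   alignment and type-compatibility checks all hold.  */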
6186 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6187 distinguishes both cases. */
6189 static bool
6190 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6192 bool changed = false;
6193 gimple *stmt = gsi_stmt (*gsi);
6194 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6195 unsigned i;
6196 fold_defer_overflow_warnings ();
6198 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6199 after propagation.
6200 ??? This shouldn't be done in generic folding but in the
6201 propagation helpers which also know whether an address was
6202 propagated.
6203 Also canonicalize operand order. */
6204 switch (gimple_code (stmt))
6206 case GIMPLE_ASSIGN:
6207 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6209 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6210 if ((REFERENCE_CLASS_P (*rhs)
6211 || TREE_CODE (*rhs) == ADDR_EXPR)
6212 && maybe_canonicalize_mem_ref_addr (rhs))
6213 changed = true;
6214 tree *lhs = gimple_assign_lhs_ptr (stmt);
6215 if (REFERENCE_CLASS_P (*lhs)
6216 && maybe_canonicalize_mem_ref_addr (lhs))
6217 changed = true;
6218 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6219 This cannot be done in maybe_canonicalize_mem_ref_addr
6220 as the resulting gimple has two operands rather than one,
6221 and for the same reason it cannot be done inplace. */
6224 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6226 tree inner = TREE_OPERAND (*rhs, 0);
6227 if (TREE_CODE (inner) == MEM_REF
6228 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6229 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6231 tree ptr = TREE_OPERAND (inner, 0);
6232 tree addon = TREE_OPERAND (inner, 1);
6233 addon = fold_convert (sizetype, addon);
6234 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6235 ptr, addon);
6236 changed = true;
6237 stmt = gsi_stmt (*gsi);
6241 else
6243 /* Canonicalize operand order. */
6244 enum tree_code code = gimple_assign_rhs_code (stmt);
6245 if (TREE_CODE_CLASS (code) == tcc_comparison
6246 || commutative_tree_code (code)
6247 || commutative_ternary_tree_code (code))
6249 tree rhs1 = gimple_assign_rhs1 (stmt);
6250 tree rhs2 = gimple_assign_rhs2 (stmt);
6251 if (tree_swap_operands_p (rhs1, rhs2))
6253 gimple_assign_set_rhs1 (stmt, rhs2);
6254 gimple_assign_set_rhs2 (stmt, rhs1);
6255 if (TREE_CODE_CLASS (code) == tcc_comparison)
6256 gimple_assign_set_rhs_code (stmt,
6257 swap_tree_comparison (code));
6258 changed = true;
6262 break;
6263 case GIMPLE_CALL:
6265 gcall *call = as_a<gcall *> (stmt);
6266 for (i = 0; i < gimple_call_num_args (call); ++i)
6268 tree *arg = gimple_call_arg_ptr (call, i);
6269 if (REFERENCE_CLASS_P (*arg)
6270 && maybe_canonicalize_mem_ref_addr (arg))
6271 changed = true;
6273 tree *lhs = gimple_call_lhs_ptr (call);
6274 if (*lhs
6275 && REFERENCE_CLASS_P (*lhs)
6276 && maybe_canonicalize_mem_ref_addr (lhs))
6277 changed = true;
6278 if (*lhs)
6280 combined_fn cfn = gimple_call_combined_fn (call);
6281 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6282 int opno = first_commutative_argument (ifn);
6283 if (opno >= 0)
6285 tree arg1 = gimple_call_arg (call, opno);
6286 tree arg2 = gimple_call_arg (call, opno + 1);
6287 if (tree_swap_operands_p (arg1, arg2))
6289 gimple_call_set_arg (call, opno, arg2);
6290 gimple_call_set_arg (call, opno + 1, arg1);
6291 changed = true;
6295 break;
6297 case GIMPLE_ASM:
6299 gasm *asm_stmt = as_a <gasm *> (stmt);
6300 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6302 tree link = gimple_asm_output_op (asm_stmt, i);
6303 tree op = TREE_VALUE (link);
6304 if (REFERENCE_CLASS_P (op)
6305 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6306 changed = true;
6308 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6310 tree link = gimple_asm_input_op (asm_stmt, i);
6311 tree op = TREE_VALUE (link);
6312 if ((REFERENCE_CLASS_P (op)
6313 || TREE_CODE (op) == ADDR_EXPR)
6314 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6315 changed = true;
6318 break;
6319 case GIMPLE_DEBUG:
6320 if (gimple_debug_bind_p (stmt))
6322 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6323 if (*val
6324 && (REFERENCE_CLASS_P (*val)
6325 || TREE_CODE (*val) == ADDR_EXPR)
6326 && maybe_canonicalize_mem_ref_addr (val, true))
6327 changed = true;
6329 break;
6330 case GIMPLE_COND:
6332 /* Canonicalize operand order. */
6333 tree lhs = gimple_cond_lhs (stmt);
6334 tree rhs = gimple_cond_rhs (stmt);
6335 if (tree_swap_operands_p (lhs, rhs))
6337 gcond *gc = as_a <gcond *> (stmt);
6338 gimple_cond_set_lhs (gc, rhs);
6339 gimple_cond_set_rhs (gc, lhs);
6340 gimple_cond_set_code (gc,
6341 swap_tree_comparison (gimple_cond_code (gc)));
6342 changed = true;
6345 default:;
6348 /* Dispatch to pattern-based folding. */
6349 if (!inplace
6350 || is_gimple_assign (stmt)
6351 || gimple_code (stmt) == GIMPLE_COND)
6353 gimple_seq seq = NULL;
6354 gimple_match_op res_op;
6355 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6356 valueize, valueize))
6358 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6359 changed = true;
6360 else
6361 gimple_seq_discard (seq);
6365 stmt = gsi_stmt (*gsi);
6367 /* Fold the main computation performed by the statement. */
6368 switch (gimple_code (stmt))
6370 case GIMPLE_ASSIGN:
6372 /* Try to canonicalize for boolean-typed X the comparisons
6373 X == 0, X == 1, X != 0, and X != 1. */
6374 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6375 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6377 tree lhs = gimple_assign_lhs (stmt);
6378 tree op1 = gimple_assign_rhs1 (stmt);
6379 tree op2 = gimple_assign_rhs2 (stmt);
6380 tree type = TREE_TYPE (op1);
6382 /* Check whether the comparison operands are of the same boolean
6383 type as the result type.
6384 Check that the second operand is an integer constant with value
6385 one or zero. */
6386 if (TREE_CODE (op2) == INTEGER_CST
6387 && (integer_zerop (op2) || integer_onep (op2))
6388 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6390 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6391 bool is_logical_not = false;
6393 /* X == 0 and X != 1 is a logical-not of X;
6394 X == 1 and X != 0 is X. */
6395 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6396 || (cmp_code == NE_EXPR && integer_onep (op2)))
6397 is_logical_not = true;
6399 if (is_logical_not == false)
6400 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6401 /* Only for X of one-bit precision is the transformation
6402 !X -> ~X valid. */
6403 else if (TYPE_PRECISION (type) == 1)
6404 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6405 /* Otherwise we use !X -> X ^ 1. */
6406 else
6407 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6408 build_int_cst (type, 1));
6409 changed = true;
6410 break;
6414 unsigned old_num_ops = gimple_num_ops (stmt);
6415 tree lhs = gimple_assign_lhs (stmt);
6416 tree new_rhs = fold_gimple_assign (gsi);
6417 if (new_rhs
6418 && !useless_type_conversion_p (TREE_TYPE (lhs),
6419 TREE_TYPE (new_rhs)))
6420 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6421 if (new_rhs
6422 && (!inplace
6423 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6425 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6426 changed = true;
6428 break;
6431 case GIMPLE_CALL:
6432 changed |= gimple_fold_call (gsi, inplace);
6433 break;
6435 case GIMPLE_DEBUG:
6436 if (gimple_debug_bind_p (stmt))
6438 tree val = gimple_debug_bind_get_value (stmt);
6439 if (val && REFERENCE_CLASS_P (val))
6441 tree tem = maybe_fold_reference (val);
6442 if (tem)
6444 gimple_debug_bind_set_value (stmt, tem);
6445 changed = true;
6449 break;
6451 case GIMPLE_RETURN:
6453 greturn *ret_stmt = as_a<greturn *> (stmt);
6454 tree ret = gimple_return_retval (ret_stmt);
6456 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6458 tree val = valueize (ret);
6459 if (val && val != ret
6460 && may_propagate_copy (ret, val))
6462 gimple_return_set_retval (ret_stmt, val);
6463 changed = true;
6467 break;
6469 default:;
6472 stmt = gsi_stmt (*gsi);
6474 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6475 return changed;
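/* Illustrative example of the operand-order canonicalization above:
   a condition written as
     if (5 < x_1)
   is swapped via tree_swap_operands_p and swap_tree_comparison into
     if (x_1 > 5)
   so later pattern matching only has to consider one form.  */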
6478 /* Valueization callback that ends up not following SSA edges. */
6480 tree
6481 no_follow_ssa_edges (tree)
6483 return NULL_TREE;
6486 /* Valueization callback that ends up following single-use SSA edges only. */
6488 tree
6489 follow_single_use_edges (tree val)
6491 if (TREE_CODE (val) == SSA_NAME
6492 && !has_single_use (val))
6493 return NULL_TREE;
6494 return val;
6497 /* Valueization callback that follows all SSA edges. */
6499 tree
6500 follow_all_ssa_edges (tree val)
6502 return val;
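/* Usage sketch (illustrative): these callbacks plug into
   gimple_simplify's valueization hook; e.g.
     fold_stmt (&gsi, follow_all_ssa_edges);
   lets simplification look through every defining statement, while
   no_follow_ssa_edges keeps folding local to the statement itself.  */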
6505 /* Fold the statement pointed to by GSI. In some cases, this function may
6506 replace the whole statement with a new one. Returns true iff folding
6507 makes any changes.
6508 The statement pointed to by GSI should be in valid gimple form but may
6509 be in an unfolded state resulting from, for example, constant
6510 propagation, which can produce *&x = 0. */
6512 bool
6513 fold_stmt (gimple_stmt_iterator *gsi)
6515 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6518 bool
6519 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6521 return fold_stmt_1 (gsi, false, valueize);
6524 /* Perform the minimal folding on statement *GSI. Only operations like
6525 *&x created by constant propagation are handled. The statement cannot
6526 be replaced with a new one. Return true if the statement was
6527 changed, false otherwise.
6528 The statement *GSI should be in valid gimple form but may
6529 be in an unfolded state resulting from, for example, constant
6530 propagation, which can produce *&x = 0. */
6532 bool
6533 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6535 gimple *stmt = gsi_stmt (*gsi);
6536 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6537 gcc_assert (gsi_stmt (*gsi) == stmt);
6538 return changed;
6541 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6542 if EXPR is null or we don't know how.
6543 If non-null, the result always has boolean type. */
6545 static tree
6546 canonicalize_bool (tree expr, bool invert)
6548 if (!expr)
6549 return NULL_TREE;
6550 else if (invert)
6552 if (integer_nonzerop (expr))
6553 return boolean_false_node;
6554 else if (integer_zerop (expr))
6555 return boolean_true_node;
6556 else if (TREE_CODE (expr) == SSA_NAME)
6557 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6558 build_int_cst (TREE_TYPE (expr), 0));
6559 else if (COMPARISON_CLASS_P (expr))
6560 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6561 boolean_type_node,
6562 TREE_OPERAND (expr, 0),
6563 TREE_OPERAND (expr, 1));
6564 else
6565 return NULL_TREE;
6567 else
6569 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6570 return expr;
6571 if (integer_nonzerop (expr))
6572 return boolean_true_node;
6573 else if (integer_zerop (expr))
6574 return boolean_false_node;
6575 else if (TREE_CODE (expr) == SSA_NAME)
6576 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6577 build_int_cst (TREE_TYPE (expr), 0));
6578 else if (COMPARISON_CLASS_P (expr))
6579 return fold_build2 (TREE_CODE (expr),
6580 boolean_type_node,
6581 TREE_OPERAND (expr, 0),
6582 TREE_OPERAND (expr, 1));
6583 else
6584 return NULL_TREE;
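/* Illustrative examples: canonicalize_bool (x_1 < y_2, true) builds
   the inverted comparison x_1 >= y_2 via invert_tree_comparison,
   while for a boolean SSA name b_3, canonicalize_bool (b_3, true)
   yields the comparison b_3 == 0.  */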
6588 /* Check to see if a boolean expression EXPR is logically equivalent to the
6589 comparison (OP1 CODE OP2). Check for various identities involving
6590 SSA_NAMEs. */
6592 static bool
6593 same_bool_comparison_p (const_tree expr, enum tree_code code,
6594 const_tree op1, const_tree op2)
6596 gimple *s;
6598 /* The obvious case. */
6599 if (TREE_CODE (expr) == code
6600 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6601 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6602 return true;
6604 /* Check for comparing (name, name != 0) and the case where expr
6605 is an SSA_NAME with a definition matching the comparison. */
6606 if (TREE_CODE (expr) == SSA_NAME
6607 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6609 if (operand_equal_p (expr, op1, 0))
6610 return ((code == NE_EXPR && integer_zerop (op2))
6611 || (code == EQ_EXPR && integer_nonzerop (op2)));
6612 s = SSA_NAME_DEF_STMT (expr);
6613 if (is_gimple_assign (s)
6614 && gimple_assign_rhs_code (s) == code
6615 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6616 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6617 return true;
6620 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6621 of name is a comparison, recurse. */
6622 if (TREE_CODE (op1) == SSA_NAME
6623 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6625 s = SSA_NAME_DEF_STMT (op1);
6626 if (is_gimple_assign (s)
6627 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6629 enum tree_code c = gimple_assign_rhs_code (s);
6630 if ((c == NE_EXPR && integer_zerop (op2))
6631 || (c == EQ_EXPR && integer_nonzerop (op2)))
6632 return same_bool_comparison_p (expr, c,
6633 gimple_assign_rhs1 (s),
6634 gimple_assign_rhs2 (s));
6635 if ((c == EQ_EXPR && integer_zerop (op2))
6636 || (c == NE_EXPR && integer_nonzerop (op2)))
6637 return same_bool_comparison_p (expr,
6638 invert_tree_comparison (c, false),
6639 gimple_assign_rhs1 (s),
6640 gimple_assign_rhs2 (s));
6643 return false;
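/* Illustrative example: with d_3 defined by d_3 = x_1 < y_2,
   same_bool_comparison_p (d_3, LT_EXPR, x_1, y_2) returns true via
   the definition-matching case above.  */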
6646 /* Check to see if two boolean expressions OP1 and OP2 are logically
6647 equivalent. */
6649 static bool
6650 same_bool_result_p (const_tree op1, const_tree op2)
6652 /* Simple cases first. */
6653 if (operand_equal_p (op1, op2, 0))
6654 return true;
6656 /* Check the cases where at least one of the operands is a comparison.
6657 These are a bit smarter than operand_equal_p in that they apply some
6658 identities on SSA_NAMEs. */
6659 if (COMPARISON_CLASS_P (op2)
6660 && same_bool_comparison_p (op1, TREE_CODE (op2),
6661 TREE_OPERAND (op2, 0),
6662 TREE_OPERAND (op2, 1)))
6663 return true;
6664 if (COMPARISON_CLASS_P (op1)
6665 && same_bool_comparison_p (op2, TREE_CODE (op1),
6666 TREE_OPERAND (op1, 0),
6667 TREE_OPERAND (op1, 1)))
6668 return true;
6670 /* Default case. */
6671 return false;
6674 /* Forward declarations for some mutually recursive functions. */
6676 static tree
6677 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6678 enum tree_code code2, tree op2a, tree op2b, basic_block);
6679 static tree
6680 and_var_with_comparison (tree type, tree var, bool invert,
6681 enum tree_code code2, tree op2a, tree op2b,
6682 basic_block);
6683 static tree
6684 and_var_with_comparison_1 (tree type, gimple *stmt,
6685 enum tree_code code2, tree op2a, tree op2b,
6686 basic_block);
6687 static tree
6688 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6689 enum tree_code code2, tree op2a, tree op2b,
6690 basic_block);
6691 static tree
6692 or_var_with_comparison (tree, tree var, bool invert,
6693 enum tree_code code2, tree op2a, tree op2b,
6694 basic_block);
6695 static tree
6696 or_var_with_comparison_1 (tree, gimple *stmt,
6697 enum tree_code code2, tree op2a, tree op2b,
6698 basic_block);
6700 /* Helper function for and_comparisons_1: try to simplify the AND of the
6701 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6702 If INVERT is true, invert the value of VAR before doing the AND.
6703 Return NULL_TREE if we can't simplify this to a single expression. */
6705 static tree
6706 and_var_with_comparison (tree type, tree var, bool invert,
6707 enum tree_code code2, tree op2a, tree op2b,
6708 basic_block outer_cond_bb)
6710 tree t;
6711 gimple *stmt = SSA_NAME_DEF_STMT (var);
6713 /* We can only deal with variables whose definitions are assignments. */
6714 if (!is_gimple_assign (stmt))
6715 return NULL_TREE;
6717 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6718 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6719 Then we only have to consider the simpler non-inverted cases. */
6720 if (invert)
6721 t = or_var_with_comparison_1 (type, stmt,
6722 invert_tree_comparison (code2, false),
6723 op2a, op2b, outer_cond_bb);
6724 else
6725 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6726 outer_cond_bb);
6727 return canonicalize_bool (t, invert);
6730 /* Try to simplify the AND of the ssa variable defined by the assignment
6731 STMT with the comparison specified by (OP2A CODE2 OP2B).
6732 Return NULL_TREE if we can't simplify this to a single expression. */
6734 static tree
6735 and_var_with_comparison_1 (tree type, gimple *stmt,
6736 enum tree_code code2, tree op2a, tree op2b,
6737 basic_block outer_cond_bb)
6739 tree var = gimple_assign_lhs (stmt);
6740 tree true_test_var = NULL_TREE;
6741 tree false_test_var = NULL_TREE;
6742 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6744 /* Check for identities like (var AND (var == 0)) => false. */
6745 if (TREE_CODE (op2a) == SSA_NAME
6746 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6748 if ((code2 == NE_EXPR && integer_zerop (op2b))
6749 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6751 true_test_var = op2a;
6752 if (var == true_test_var)
6753 return var;
6755 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6756 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6758 false_test_var = op2a;
6759 if (var == false_test_var)
6760 return boolean_false_node;
6764 /* If the definition is a comparison, recurse on it. */
6765 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6767 tree t = and_comparisons_1 (type, innercode,
6768 gimple_assign_rhs1 (stmt),
6769 gimple_assign_rhs2 (stmt),
6770 code2,
6771 op2a,
6772 op2b, outer_cond_bb);
6773 if (t)
6774 return t;
6777 /* If the definition is an AND or OR expression, we may be able to
6778 simplify by reassociating. */
6779 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6780 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6782 tree inner1 = gimple_assign_rhs1 (stmt);
6783 tree inner2 = gimple_assign_rhs2 (stmt);
6784 gimple *s;
6785 tree t;
6786 tree partial = NULL_TREE;
6787 bool is_and = (innercode == BIT_AND_EXPR);
6789 /* Check for boolean identities that don't require recursive examination
6790 of inner1/inner2:
6791 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6792 inner1 AND (inner1 OR inner2) => inner1
6793 !inner1 AND (inner1 AND inner2) => false
6794 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6795 Likewise for similar cases involving inner2. */
6796 if (inner1 == true_test_var)
6797 return (is_and ? var : inner1);
6798 else if (inner2 == true_test_var)
6799 return (is_and ? var : inner2);
6800 else if (inner1 == false_test_var)
6801 return (is_and
6802 ? boolean_false_node
6803 : and_var_with_comparison (type, inner2, false, code2, op2a,
6804 op2b, outer_cond_bb));
6805 else if (inner2 == false_test_var)
6806 return (is_and
6807 ? boolean_false_node
6808 : and_var_with_comparison (type, inner1, false, code2, op2a,
6809 op2b, outer_cond_bb));
6811 /* Next, redistribute/reassociate the AND across the inner tests.
6812 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6813 if (TREE_CODE (inner1) == SSA_NAME
6814 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6815 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6816 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6817 gimple_assign_rhs1 (s),
6818 gimple_assign_rhs2 (s),
6819 code2, op2a, op2b,
6820 outer_cond_bb)))
6822 /* Handle the AND case, where we are reassociating:
6823 (inner1 AND inner2) AND (op2a code2 op2b)
6824 => (t AND inner2)
6825 If the partial result t is a constant, we win. Otherwise
6826 continue on to try reassociating with the other inner test. */
6827 if (is_and)
6829 if (integer_onep (t))
6830 return inner2;
6831 else if (integer_zerop (t))
6832 return boolean_false_node;
6835 /* Handle the OR case, where we are redistributing:
6836 (inner1 OR inner2) AND (op2a code2 op2b)
6837 => (t OR (inner2 AND (op2a code2 op2b))) */
6838 else if (integer_onep (t))
6839 return boolean_true_node;
6841 /* Save partial result for later. */
6842 partial = t;
6845 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6846 if (TREE_CODE (inner2) == SSA_NAME
6847 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6848 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6849 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6850 gimple_assign_rhs1 (s),
6851 gimple_assign_rhs2 (s),
6852 code2, op2a, op2b,
6853 outer_cond_bb)))
6855 /* Handle the AND case, where we are reassociating:
6856 (inner1 AND inner2) AND (op2a code2 op2b)
6857 => (inner1 AND t) */
6858 if (is_and)
6860 if (integer_onep (t))
6861 return inner1;
6862 else if (integer_zerop (t))
6863 return boolean_false_node;
6864 /* If both are the same, we can apply the identity
6865 (x AND x) == x. */
6866 else if (partial && same_bool_result_p (t, partial))
6867 return t;
6870 /* Handle the OR case, where we are redistributing:
6871 (inner1 OR inner2) AND (op2a code2 op2b)
6872 => (t OR (inner1 AND (op2a code2 op2b)))
6873 => (t OR partial) */
6874 else
6876 if (integer_onep (t))
6877 return boolean_true_node;
6878 else if (partial)
6880 /* We already got a simplification for the other
6881 operand to the redistributed OR expression. The
6882 interesting case is when at least one is false.
6883 Or, if both are the same, we can apply the identity
6884 (x OR x) == x. */
6885 if (integer_zerop (partial))
6886 return t;
6887 else if (integer_zerop (t))
6888 return partial;
6889 else if (same_bool_result_p (t, partial))
6890 return t;
6895 return NULL_TREE;
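/* A concrete instance of the redistribution above (illustrative): for
   var = inner1 | inner2 with inner1 defined as x_1 < 0, ANDing var
   with x_1 >= 0 folds the first partial result
   (inner1 AND x_1 >= 0) to false, so the answer is whatever
   (inner2 AND x_1 >= 0) simplifies to, or NULL_TREE.  */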
6898 /* Try to simplify the AND of two comparisons defined by
6899 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6900 If this can be done without constructing an intermediate value,
6901 return the resulting tree; otherwise NULL_TREE is returned.
6902 This function is deliberately asymmetric as it recurses on SSA_DEFs
6903 in the first comparison but not the second. */
6905 static tree
6906 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6907 enum tree_code code2, tree op2a, tree op2b,
6908 basic_block outer_cond_bb)
6910 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6912 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6913 if (operand_equal_p (op1a, op2a, 0)
6914 && operand_equal_p (op1b, op2b, 0))
6916 /* Result will be either NULL_TREE, or a combined comparison. */
6917 tree t = combine_comparisons (UNKNOWN_LOCATION,
6918 TRUTH_ANDIF_EXPR, code1, code2,
6919 truth_type, op1a, op1b);
6920 if (t)
6921 return t;
6924 /* Likewise the swapped case of the above. */
6925 if (operand_equal_p (op1a, op2b, 0)
6926 && operand_equal_p (op1b, op2a, 0))
6928 /* Result will be either NULL_TREE, or a combined comparison. */
6929 tree t = combine_comparisons (UNKNOWN_LOCATION,
6930 TRUTH_ANDIF_EXPR, code1,
6931 swap_tree_comparison (code2),
6932 truth_type, op1a, op1b);
6933 if (t)
6934 return t;
6937 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6938 NAME's definition is a truth value. See if there are any simplifications
6939 that can be done against the NAME's definition. */
6940 if (TREE_CODE (op1a) == SSA_NAME
6941 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6942 && (integer_zerop (op1b) || integer_onep (op1b)))
6944 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6945 || (code1 == NE_EXPR && integer_onep (op1b)));
6946 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6947 switch (gimple_code (stmt))
6949 case GIMPLE_ASSIGN:
6950 /* Try to simplify by copy-propagating the definition. */
6951 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6952 op2b, outer_cond_bb);
6954 case GIMPLE_PHI:
6955 /* If every argument to the PHI produces the same result when
6956 ANDed with the second comparison, we win.
6957 Do not do this unless the type is bool since we need a bool
6958 result here anyway. */
6959 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6961 tree result = NULL_TREE;
6962 unsigned i;
6963 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6965 tree arg = gimple_phi_arg_def (stmt, i);
6967 /* If this PHI has itself as an argument, ignore it.
6968 If all the other args produce the same result,
6969 we're still OK. */
6970 if (arg == gimple_phi_result (stmt))
6971 continue;
6972 else if (TREE_CODE (arg) == INTEGER_CST)
6974 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6976 if (!result)
6977 result = boolean_false_node;
6978 else if (!integer_zerop (result))
6979 return NULL_TREE;
6981 else if (!result)
6982 result = fold_build2 (code2, boolean_type_node,
6983 op2a, op2b);
6984 else if (!same_bool_comparison_p (result,
6985 code2, op2a, op2b))
6986 return NULL_TREE;
6988 else if (TREE_CODE (arg) == SSA_NAME
6989 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6991 tree temp;
6992 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6993 /* In simple cases we can look through PHI nodes,
6994 but we have to be careful with loops.
6995 See PR49073. */
6996 if (! dom_info_available_p (CDI_DOMINATORS)
6997 || gimple_bb (def_stmt) == gimple_bb (stmt)
6998 || dominated_by_p (CDI_DOMINATORS,
6999 gimple_bb (def_stmt),
7000 gimple_bb (stmt)))
7001 return NULL_TREE;
7002 temp = and_var_with_comparison (type, arg, invert, code2,
7003 op2a, op2b,
7004 outer_cond_bb);
7005 if (!temp)
7006 return NULL_TREE;
7007 else if (!result)
7008 result = temp;
7009 else if (!same_bool_result_p (result, temp))
7010 return NULL_TREE;
7012 else
7013 return NULL_TREE;
7015 return result;
7018 default:
7019 break;
7022 return NULL_TREE;
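/* Illustrative example: calling and_comparisons_1 with code1 = LT_EXPR,
   code2 = GT_EXPR and identical operands hits the first
   operand_equal_p case above, where combine_comparisons folds
   (x < y) AND (x > y) to boolean false for integral operands.  */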
7025 static basic_block fosa_bb;
7026 static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
7027 static tree
7028 follow_outer_ssa_edges (tree val)
7030 if (TREE_CODE (val) == SSA_NAME
7031 && !SSA_NAME_IS_DEFAULT_DEF (val))
7033 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
7034 if (!def_bb
7035 || def_bb == fosa_bb
7036 || (dom_info_available_p (CDI_DOMINATORS)
7037 && (def_bb == fosa_bb
7038 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
7039 return val;
7040 /* We cannot temporarily rewrite stmts with undefined overflow
7041 behavior, so avoid expanding them. */
7042 if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
7043 || POINTER_TYPE_P (TREE_TYPE (val)))
7044 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
7045 return NULL_TREE;
7046 flow_sensitive_info_storage storage;
7047 storage.save_and_clear (val);
7048 /* If the definition does not dominate fosa_bb, temporarily reset
7049 flow-sensitive info. */
7050 fosa_unwind->safe_push (std::make_pair (val, storage));
7051 return val;
7053 return val;
7056 /* Helper function for maybe_fold_and_comparisons and
7057 maybe_fold_or_comparisons: try to simplify the AND/OR of the two
7058 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
7059 Return NULL_TREE if we can't simplify this to a single expression.
7060 To significantly lower the cost of building SSA names / gimple stmts
7061 we allocate them on the stack. This makes the code a bit ugly. */
7063 static tree
7064 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
7065 enum tree_code code1,
7066 tree op1a, tree op1b,
7067 enum tree_code code2, tree op2a,
7068 tree op2b,
7069 basic_block outer_cond_bb)
7071 /* Allocate gimple stmt1 on the stack. */
7072 gassign *stmt1
7073 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7074 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
7075 gimple_assign_set_rhs_code (stmt1, code1);
7076 gimple_assign_set_rhs1 (stmt1, op1a);
7077 gimple_assign_set_rhs2 (stmt1, op1b);
7078 gimple_set_bb (stmt1, NULL);
7080 /* Allocate gimple stmt2 on the stack. */
7081 gassign *stmt2
7082 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7083 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
7084 gimple_assign_set_rhs_code (stmt2, code2);
7085 gimple_assign_set_rhs1 (stmt2, op2a);
7086 gimple_assign_set_rhs2 (stmt2, op2b);
7087 gimple_set_bb (stmt2, NULL);
7089 /* Allocate SSA name (lhs1) on the stack. */
7090 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
7091 memset (lhs1, 0, sizeof (tree_ssa_name));
7092 TREE_SET_CODE (lhs1, SSA_NAME);
7093 TREE_TYPE (lhs1) = type;
7094 init_ssa_name_imm_use (lhs1);
7096 /* Allocate SSA name (lhs2) on the stack. */
7097 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
7098 memset (lhs2, 0, sizeof (tree_ssa_name));
7099 TREE_SET_CODE (lhs2, SSA_NAME);
7100 TREE_TYPE (lhs2) = type;
7101 init_ssa_name_imm_use (lhs2);
7103 gimple_assign_set_lhs (stmt1, lhs1);
7104 gimple_assign_set_lhs (stmt2, lhs2);
7106 gimple_match_op op (gimple_match_cond::UNCOND, code,
7107 type, gimple_assign_lhs (stmt1),
7108 gimple_assign_lhs (stmt2));
7109 fosa_bb = outer_cond_bb;
7110 auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
7111 fosa_unwind = &unwind_stack;
7112 if (op.resimplify (NULL, (!outer_cond_bb
7113 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
7115 fosa_unwind = NULL;
7116 for (auto p : unwind_stack)
7117 p.second.restore (p.first);
7118 if (gimple_simplified_result_is_gimple_val (&op))
7120 tree res = op.ops[0];
7121 if (res == lhs1)
7122 return build2 (code1, type, op1a, op1b);
7123 else if (res == lhs2)
7124 return build2 (code2, type, op2a, op2b);
7125 else
7126 return res;
7128 else if (op.code.is_tree_code ()
7129 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
7131 tree op0 = op.ops[0];
7132 tree op1 = op.ops[1];
7133 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
7134 return NULL_TREE; /* not simple */
7136 return build2 ((enum tree_code)op.code, op.type, op0, op1);
7139 fosa_unwind = NULL;
7140 for (auto p : unwind_stack)
7141 p.second.restore (p.first);
7143 return NULL_TREE;
7146 /* Try to simplify the AND of two comparisons, specified by
7147 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7148 If this can be simplified to a single expression (without requiring
7149 introducing more SSA variables to hold intermediate values),
7150 return the resulting tree. Otherwise return NULL_TREE.
7151 If the result expression is non-null, it has boolean type. */
7153 tree
7154 maybe_fold_and_comparisons (tree type,
7155 enum tree_code code1, tree op1a, tree op1b,
7156 enum tree_code code2, tree op2a, tree op2b,
7157 basic_block outer_cond_bb)
7159 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7160 outer_cond_bb))
7161 return t;
7163 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7164 outer_cond_bb))
7165 return t;
7167 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7168 op1a, op1b, code2, op2a,
7169 op2b, outer_cond_bb))
7170 return t;
7172 return NULL_TREE;
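/* Usage note (illustrative): passes such as ifcombine use
   maybe_fold_and_comparisons to merge two guarding conditions; e.g.
   (x_1 < 5) AND (x_1 >= 5) folds to false here, letting the caller
   collapse the guarded blocks.  */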
7175 /* Helper function for or_comparisons_1: try to simplify the OR of the
7176 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7177 If INVERT is true, invert the value of VAR before doing the OR.
7178 Return NULL_TREE if we can't simplify this to a single expression. */
7180 static tree
7181 or_var_with_comparison (tree type, tree var, bool invert,
7182 enum tree_code code2, tree op2a, tree op2b,
7183 basic_block outer_cond_bb)
7185 tree t;
7186 gimple *stmt = SSA_NAME_DEF_STMT (var);
7188 /* We can only deal with variables whose definitions are assignments. */
7189 if (!is_gimple_assign (stmt))
7190 return NULL_TREE;
7192 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7193 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7194 Then we only have to consider the simpler non-inverted cases. */
7195 if (invert)
7196 t = and_var_with_comparison_1 (type, stmt,
7197 invert_tree_comparison (code2, false),
7198 op2a, op2b, outer_cond_bb);
7199 else
7200 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7201 outer_cond_bb);
7202 return canonicalize_bool (t, invert);
7205 /* Try to simplify the OR of the ssa variable defined by the assignment
7206 STMT with the comparison specified by (OP2A CODE2 OP2B).
7207 Return NULL_TREE if we can't simplify this to a single expression. */
7209 static tree
7210 or_var_with_comparison_1 (tree type, gimple *stmt,
7211 enum tree_code code2, tree op2a, tree op2b,
7212 basic_block outer_cond_bb)
7214 tree var = gimple_assign_lhs (stmt);
7215 tree true_test_var = NULL_TREE;
7216 tree false_test_var = NULL_TREE;
7217 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7219 /* Check for identities like (var OR (var != 0)) => true. */
7220 if (TREE_CODE (op2a) == SSA_NAME
7221 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7223 if ((code2 == NE_EXPR && integer_zerop (op2b))
7224 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7226 true_test_var = op2a;
7227 if (var == true_test_var)
7228 return var;
7230 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7231 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7233 false_test_var = op2a;
7234 if (var == false_test_var)
7235 return boolean_true_node;
7239 /* If the definition is a comparison, recurse on it. */
7240 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7242 tree t = or_comparisons_1 (type, innercode,
7243 gimple_assign_rhs1 (stmt),
7244 gimple_assign_rhs2 (stmt),
7245 code2, op2a, op2b, outer_cond_bb);
7246 if (t)
7247 return t;
7250 /* If the definition is an AND or OR expression, we may be able to
7251 simplify by reassociating. */
7252 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7253 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7255 tree inner1 = gimple_assign_rhs1 (stmt);
7256 tree inner2 = gimple_assign_rhs2 (stmt);
7257 gimple *s;
7258 tree t;
7259 tree partial = NULL_TREE;
7260 bool is_or = (innercode == BIT_IOR_EXPR);
7262 /* Check for boolean identities that don't require recursive examination
7263 of inner1/inner2:
7264 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7265 inner1 OR (inner1 AND inner2) => inner1
7266 !inner1 OR (inner1 OR inner2) => true
7267 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
7269 if (inner1 == true_test_var)
7270 return (is_or ? var : inner1);
7271 else if (inner2 == true_test_var)
7272 return (is_or ? var : inner2);
7273 else if (inner1 == false_test_var)
7274 return (is_or
7275 ? boolean_true_node
7276 : or_var_with_comparison (type, inner2, false, code2, op2a,
7277 op2b, outer_cond_bb));
7278 else if (inner2 == false_test_var)
7279 return (is_or
7280 ? boolean_true_node
7281 : or_var_with_comparison (type, inner1, false, code2, op2a,
7282 op2b, outer_cond_bb));
7284 /* Next, redistribute/reassociate the OR across the inner tests.
7285 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7286 if (TREE_CODE (inner1) == SSA_NAME
7287 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7288 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7289 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7290 gimple_assign_rhs1 (s),
7291 gimple_assign_rhs2 (s),
7292 code2, op2a, op2b,
7293 outer_cond_bb)))
7295 /* Handle the OR case, where we are reassociating:
7296 (inner1 OR inner2) OR (op2a code2 op2b)
7297 => (t OR inner2)
7298 If the partial result t is a constant, we win. Otherwise
7299 continue on to try reassociating with the other inner test. */
7300 if (is_or)
7302 if (integer_onep (t))
7303 return boolean_true_node;
7304 else if (integer_zerop (t))
7305 return inner2;
7308 /* Handle the AND case, where we are redistributing:
7309 (inner1 AND inner2) OR (op2a code2 op2b)
7310 => (t AND (inner2 OR (op2a code2 op2b))) */
7311 else if (integer_zerop (t))
7312 return boolean_false_node;
7314 /* Save partial result for later. */
7315 partial = t;
7318 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
7319 if (TREE_CODE (inner2) == SSA_NAME
7320 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7321 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7322 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7323 gimple_assign_rhs1 (s),
7324 gimple_assign_rhs2 (s),
7325 code2, op2a, op2b,
7326 outer_cond_bb)))
7328 /* Handle the OR case, where we are reassociating:
7329 (inner1 OR inner2) OR (op2a code2 op2b)
7330 => (inner1 OR t)
7331 => (t OR partial) */
7332 if (is_or)
7334 if (integer_zerop (t))
7335 return inner1;
7336 else if (integer_onep (t))
7337 return boolean_true_node;
7338 /* If both are the same, we can apply the identity
7339 (x OR x) == x. */
7340 else if (partial && same_bool_result_p (t, partial))
7341 return t;
7344 /* Handle the AND case, where we are redistributing:
7345 (inner1 AND inner2) OR (op2a code2 op2b)
7346 => (t AND (inner1 OR (op2a code2 op2b)))
7347 => (t AND partial) */
7348 else
7350 if (integer_zerop (t))
7351 return boolean_false_node;
7352 else if (partial)
7354 /* We already got a simplification for the other
7355 operand to the redistributed AND expression. The
7356 interesting case is when at least one is true.
7357 Or, if both are the same, we can apply the identity
7358 (x AND x) == x. */
7359 if (integer_onep (partial))
7360 return t;
7361 else if (integer_onep (t))
7362 return partial;
7363 else if (same_bool_result_p (t, partial))
7364 return t;
7369 return NULL_TREE;
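/* Illustrative example (not from the GCC sources): given the boolean
   definition

     var_1 = a_2 | b_3;

   the identity checks above simplify (var_1 OR (var_1 != 0)) to var_1
   and (var_1 OR (var_1 == 0)) to true, without ever examining the
   inner operands a_2 and b_3.  */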
7372 /* Try to simplify the OR of two comparisons defined by
7373 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7374 If this can be done without constructing an intermediate value,
7375 return the resulting tree; otherwise NULL_TREE is returned.
7376 This function is deliberately asymmetric as it recurses on SSA_DEFs
7377 in the first comparison but not the second. */
7379 static tree
7380 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7381 enum tree_code code2, tree op2a, tree op2b,
7382 basic_block outer_cond_bb)
7384 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7386 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7387 if (operand_equal_p (op1a, op2a, 0)
7388 && operand_equal_p (op1b, op2b, 0))
7390 /* Result will be either NULL_TREE, or a combined comparison. */
7391 tree t = combine_comparisons (UNKNOWN_LOCATION,
7392 TRUTH_ORIF_EXPR, code1, code2,
7393 truth_type, op1a, op1b);
7394 if (t)
7395 return t;
7398 /* Likewise the swapped case of the above. */
7399 if (operand_equal_p (op1a, op2b, 0)
7400 && operand_equal_p (op1b, op2a, 0))
7402 /* Result will be either NULL_TREE, or a combined comparison. */
7403 tree t = combine_comparisons (UNKNOWN_LOCATION,
7404 TRUTH_ORIF_EXPR, code1,
7405 swap_tree_comparison (code2),
7406 truth_type, op1a, op1b);
7407 if (t)
7408 return t;
7411 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7412 NAME's definition is a truth value. See if there are any simplifications
7413 that can be done against NAME's definition. */
7414 if (TREE_CODE (op1a) == SSA_NAME
7415 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7416 && (integer_zerop (op1b) || integer_onep (op1b)))
7418 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7419 || (code1 == NE_EXPR && integer_onep (op1b)));
7420 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7421 switch (gimple_code (stmt))
7423 case GIMPLE_ASSIGN:
7424 /* Try to simplify by copy-propagating the definition. */
7425 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7426 op2b, outer_cond_bb);
7428 case GIMPLE_PHI:
7429 /* If every argument to the PHI produces the same result when
7430 ORed with the second comparison, we win.
7431 Do not do this unless the type is bool since we need a bool
7432 result here anyway. */
7433 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7435 tree result = NULL_TREE;
7436 unsigned i;
7437 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7439 tree arg = gimple_phi_arg_def (stmt, i);
7441 /* If this PHI has itself as an argument, ignore it.
7442 If all the other args produce the same result,
7443 we're still OK. */
7444 if (arg == gimple_phi_result (stmt))
7445 continue;
7446 else if (TREE_CODE (arg) == INTEGER_CST)
7448 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7450 if (!result)
7451 result = boolean_true_node;
7452 else if (!integer_onep (result))
7453 return NULL_TREE;
7455 else if (!result)
7456 result = fold_build2 (code2, boolean_type_node,
7457 op2a, op2b);
7458 else if (!same_bool_comparison_p (result,
7459 code2, op2a, op2b))
7460 return NULL_TREE;
7462 else if (TREE_CODE (arg) == SSA_NAME
7463 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7465 tree temp;
7466 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7467 /* In simple cases we can look through PHI nodes,
7468 but we have to be careful with loops.
7469 See PR49073. */
7470 if (! dom_info_available_p (CDI_DOMINATORS)
7471 || gimple_bb (def_stmt) == gimple_bb (stmt)
7472 || dominated_by_p (CDI_DOMINATORS,
7473 gimple_bb (def_stmt),
7474 gimple_bb (stmt)))
7475 return NULL_TREE;
7476 temp = or_var_with_comparison (type, arg, invert, code2,
7477 op2a, op2b, outer_cond_bb);
7478 if (!temp)
7479 return NULL_TREE;
7480 else if (!result)
7481 result = temp;
7482 else if (!same_bool_result_p (result, temp))
7483 return NULL_TREE;
7485 else
7486 return NULL_TREE;
7488 return result;
7491 default:
7492 break;
7495 return NULL_TREE;
7498 /* Try to simplify the OR of two comparisons, specified by
7499 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7500 If this can be simplified to a single expression (without requiring
7501 introducing more SSA variables to hold intermediate values),
7502 return the resulting tree. Otherwise return NULL_TREE.
7503 If the result expression is non-null, it has boolean type. */
7505 tree
7506 maybe_fold_or_comparisons (tree type,
7507 enum tree_code code1, tree op1a, tree op1b,
7508 enum tree_code code2, tree op2a, tree op2b,
7509 basic_block outer_cond_bb)
7511 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7512 outer_cond_bb))
7513 return t;
7515 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7516 outer_cond_bb))
7517 return t;
7519 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7520 op1a, op1b, code2, op2a,
7521 op2b, outer_cond_bb))
7522 return t;
7524 return NULL_TREE;
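/* Illustrative example: for GIMPLE like

     _1 = x_4(D) < y_5(D);
     _2 = x_4(D) == y_5(D);
     _3 = _1 | _2;

   the two comparisons share both operands, so or_comparisons_1 can
   combine them via combine_comparisons into the single test
   x_4(D) <= y_5(D), and no intermediate boolean SSA names remain.  */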
7527 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7529 Returns either NULL_TREE, a simplified but non-constant
7530 expression, or a constant.
7532 ??? This should go into a gimple-fold-inline.h file to be eventually
7533 privatized with the single valueize function used in the various TUs
7534 to avoid the indirect function call overhead. */
7536 tree
7537 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7538 tree (*gvalueize) (tree))
7540 gimple_match_op res_op;
7541 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7542 edges if there are intermediate VARYING defs. For this reason
7543 do not follow SSA edges here even though SCCVN can technically
7544 just deal fine with that. */
7545 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7547 tree res = NULL_TREE;
7548 if (gimple_simplified_result_is_gimple_val (&res_op))
7549 res = res_op.ops[0];
7550 else if (mprts_hook)
7551 res = mprts_hook (&res_op);
7552 if (res)
7554 if (dump_file && dump_flags & TDF_DETAILS)
7556 fprintf (dump_file, "Match-and-simplified ");
7557 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7558 fprintf (dump_file, " to ");
7559 print_generic_expr (dump_file, res);
7560 fprintf (dump_file, "\n");
7562 return res;
7566 location_t loc = gimple_location (stmt);
7567 switch (gimple_code (stmt))
7569 case GIMPLE_ASSIGN:
7571 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7573 switch (get_gimple_rhs_class (subcode))
7575 case GIMPLE_SINGLE_RHS:
7577 tree rhs = gimple_assign_rhs1 (stmt);
7578 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7580 if (TREE_CODE (rhs) == SSA_NAME)
7582 /* If the RHS is an SSA_NAME, return its known constant value,
7583 if any. */
7584 return (*valueize) (rhs);
7586 /* Handle propagating invariant addresses into address
7587 operations. */
7588 else if (TREE_CODE (rhs) == ADDR_EXPR
7589 && !is_gimple_min_invariant (rhs))
7591 poly_int64 offset = 0;
7592 tree base;
7593 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7594 &offset,
7595 valueize);
7596 if (base
7597 && (CONSTANT_CLASS_P (base)
7598 || decl_address_invariant_p (base)))
7599 return build_invariant_address (TREE_TYPE (rhs),
7600 base, offset);
7602 else if (TREE_CODE (rhs) == CONSTRUCTOR
7603 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7604 && known_eq (CONSTRUCTOR_NELTS (rhs),
7605 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7607 unsigned i, nelts;
7608 tree val;
7610 nelts = CONSTRUCTOR_NELTS (rhs);
7611 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7612 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7614 val = (*valueize) (val);
7615 if (TREE_CODE (val) == INTEGER_CST
7616 || TREE_CODE (val) == REAL_CST
7617 || TREE_CODE (val) == FIXED_CST)
7618 vec.quick_push (val);
7619 else
7620 return NULL_TREE;
7623 return vec.build ();
7625 if (subcode == OBJ_TYPE_REF)
7627 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7628 /* If callee is constant, we can fold away the wrapper. */
7629 if (is_gimple_min_invariant (val))
7630 return val;
7633 if (kind == tcc_reference)
7635 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7636 || TREE_CODE (rhs) == REALPART_EXPR
7637 || TREE_CODE (rhs) == IMAGPART_EXPR)
7638 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7640 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7641 return fold_unary_loc (EXPR_LOCATION (rhs),
7642 TREE_CODE (rhs),
7643 TREE_TYPE (rhs), val);
7645 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7646 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7648 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7649 return fold_ternary_loc (EXPR_LOCATION (rhs),
7650 TREE_CODE (rhs),
7651 TREE_TYPE (rhs), val,
7652 TREE_OPERAND (rhs, 1),
7653 TREE_OPERAND (rhs, 2));
7655 else if (TREE_CODE (rhs) == MEM_REF
7656 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7658 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7659 if (TREE_CODE (val) == ADDR_EXPR
7660 && is_gimple_min_invariant (val))
7662 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7663 unshare_expr (val),
7664 TREE_OPERAND (rhs, 1));
7665 if (tem)
7666 rhs = tem;
7669 return fold_const_aggregate_ref_1 (rhs, valueize);
7671 else if (kind == tcc_declaration)
7672 return get_symbol_constant_value (rhs);
7673 return rhs;
7676 case GIMPLE_UNARY_RHS:
7677 return NULL_TREE;
7679 case GIMPLE_BINARY_RHS:
7680 /* Translate &x + CST into an invariant form suitable for
7681 further propagation. */
7682 if (subcode == POINTER_PLUS_EXPR)
7684 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7685 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7686 if (TREE_CODE (op0) == ADDR_EXPR
7687 && TREE_CODE (op1) == INTEGER_CST)
7689 tree off = fold_convert (ptr_type_node, op1);
7690 return build1_loc
7691 (loc, ADDR_EXPR, TREE_TYPE (op0),
7692 fold_build2 (MEM_REF,
7693 TREE_TYPE (TREE_TYPE (op0)),
7694 unshare_expr (op0), off));
7697 /* Canonicalize bool != 0 and bool == 0 appearing after
7698 valueization. While gimple_simplify handles this
7699 it can get confused by the ~X == 1 -> X == 0 transform
7700 which we can't reduce to an SSA name or a constant
7701 (and we have no way to tell gimple_simplify to not
7702 consider those transforms in the first place). */
7703 else if (subcode == EQ_EXPR
7704 || subcode == NE_EXPR)
7706 tree lhs = gimple_assign_lhs (stmt);
7707 tree op0 = gimple_assign_rhs1 (stmt);
7708 if (useless_type_conversion_p (TREE_TYPE (lhs),
7709 TREE_TYPE (op0)))
7711 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7712 op0 = (*valueize) (op0);
7713 if (TREE_CODE (op0) == INTEGER_CST)
7714 std::swap (op0, op1);
7715 if (TREE_CODE (op1) == INTEGER_CST
7716 && ((subcode == NE_EXPR && integer_zerop (op1))
7717 || (subcode == EQ_EXPR && integer_onep (op1))))
7718 return op0;
7721 return NULL_TREE;
7723 case GIMPLE_TERNARY_RHS:
7725 /* Handle ternary operators that can appear in GIMPLE form. */
7726 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7727 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7728 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7729 return fold_ternary_loc (loc, subcode,
7730 TREE_TYPE (gimple_assign_lhs (stmt)),
7731 op0, op1, op2);
7734 default:
7735 gcc_unreachable ();
7739 case GIMPLE_CALL:
7741 tree fn;
7742 gcall *call_stmt = as_a <gcall *> (stmt);
7744 if (gimple_call_internal_p (stmt))
7746 enum tree_code subcode = ERROR_MARK;
7747 switch (gimple_call_internal_fn (stmt))
7749 case IFN_UBSAN_CHECK_ADD:
7750 subcode = PLUS_EXPR;
7751 break;
7752 case IFN_UBSAN_CHECK_SUB:
7753 subcode = MINUS_EXPR;
7754 break;
7755 case IFN_UBSAN_CHECK_MUL:
7756 subcode = MULT_EXPR;
7757 break;
7758 case IFN_BUILTIN_EXPECT:
7760 tree arg0 = gimple_call_arg (stmt, 0);
7761 tree op0 = (*valueize) (arg0);
7762 if (TREE_CODE (op0) == INTEGER_CST)
7763 return op0;
7764 return NULL_TREE;
7766 default:
7767 return NULL_TREE;
7769 tree arg0 = gimple_call_arg (stmt, 0);
7770 tree arg1 = gimple_call_arg (stmt, 1);
7771 tree op0 = (*valueize) (arg0);
7772 tree op1 = (*valueize) (arg1);
7774 if (TREE_CODE (op0) != INTEGER_CST
7775 || TREE_CODE (op1) != INTEGER_CST)
7777 switch (subcode)
7779 case MULT_EXPR:
7780 /* x * 0 = 0 * x = 0 without overflow. */
7781 if (integer_zerop (op0) || integer_zerop (op1))
7782 return build_zero_cst (TREE_TYPE (arg0));
7783 break;
7784 case MINUS_EXPR:
7785 /* y - y = 0 without overflow. */
7786 if (operand_equal_p (op0, op1, 0))
7787 return build_zero_cst (TREE_TYPE (arg0));
7788 break;
7789 default:
7790 break;
7793 tree res
7794 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7795 if (res
7796 && TREE_CODE (res) == INTEGER_CST
7797 && !TREE_OVERFLOW (res))
7798 return res;
7799 return NULL_TREE;
7802 fn = (*valueize) (gimple_call_fn (stmt));
7803 if (TREE_CODE (fn) == ADDR_EXPR
7804 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7805 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7806 && gimple_builtin_call_types_compatible_p (stmt,
7807 TREE_OPERAND (fn, 0)))
7809 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7810 tree retval;
7811 unsigned i;
7812 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7813 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7814 retval = fold_builtin_call_array (loc,
7815 gimple_call_return_type (call_stmt),
7816 fn, gimple_call_num_args (stmt), args);
7817 if (retval)
7819 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7820 STRIP_NOPS (retval);
7821 retval = fold_convert (gimple_call_return_type (call_stmt),
7822 retval);
7824 return retval;
7826 return NULL_TREE;
7829 default:
7830 return NULL_TREE;
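/* Illustrative example: with a VALUEIZE callback that maps p_1 to &a,
   the assignment

     q_2 = p_1 + 4;

   takes the POINTER_PLUS_EXPR path above and folds to an invariant
   address, roughly &MEM[(void *)&a + 4B] in GIMPLE dump syntax, which
   can then be propagated further.  */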
7834 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7835 Returns NULL_TREE if folding to a constant is not possible, otherwise
7836 returns a constant according to is_gimple_min_invariant. */
7838 tree
7839 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7841 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7842 if (res && is_gimple_min_invariant (res))
7843 return res;
7844 return NULL_TREE;
7848 /* The following functions fold references using
7849 their constant initializers. */
7851 /* See if we can find the constructor defining the value of BASE.
7852 When we know the constructor at a constant offset (such as when
7853 BASE is array[40] and we know the constructor of array), then
7854 BIT_OFFSET is adjusted accordingly.
7856 As a special case, return error_mark_node when the constructor
7857 is not explicitly available but is known to be zero,
7858 such as for 'static const int a;'. */
7859 static tree
7860 get_base_constructor (tree base, poly_int64 *bit_offset,
7861 tree (*valueize)(tree))
7863 poly_int64 bit_offset2, size, max_size;
7864 bool reverse;
7866 if (TREE_CODE (base) == MEM_REF)
7868 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7869 if (!boff.to_shwi (bit_offset))
7870 return NULL_TREE;
7872 if (valueize
7873 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7874 base = valueize (TREE_OPERAND (base, 0));
7875 if (!base || TREE_CODE (base) != ADDR_EXPR)
7876 return NULL_TREE;
7877 base = TREE_OPERAND (base, 0);
7879 else if (valueize
7880 && TREE_CODE (base) == SSA_NAME)
7881 base = valueize (base);
7883 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7884 DECL_INITIAL. If BASE is a nested reference into another
7885 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7886 the inner reference. */
7887 switch (TREE_CODE (base))
7889 case VAR_DECL:
7890 case CONST_DECL:
7892 tree init = ctor_for_folding (base);
7894 /* Our semantics are the exact opposite of ctor_for_folding's:
7895 NULL means unknown, while error_mark_node means 0. */
7896 if (init == error_mark_node)
7897 return NULL_TREE;
7898 if (!init)
7899 return error_mark_node;
7900 return init;
7903 case VIEW_CONVERT_EXPR:
7904 return get_base_constructor (TREE_OPERAND (base, 0),
7905 bit_offset, valueize);
7907 case ARRAY_REF:
7908 case COMPONENT_REF:
7909 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7910 &reverse);
7911 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7912 return NULL_TREE;
7913 *bit_offset += bit_offset2;
7914 return get_base_constructor (base, bit_offset, valueize);
7916 case CONSTRUCTOR:
7917 return base;
7919 default:
7920 if (CONSTANT_CLASS_P (base))
7921 return base;
7923 return NULL_TREE;
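/* Illustrative example: for BASE being a MEM_REF of &a with byte
   offset 4, where

     static const int a[4] = { 1, 2, 3, 4 };

   the MEM_REF case above adds 4 * BITS_PER_UNIT to *BIT_OFFSET, looks
   through the ADDR_EXPR, and returns the CONSTRUCTOR of 'a' obtained
   via ctor_for_folding.  */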
7927 /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
7928 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
7929 the reference; otherwise the type of the referenced element is used instead.
7930 When SIZE is zero, attempt to fold a reference to the entire element OFFSET
7931 refers to. Increment *SUBOFF by the bit offset of the accessed element. */
7933 static tree
7934 fold_array_ctor_reference (tree type, tree ctor,
7935 unsigned HOST_WIDE_INT offset,
7936 unsigned HOST_WIDE_INT size,
7937 tree from_decl,
7938 unsigned HOST_WIDE_INT *suboff)
7940 offset_int low_bound;
7941 offset_int elt_size;
7942 offset_int access_index;
7943 tree domain_type = NULL_TREE;
7944 HOST_WIDE_INT inner_offset;
7946 /* Compute low bound and elt size. */
7947 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7948 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7949 if (domain_type && TYPE_MIN_VALUE (domain_type))
7951 /* Static constructors for variably sized objects make no sense. */
7952 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7953 return NULL_TREE;
7954 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7956 else
7957 low_bound = 0;
7958 /* Static constructors for variably sized objects make no sense. */
7959 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7960 return NULL_TREE;
7961 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7963 /* When TYPE is non-null, verify that it specifies a constant-sized
7964 access of a multiple of the array element size. Avoid division
7965 by zero below when ELT_SIZE is zero, such as with the result of
7966 an initializer for a zero-length array or an empty struct. */
7967 if (elt_size == 0
7968 || (type
7969 && (!TYPE_SIZE_UNIT (type)
7970 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7971 return NULL_TREE;
7973 /* Compute the array index we look for. */
7974 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7975 elt_size);
7976 access_index += low_bound;
7978 /* And offset within the access. */
7979 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7981 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7982 if (size > elt_sz * BITS_PER_UNIT)
7984 /* native_encode_expr constraints. */
7985 if (size > MAX_BITSIZE_MODE_ANY_MODE
7986 || size % BITS_PER_UNIT != 0
7987 || inner_offset % BITS_PER_UNIT != 0
7988 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7989 return NULL_TREE;
7991 unsigned ctor_idx;
7992 tree val = get_array_ctor_element_at_index (ctor, access_index,
7993 &ctor_idx);
7994 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7995 return build_zero_cst (type);
7997 /* native-encode adjacent ctor elements. */
7998 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7999 unsigned bufoff = 0;
8000 offset_int index = 0;
8001 offset_int max_index = access_index;
8002 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8003 if (!val)
8004 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8005 else if (!CONSTANT_CLASS_P (val))
8006 return NULL_TREE;
8007 if (!elt->index)
8009 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8011 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8012 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8014 else
8015 index = max_index = wi::to_offset (elt->index);
8016 index = wi::umax (index, access_index);
8019 if (bufoff + elt_sz > sizeof (buf))
8020 elt_sz = sizeof (buf) - bufoff;
8021 int len = native_encode_expr (val, buf + bufoff, elt_sz,
8022 inner_offset / BITS_PER_UNIT);
8023 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
8024 return NULL_TREE;
8025 inner_offset = 0;
8026 bufoff += len;
8028 access_index += 1;
8029 if (wi::cmpu (access_index, index) == 0)
8030 val = elt->value;
8031 else if (wi::cmpu (access_index, max_index) > 0)
8033 ctor_idx++;
8034 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
8036 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8037 ++max_index;
8039 else
8041 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8042 index = 0;
8043 max_index = access_index;
8044 if (!elt->index)
8046 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8048 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8049 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8051 else
8052 index = max_index = wi::to_offset (elt->index);
8053 index = wi::umax (index, access_index);
8054 if (wi::cmpu (access_index, index) == 0)
8055 val = elt->value;
8056 else
8057 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8061 while (bufoff < size / BITS_PER_UNIT);
8062 *suboff += size;
8063 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
8066 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
8068 if (!size && TREE_CODE (val) != CONSTRUCTOR)
8070 /* For the final reference to the entire accessed element
8071 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
8072 may be null) in favor of the type of the element, and set
8073 SIZE to the size of the accessed element. */
8074 inner_offset = 0;
8075 type = TREE_TYPE (val);
8076 size = elt_sz * BITS_PER_UNIT;
8078 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
8079 && TREE_CODE (val) == CONSTRUCTOR
8080 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
8081 /* If this isn't the last element in the CTOR, is itself a CTOR,
8082 and does not cover the whole object we are requesting, give up,
8083 since we're not set up for combining from multiple CTORs. */
8084 return NULL_TREE;
8086 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
8087 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
8088 suboff);
8091 /* Memory not explicitly mentioned in constructor is 0 (or
8092 the reference is out of range). */
8093 return type ? build_zero_cst (type) : NULL_TREE;
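/* Illustrative example (assuming 32-bit int and BITS_PER_UNIT == 8):
   with

     static const int a[4] = { 1, 2, 3, 4 };

   a 32-bit read at bit offset 64 into A's CONSTRUCTOR computes
   access_index = (64 / 8) / 4 = 2 and folds to the constant 3.  */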
8096 /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
8097 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
8098 the reference; otherwise the type of the referenced member is used instead.
8099 When SIZE is zero, attempt to fold a reference to the entire member OFFSET
8100 refers to. Increment *SUBOFF by the bit offset of the accessed member. */
8102 static tree
8103 fold_nonarray_ctor_reference (tree type, tree ctor,
8104 unsigned HOST_WIDE_INT offset,
8105 unsigned HOST_WIDE_INT size,
8106 tree from_decl,
8107 unsigned HOST_WIDE_INT *suboff)
8109 unsigned HOST_WIDE_INT cnt;
8110 tree cfield, cval;
8112 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
8114 tree byte_offset = DECL_FIELD_OFFSET (cfield);
8115 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
8116 tree field_size = DECL_SIZE (cfield);
8118 if (!field_size)
8120 /* Determine the size of the flexible array member from
8121 the size of the initializer provided for it. */
8122 field_size = TYPE_SIZE (TREE_TYPE (cval));
8125 /* Variable-sized objects in static constructors make no sense,
8126 but field_size can be NULL for flexible array members. */
8127 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
8128 && TREE_CODE (byte_offset) == INTEGER_CST
8129 && (field_size != NULL_TREE
8130 ? TREE_CODE (field_size) == INTEGER_CST
8131 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
8133 /* Compute bit offset of the field. */
8134 offset_int bitoffset
8135 = (wi::to_offset (field_offset)
8136 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
8137 /* Compute bit offset where the field ends. */
8138 offset_int bitoffset_end;
8139 if (field_size != NULL_TREE)
8140 bitoffset_end = bitoffset + wi::to_offset (field_size);
8141 else
8142 bitoffset_end = 0;
8144 /* Compute the bit offset of the end of the desired access.
8145 As a special case, if the size of the desired access is
8146 zero, assume the access is to the entire field (and let
8147 the caller make any necessary adjustments based on
8148 the actual bounds of the field). */
8149 offset_int access_end = offset_int (offset);
8150 if (size)
8151 access_end += size;
8152 else
8153 access_end = bitoffset_end;
8155 /* Is there any overlap between the desired access at
8156 [OFFSET, OFFSET+SIZE) and the offset of the field within
8157 the object at [BITOFFSET, BITOFFSET_END)? */
8158 if (wi::cmps (access_end, bitoffset) > 0
8159 && (field_size == NULL_TREE
8160 || wi::lts_p (offset, bitoffset_end)))
8162 *suboff += bitoffset.to_uhwi ();
8164 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8166 /* For the final reference to the entire accessed member
8167 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8168 be null) in favor of the type of the member, and set
8169 SIZE to the size of the accessed member. */
8170 offset = bitoffset.to_uhwi ();
8171 type = TREE_TYPE (cval);
8172 size = (bitoffset_end - bitoffset).to_uhwi ();
8175 /* We do have overlap. Now see if the field is large enough
8176 to cover the access. Give up for accesses that extend
8177 beyond the end of the object or that span multiple fields. */
8178 if (wi::cmps (access_end, bitoffset_end) > 0)
8179 return NULL_TREE;
8180 if (offset < bitoffset)
8181 return NULL_TREE;
8183 offset_int inner_offset = offset_int (offset) - bitoffset;
8185 /* Integral bit-fields are left-justified on big-endian targets, so
8186 we must arrange for native_encode_int to start at their MSB. */
8187 if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
8189 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8190 return NULL_TREE;
8191 const unsigned int encoding_size
8192 = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
8193 if (BYTES_BIG_ENDIAN)
8194 inner_offset += encoding_size - wi::to_offset (field_size);
8197 return fold_ctor_reference (type, cval,
8198 inner_offset.to_uhwi (), size,
8199 from_decl, suboff);
8203 if (!type)
8204 return NULL_TREE;
8206 return build_zero_cst (type);
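/* Illustrative example: with

     static const struct { char c; int i; } s = { 'x', 42 };

   a full-width read of s.i overlaps exactly the bit range of field I
   and folds to 42, while a read extending beyond a single field (say
   one straddling C and I) spans multiple fields and returns
   NULL_TREE.  */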
8209 /* CTOR is a value initializing memory. Fold a reference of TYPE and
8210 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8211 is zero, attempt to fold a reference to the entire subobject
8212 which POLY_OFFSET refers to. This is used when folding accesses to
8213 string members of aggregates. When non-null, set *SUBOFF to
8214 the bit offset of the accessed subobject. */
8216 tree
8217 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8218 const poly_uint64 &poly_size, tree from_decl,
8219 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8221 tree ret;
8223 /* We found the field with exact match. */
8224 if (type
8225 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8226 && known_eq (poly_offset, 0U))
8227 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8229 /* The remaining optimizations need a constant size and offset. */
8230 unsigned HOST_WIDE_INT size, offset;
8231 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8232 return NULL_TREE;
8234 /* We are at the end of the walk; see if we can view-convert the
8235 result. */
8236 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8237 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8238 && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
8239 && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
8241 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8242 if (ret)
8244 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8245 if (ret)
8246 STRIP_USELESS_TYPE_CONVERSION (ret);
8248 return ret;
8251 /* For constants and byte-aligned/sized reads, try to go through
8252 native_encode/interpret. */
8253 if (CONSTANT_CLASS_P (ctor)
8254 && BITS_PER_UNIT == 8
8255 && offset % BITS_PER_UNIT == 0
8256 && offset / BITS_PER_UNIT <= INT_MAX
8257 && size % BITS_PER_UNIT == 0
8258 && size <= MAX_BITSIZE_MODE_ANY_MODE
8259 && can_native_interpret_type_p (type))
8261 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8262 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8263 offset / BITS_PER_UNIT);
8264 if (len > 0)
8265 return native_interpret_expr (type, buf, len);
8268 /* For constructors, first try recursive local processing; in any case
8269 this requires the native storage order. */
8270 if (TREE_CODE (ctor) == CONSTRUCTOR
8271 && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8272 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
8274 unsigned HOST_WIDE_INT dummy = 0;
8275 if (!suboff)
8276 suboff = &dummy;
8278 tree ret;
8279 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8280 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8281 ret = fold_array_ctor_reference (type, ctor, offset, size,
8282 from_decl, suboff);
8283 else
8284 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8285 from_decl, suboff);
8287 /* Otherwise fall back to native_encode_initializer. This may be done
8288 only from the outermost fold_ctor_reference call (because it itself
8289 recurses into CONSTRUCTORs and doesn't update suboff). */
8290 if (ret == NULL_TREE
8291 && suboff == &dummy
8292 && BITS_PER_UNIT == 8
8293 && offset % BITS_PER_UNIT == 0
8294 && offset / BITS_PER_UNIT <= INT_MAX
8295 && size % BITS_PER_UNIT == 0
8296 && size <= MAX_BITSIZE_MODE_ANY_MODE
8297 && can_native_interpret_type_p (type))
8299 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8300 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8301 offset / BITS_PER_UNIT);
8302 if (len > 0)
8303 return native_interpret_expr (type, buf, len);
8306 return ret;
8309 return NULL_TREE;
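/* Illustrative example (assuming IEEE single-precision float and
   BITS_PER_UNIT == 8): reading a 32-bit float at offset 0 from

     static const unsigned int u = 0x40490fdb;

   has matching sizes, so the view-convert path above applies and the
   read folds to the REAL_CST approximately 3.14159 (the bit pattern
   of float pi).  */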
8312 /* Return the tree representing the element referenced by T if T is an
8313 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8314 names using VALUEIZE. Return NULL_TREE otherwise. */
8316 tree
8317 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8319 tree ctor, idx, base;
8320 poly_int64 offset, size, max_size;
8321 tree tem;
8322 bool reverse;
8324 if (TREE_THIS_VOLATILE (t))
8325 return NULL_TREE;
8327 if (DECL_P (t))
8328 return get_symbol_constant_value (t);
8330 tem = fold_read_from_constant_string (t);
8331 if (tem)
8332 return tem;
8334 switch (TREE_CODE (t))
8336 case ARRAY_REF:
8337 case ARRAY_RANGE_REF:
8338 /* Constant indexes are handled well by get_base_constructor.
8339 Only special case variable offsets.
8340 FIXME: This code can't handle nested references with variable indexes
8341 (they will be handled only by iteration of ccp). Perhaps we can bring
8342 get_ref_base_and_extent here and make it use a valueize callback. */
8343 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8344 && valueize
8345 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8346 && poly_int_tree_p (idx))
8348 tree low_bound, unit_size;
8350 /* If the resulting bit-offset is constant, track it. */
8351 if ((low_bound = array_ref_low_bound (t),
8352 poly_int_tree_p (low_bound))
8353 && (unit_size = array_ref_element_size (t),
8354 tree_fits_uhwi_p (unit_size)))
8356 poly_offset_int woffset
8357 = wi::sext (wi::to_poly_offset (idx)
8358 - wi::to_poly_offset (low_bound),
8359 TYPE_PRECISION (sizetype));
8360 woffset *= tree_to_uhwi (unit_size);
8361 woffset *= BITS_PER_UNIT;
8362 if (woffset.to_shwi (&offset))
8364 base = TREE_OPERAND (t, 0);
8365 ctor = get_base_constructor (base, &offset, valueize);
8366 /* Empty constructor. Always fold to 0. */
8367 if (ctor == error_mark_node)
8368 return build_zero_cst (TREE_TYPE (t));
8369 /* Out-of-bounds array access. Value is undefined,
8370 but don't fold. */
8371 if (maybe_lt (offset, 0))
8372 return NULL_TREE;
8373 /* We cannot determine ctor. */
8374 if (!ctor)
8375 return NULL_TREE;
8376 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8377 tree_to_uhwi (unit_size)
8378 * BITS_PER_UNIT,
8379 base);
8383 /* Fallthru. */
8385 case COMPONENT_REF:
8386 case BIT_FIELD_REF:
8387 case TARGET_MEM_REF:
8388 case MEM_REF:
8389 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8390 ctor = get_base_constructor (base, &offset, valueize);
8392 /* Empty constructor. Always fold to 0. */
8393 if (ctor == error_mark_node)
8394 return build_zero_cst (TREE_TYPE (t));
8395 /* We do not know precise address. */
8396 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8397 return NULL_TREE;
8398 /* We cannot determine ctor. */
8399 if (!ctor)
8400 return NULL_TREE;
8402 /* Out-of-bounds array access. Value is undefined, but don't fold. */
8403 if (maybe_lt (offset, 0))
8404 return NULL_TREE;
8406 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8407 if (tem)
8408 return tem;
8410 /* For bit field reads try to read the representative and
8411 adjust. */
8412 if (TREE_CODE (t) == COMPONENT_REF
8413 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8414 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8416 HOST_WIDE_INT csize, coffset;
8417 tree field = TREE_OPERAND (t, 1);
8418 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8419 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8420 && size.is_constant (&csize)
8421 && offset.is_constant (&coffset)
8422 && (coffset % BITS_PER_UNIT != 0
8423 || csize % BITS_PER_UNIT != 0)
8424 && !reverse
8425 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8427 poly_int64 bitoffset;
8428 poly_uint64 field_offset, repr_offset;
8429 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8430 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8431 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8432 else
8433 bitoffset = 0;
8434 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8435 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8436 HOST_WIDE_INT bitoff;
8437 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8438 - TYPE_PRECISION (TREE_TYPE (field)));
8439 if (bitoffset.is_constant (&bitoff)
8440 && bitoff >= 0
8441 && bitoff <= diff)
8443 offset -= bitoff;
8444 size = tree_to_uhwi (DECL_SIZE (repr));
8446 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8447 size, base);
8448 if (tem && TREE_CODE (tem) == INTEGER_CST)
8450 if (!BYTES_BIG_ENDIAN)
8451 tem = wide_int_to_tree (TREE_TYPE (field),
8452 wi::lrshift (wi::to_wide (tem),
8453 bitoff));
8454 else
8455 tem = wide_int_to_tree (TREE_TYPE (field),
8456 wi::lrshift (wi::to_wide (tem),
8457 diff - bitoff));
8458 return tem;
8463 break;
8465 case REALPART_EXPR:
8466 case IMAGPART_EXPR:
8468 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8469 if (c && TREE_CODE (c) == COMPLEX_CST)
8470 return fold_build1_loc (EXPR_LOCATION (t),
8471 TREE_CODE (t), TREE_TYPE (t), c);
8472 break;
8475 default:
8476 break;
8479 return NULL_TREE;
8482 tree
8483 fold_const_aggregate_ref (tree t)
8485 return fold_const_aggregate_ref_1 (t, NULL);
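/* Illustrative example: folding __real__ c with

     static const _Complex double c = 1.0 + 2.0i;

   hits the REALPART_EXPR case in fold_const_aggregate_ref_1 above,
   resolves the operand to a COMPLEX_CST via get_symbol_constant_value,
   and returns the REAL_CST 1.0.  */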
8488 /* Look up the virtual method with index TOKEN in a virtual table V
8489 at OFFSET.
8490 If CAN_REFER is non-NULL, set it to false when the method
8491 is not referable or when the virtual table is ill-formed (such as rewritten
8492 by a non-C++ produced symbol); otherwise just return NULL in those cases. */
8494 tree
8495 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8496 tree v,
8497 unsigned HOST_WIDE_INT offset,
8498 bool *can_refer)
8500 tree vtable = v, init, fn;
8501 unsigned HOST_WIDE_INT size;
8502 unsigned HOST_WIDE_INT elt_size, access_index;
8503 tree domain_type;
8505 if (can_refer)
8506 *can_refer = true;
8508 /* First of all, double-check that we have a virtual table. */
8509 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8511 /* Pass down that we lost track of the target. */
8512 if (can_refer)
8513 *can_refer = false;
8514 return NULL_TREE;
8517 init = ctor_for_folding (v);
8519 /* The virtual tables should always be born with constructors
8520 and we should always assume that they are available for
8521 folding. At the moment we do not stream them in all cases,
8522 but it should never happen that the ctor seems unreachable. */
8523 gcc_assert (init);
8524 if (init == error_mark_node)
8526 /* Pass down that we lost track of the target. */
8527 if (can_refer)
8528 *can_refer = false;
8529 return NULL_TREE;
8531 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8532 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8533 offset *= BITS_PER_UNIT;
8534 offset += token * size;
8536 /* Look up the value in the constructor, which is assumed to be an array.
8537 This is equivalent to
8538 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8539 offset, size, NULL);
8540 but in constant time. We expect that the frontend produced a simple
8541 array without indexed initializers. */
8543 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8544 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8545 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8546 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8548 access_index = offset / BITS_PER_UNIT / elt_size;
8549 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8551 /* The C++ FE can now produce indexed fields, and we check if the indexes
8552 match. */
8553 if (access_index < CONSTRUCTOR_NELTS (init))
8555 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8556 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8557 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8558 STRIP_NOPS (fn);
8560 else
8561 fn = NULL;
8563 /* For a type-inconsistent program we may end up looking up a virtual method
8564 in a virtual table that does not contain TOKEN entries. We may overrun
8565 the virtual table and pick up a constant or RTTI info pointer.
8566 In any case the call is undefined. */
8567 if (!fn
8568 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8569 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8570 fn = builtin_decl_unreachable ();
8571 else
8573 fn = TREE_OPERAND (fn, 0);
8575 /* When the cgraph node is missing and the function is not public, we cannot
8576 devirtualize. This can happen in WHOPR when the actual method
8577 ends up in another partition, because we found the devirtualization
8578 possibility too late. */
8579 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8581 if (can_refer)
8583 *can_refer = false;
8584 return fn;
8586 return NULL_TREE;
8590 /* Make sure we create a cgraph node for functions we'll reference.
8591 They can be non-existent if the reference comes from an entry
8592 of an external vtable, for example. */
8593 cgraph_node::get_create (fn);
8595 return fn;
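/* Illustrative example (assuming 8-byte vtable slots, so ELT_SIZE is 8
   and the slot bit size is 64): a lookup of TOKEN 2 at OFFSET 16
   computes

     access_index = (16 * 8 + 2 * 64) / 8 / 8 = 4

   i.e. the fifth entry of the vtable's CONSTRUCTOR.  */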
8598 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8599 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8600 KNOWN_BINFO carries the binfo describing the true type of
8601 OBJ_TYPE_REF_OBJECT(REF).
8602 If CAN_REFER is non-NULL, set it to false when the method
8603 is not referable or when the virtual table is ill-formed (such as rewritten
8604 by a non-C++ produced symbol); otherwise just return NULL in those cases. */
8606 tree
8607 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8608 bool *can_refer)
8610 unsigned HOST_WIDE_INT offset;
8611 tree v;
8613 v = BINFO_VTABLE (known_binfo);
8614 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
8615 if (!v)
8616 return NULL_TREE;
8618 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8620 if (can_refer)
8621 *can_refer = false;
8622 return NULL_TREE;
8624 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8627 /* Given a pointer value T, return a simplified version of an
8628 indirection through T, or NULL_TREE if no simplification is
8629 possible. Note that the resulting type may differ from the
8630 pointed-to type, but only in ways that keep it compatible
8631 from the langhooks point of view. */
8633 tree
8634 gimple_fold_indirect_ref (tree t)
8636 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8637 tree sub = t;
8638 tree subtype;
8640 STRIP_NOPS (sub);
8641 subtype = TREE_TYPE (sub);
8642 if (!POINTER_TYPE_P (subtype)
8643 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8644 return NULL_TREE;
8646 if (TREE_CODE (sub) == ADDR_EXPR)
8648 tree op = TREE_OPERAND (sub, 0);
8649 tree optype = TREE_TYPE (op);
8650 /* *&p => p */
8651 if (useless_type_conversion_p (type, optype))
8652 return op;
8654 /* *(foo *)&fooarray => fooarray[0] */
8655 if (TREE_CODE (optype) == ARRAY_TYPE
8656 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8657 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8659 tree type_domain = TYPE_DOMAIN (optype);
8660 tree min_val = size_zero_node;
8661 if (type_domain && TYPE_MIN_VALUE (type_domain))
8662 min_val = TYPE_MIN_VALUE (type_domain);
8663 if (TREE_CODE (min_val) == INTEGER_CST)
8664 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8666 /* *(foo *)&complexfoo => __real__ complexfoo */
8667 else if (TREE_CODE (optype) == COMPLEX_TYPE
8668 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8669 return fold_build1 (REALPART_EXPR, type, op);
8670 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8671 else if (TREE_CODE (optype) == VECTOR_TYPE
8672 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8674 tree part_width = TYPE_SIZE (type);
8675 tree index = bitsize_int (0);
8676 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8680 /* *(p + CST) -> ... */
8681 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8682 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8684 tree addr = TREE_OPERAND (sub, 0);
8685 tree off = TREE_OPERAND (sub, 1);
8686 tree addrtype;
8688 STRIP_NOPS (addr);
8689 addrtype = TREE_TYPE (addr);
8691 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8692 if (TREE_CODE (addr) == ADDR_EXPR
8693 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8694 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8695 && tree_fits_uhwi_p (off))
8697 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8698 tree part_width = TYPE_SIZE (type);
8699 unsigned HOST_WIDE_INT part_widthi
8700 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8701 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8702 tree index = bitsize_int (indexi);
8703 if (known_lt (offset / part_widthi,
8704 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8705 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8706 part_width, index);
8709 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8710 if (TREE_CODE (addr) == ADDR_EXPR
8711 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8712 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8714 tree size = TYPE_SIZE_UNIT (type);
8715 if (tree_int_cst_equal (size, off))
8716 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8719 /* *(p + CST) -> MEM_REF <p, CST>. */
8720 if (TREE_CODE (addr) != ADDR_EXPR
8721 || DECL_P (TREE_OPERAND (addr, 0)))
8722 return fold_build2 (MEM_REF, type,
8723 addr,
8724 wide_int_to_tree (ptype, wi::to_wide (off)));
8727 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8728 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8729 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8730 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8732 tree type_domain;
8733 tree min_val = size_zero_node;
8734 tree osub = sub;
8735 sub = gimple_fold_indirect_ref (sub);
8736 if (! sub)
8737 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8738 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8739 if (type_domain && TYPE_MIN_VALUE (type_domain))
8740 min_val = TYPE_MIN_VALUE (type_domain);
8741 if (TREE_CODE (min_val) == INTEGER_CST)
8742 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8745 return NULL_TREE;
8748 /* Return true if CODE is an operation that, when operating on signed
8749 integer types, involves undefined behavior on overflow and the
8750 operation can be expressed with unsigned arithmetic. */
8752 bool
8753 arith_code_with_undefined_signed_overflow (tree_code code)
8755 switch (code)
8757 case ABS_EXPR:
8758 case PLUS_EXPR:
8759 case MINUS_EXPR:
8760 case MULT_EXPR:
8761 case NEGATE_EXPR:
8762 case POINTER_PLUS_EXPR:
8763 return true;
8764 default:
8765 return false;
8769 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8770 operation that can be transformed to unsigned arithmetic by converting
8771 its operands, carrying out the operation in the corresponding unsigned
8772 type and converting the result back to the original type.
8774 If IN_PLACE is true, adjust the stmt in place and return NULL.
8775 Otherwise returns a sequence of statements that replace STMT and also
8776 contain a modified form of STMT itself. */
8778 gimple_seq
8779 rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
8781 if (dump_file && (dump_flags & TDF_DETAILS))
8783 fprintf (dump_file, "rewriting stmt with undefined signed "
8784 "overflow ");
8785 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8788 tree lhs = gimple_assign_lhs (stmt);
8789 tree type = unsigned_type_for (TREE_TYPE (lhs));
8790 gimple_seq stmts = NULL;
8791 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8792 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8793 else
8794 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8796 tree op = gimple_op (stmt, i);
8797 op = gimple_convert (&stmts, type, op);
8798 gimple_set_op (stmt, i, op);
8800 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8801 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8802 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8803 gimple_set_modified (stmt, true);
8804 if (in_place)
8806 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8807 if (stmts)
8808 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
8809 stmts = NULL;
8811 else
8812 gimple_seq_add_stmt (&stmts, stmt);
8813 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8814 if (in_place)
8816 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8817 gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
8818 update_stmt (stmt);
8820 else
8821 gimple_seq_add_stmt (&stmts, cvt);
8823 return stmts;
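/* Illustrative example: the signed addition

     c_3 = a_1 + b_2;

   is rewritten (for 32-bit int) to

     _4 = (unsigned int) a_1;
     _5 = (unsigned int) b_2;
     _6 = _4 + _5;
     c_3 = (int) _6;

   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow.  */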
8827 /* The valueization hook we use for the gimple_build API simplification.
8828 This makes us match fold_buildN behavior by only combining with
8829 statements in the sequence(s) we are currently building. */
8831 static tree
8832 gimple_build_valueize (tree op)
8834 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8835 return op;
8836 return NULL_TREE;
8839 /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8841 static inline void
8842 gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8843 bool before, gsi_iterator_update update,
8844 gimple_seq seq)
8846 if (before)
8848 if (gsi->bb)
8849 gsi_insert_seq_before (gsi, seq, update);
8850 else
8851 gsi_insert_seq_before_without_update (gsi, seq, update);
8853 else
8855 if (gsi->bb)
8856 gsi_insert_seq_after (gsi, seq, update);
8857 else
8858 gsi_insert_seq_after_without_update (gsi, seq, update);
8862 /* Build the expression CODE OP0 of type TYPE with location LOC,
8863 simplifying it first if possible. Returns the built
8864 expression value and inserts statements possibly defining it
8865 before GSI if BEFORE is true, or after GSI if false, advancing
8866 the iterator accordingly.
8867 If GSI refers to a basic block, simplification is allowed to look
8868 at all SSA defs, while when it does not it is restricted to
8869 SSA defs that are not associated with a basic block yet,
8870 indicating they belong to the sequence currently being built. */
8872 tree
8873 gimple_build (gimple_stmt_iterator *gsi,
8874 bool before, gsi_iterator_update update,
8875 location_t loc, enum tree_code code, tree type, tree op0)
8877 gimple_seq seq = NULL;
8878 tree res
8879 = gimple_simplify (code, type, op0, &seq,
8880 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8881 if (!res)
8883 res = create_tmp_reg_or_ssa_name (type);
8884 gimple *stmt;
8885 if (code == REALPART_EXPR
8886 || code == IMAGPART_EXPR
8887 || code == VIEW_CONVERT_EXPR)
8888 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8889 else
8890 stmt = gimple_build_assign (res, code, op0);
8891 gimple_set_location (stmt, loc);
8892 gimple_seq_add_stmt_without_update (&seq, stmt);
8894 gimple_build_insert_seq (gsi, before, update, seq);
8895 return res;
8898 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8899 simplifying it first if possible. Returns the built
8900 expression value inserting any new statements at GSI honoring BEFORE
8901 and UPDATE. */
8903 tree
8904 gimple_build (gimple_stmt_iterator *gsi,
8905 bool before, gsi_iterator_update update,
8906 location_t loc, enum tree_code code, tree type,
8907 tree op0, tree op1)
8909 gimple_seq seq = NULL;
8910 tree res
8911 = gimple_simplify (code, type, op0, op1, &seq,
8912 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8913 if (!res)
8915 res = create_tmp_reg_or_ssa_name (type);
8916 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8917 gimple_set_location (stmt, loc);
8918 gimple_seq_add_stmt_without_update (&seq, stmt);
8920 gimple_build_insert_seq (gsi, before, update, seq);
8921 return res;
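/* Usage sketch (hypothetical caller; GSI, LOC, TYPE, A and B are
   assumed to be set up by the caller): to emit A + B simplified
   before the statement GSI points at, one would write

     tree sum = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
                              PLUS_EXPR, type, a, b);

   If gimple_simplify can reduce the expression (say B is known to be
   zero), no statement is emitted and the simplified value is returned
   directly.  */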
8924 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8925 simplifying it first if possible. Returns the built
8926 expression value inserting any new statements at GSI honoring BEFORE
8927 and UPDATE. */
8929 tree
8930 gimple_build (gimple_stmt_iterator *gsi,
8931 bool before, gsi_iterator_update update,
8932 location_t loc, enum tree_code code, tree type,
8933 tree op0, tree op1, tree op2)
8936 gimple_seq seq = NULL;
8937 tree res
8938 = gimple_simplify (code, type, op0, op1, op2, &seq,
8939 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8940 if (!res)
8942 res = create_tmp_reg_or_ssa_name (type);
8943 gimple *stmt;
8944 if (code == BIT_FIELD_REF)
8945 stmt = gimple_build_assign (res, code,
8946 build3 (code, type, op0, op1, op2));
8947 else
8948 stmt = gimple_build_assign (res, code, op0, op1, op2);
8949 gimple_set_location (stmt, loc);
8950 gimple_seq_add_stmt_without_update (&seq, stmt);
8952 gimple_build_insert_seq (gsi, before, update, seq);
8953 return res;
8956 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8957 void) with a location LOC. Returns the built expression value (or NULL_TREE
8958 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8959 and UPDATE. */
8961 tree
8962 gimple_build (gimple_stmt_iterator *gsi,
8963 bool before, gsi_iterator_update update,
8964 location_t loc, combined_fn fn, tree type)
8966 tree res = NULL_TREE;
8967 gimple_seq seq = NULL;
8968 gcall *stmt;
8969 if (internal_fn_p (fn))
8970 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8971 else
8973 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8974 stmt = gimple_build_call (decl, 0);
8976 if (!VOID_TYPE_P (type))
8978 res = create_tmp_reg_or_ssa_name (type);
8979 gimple_call_set_lhs (stmt, res);
8981 gimple_set_location (stmt, loc);
8982 gimple_seq_add_stmt_without_update (&seq, stmt);
8983 gimple_build_insert_seq (gsi, before, update, seq);
8984 return res;
8987 /* Build the call FN (ARG0) with a result of type TYPE
8988 (or no result if TYPE is void) with location LOC,
8989 simplifying it first if possible. Returns the built
8990 expression value (or NULL_TREE if TYPE is void) inserting any new
8991 statements at GSI honoring BEFORE and UPDATE. */
8993 tree
8994 gimple_build (gimple_stmt_iterator *gsi,
8995 bool before, gsi_iterator_update update,
8996 location_t loc, combined_fn fn,
8997 tree type, tree arg0)
8999 gimple_seq seq = NULL;
9000 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
9001 if (!res)
9003 gcall *stmt;
9004 if (internal_fn_p (fn))
9005 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
9006 else
9008 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9009 stmt = gimple_build_call (decl, 1, arg0);
9011 if (!VOID_TYPE_P (type))
9013 res = create_tmp_reg_or_ssa_name (type);
9014 gimple_call_set_lhs (stmt, res);
9016 gimple_set_location (stmt, loc);
9017 gimple_seq_add_stmt_without_update (&seq, stmt);
9019 gimple_build_insert_seq (gsi, before, update, seq);
9020 return res;
9023 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
9024 (or no result if TYPE is void) with location LOC,
9025 simplifying it first if possible. Returns the built
9026 expression value (or NULL_TREE if TYPE is void) inserting any new
9027 statements at GSI honoring BEFORE and UPDATE. */
9029 tree
9030 gimple_build (gimple_stmt_iterator *gsi,
9031 bool before, gsi_iterator_update update,
9032 location_t loc, combined_fn fn,
9033 tree type, tree arg0, tree arg1)
9035 gimple_seq seq = NULL;
9036 tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
9037 gimple_build_valueize);
9038 if (!res)
9040 gcall *stmt;
9041 if (internal_fn_p (fn))
9042 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
9043 else
9045 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9046 stmt = gimple_build_call (decl, 2, arg0, arg1);
9048 if (!VOID_TYPE_P (type))
9050 res = create_tmp_reg_or_ssa_name (type);
9051 gimple_call_set_lhs (stmt, res);
9053 gimple_set_location (stmt, loc);
9054 gimple_seq_add_stmt_without_update (&seq, stmt);
9056 gimple_build_insert_seq (gsi, before, update, seq);
9057 return res;
9060 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
9061 (or no result if TYPE is void) with location LOC,
9062 simplifying it first if possible. Returns the built
9063 expression value (or NULL_TREE if TYPE is void) inserting any new
9064 statements at GSI honoring BEFORE and UPDATE. */
9066 tree
9067 gimple_build (gimple_stmt_iterator *gsi,
9068 bool before, gsi_iterator_update update,
9069 location_t loc, combined_fn fn,
9070 tree type, tree arg0, tree arg1, tree arg2)
9072 gimple_seq seq = NULL;
9073 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
9074 &seq, gimple_build_valueize);
9075 if (!res)
9077 gcall *stmt;
9078 if (internal_fn_p (fn))
9079 stmt = gimple_build_call_internal (as_internal_fn (fn),
9080 3, arg0, arg1, arg2);
9081 else
9083 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9084 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
9086 if (!VOID_TYPE_P (type))
9088 res = create_tmp_reg_or_ssa_name (type);
9089 gimple_call_set_lhs (stmt, res);
9091 gimple_set_location (stmt, loc);
9092 gimple_seq_add_stmt_without_update (&seq, stmt);
9094 gimple_build_insert_seq (gsi, before, update, seq);
9095 return res;
/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
  return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
}

/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0, tree op1)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1);
}

/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
   is void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code,
	      tree type, tree op0, tree op1, tree op2)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1, op2);
}

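/* These code_helper overloads let generic simplification code stay
   agnostic about whether it emits a tree-code operation or a (built-in
   or internal) function call.  A hedged sketch, with CODE holding
   either e.g. PLUS_EXPR or CFN_SQRT, and RHS1/RHS2 being gimple
   values:

     tree res = gimple_build (gsi, true, GSI_SAME_STMT,
			      loc, code, type, rhs1, rhs2);  */
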
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert (gimple_stmt_iterator *gsi,
		bool before, gsi_iterator_update update,
		location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
}

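/* A minimal usage sketch, assuming IDX and LOC come from the caller:
   widen a narrower index IDX to size_type_node before arithmetic on
   it; when the types already agree the operand is returned unchanged
   and nothing is emitted:

     tree off = gimple_convert (gsi, true, GSI_SAME_STMT,
				loc, size_type_node, idx);  */
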
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (gsi, before, update, loc, sizetype, op);
}

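/* POINTER_PLUS_EXPR requires its offset operand to be compatible with
   sizetype, which is exactly what this helper guarantees.  A hedged
   sketch computing PTR + OFF:

     tree soff = gimple_convert_to_ptrofftype (gsi, true, GSI_SAME_STMT,
					       loc, off);
     tree addr = gimple_build (gsi, true, GSI_SAME_STMT, loc,
			       POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			       ptr, soff);  */
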
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}

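/* Sketch: splat SCALAR across all lanes of VECTYPE.  For a constant
   SCALAR this folds to a VECTOR_CST and emits no statement:

     tree splat = gimple_build_vector_from_val (gsi, true, GSI_SAME_STMT,
						loc, vectype, scalar);  */
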
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new statements at GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  return builder->build ();
}

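/* Sketch: build the interleaved vector { a, b, a, b, ... } using a
   two-pattern encoding with one element per pattern, which satisfies
   the no-stepped-encoding requirement above:

     tree_vector_builder builder (vectype, 2, 1);
     builder.quick_push (a);
     builder.quick_push (b);
     tree vec = gimple_build_vector (gsi, true, GSI_SAME_STMT,
				     loc, &builder);  */
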
/* Emit gimple statements at GSI (honoring BEFORE and UPDATE) that take
   the value OLD_SIZE and compute a value guaranteed to be rounded
   upwards to ALIGN.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_stmt_iterator *gsi,
		       bool before, gsi_iterator_update update,
		       location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
}

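/* The emitted computation is the standard power-of-two rounding idiom.
   An equivalent plain C sketch, assuming ALIGN is a power of two:

     size_t round_up (size_t size, size_t align)
     {
       return (size + align - 1) & -align;
     }

   For instance round_up (13, 8) == 16, and round_up (16, 8) == 16.  */
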
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

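/* For example, for t = x * x with signed X (a GIMPLE_BINARY_RHS),
   tree_binary_nonnegative_warnv_p can only answer true by assuming
   signed overflow is undefined, and in that case it also sets
   *STRICT_OVERFLOW_P so that callers can emit -Wstrict-overflow
   diagnostics.  */
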
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}

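/* tree_call_nonnegative_warnv_p recognizes the usual candidates: for
   instance fabs (x) and exp (x) are always non-negative, while
   sqrt (x) only qualifies when X is known non-negative or -0.0 need
   not be honored (sqrt (-0.0) is -0.0).  Note that a call without an
   LHS never qualifies, since the result type is taken from the LHS.  */
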
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}

/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree type = gimple_range_type (stmt);
  if (type && frange::supports_p (type))
    {
      frange r;
      bool sign;
      if (get_global_range_query ()->range_of_stmt (r, stmt)
	  && r.signbit_p (sign))
	return !sign;
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}

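/* A hedged usage sketch: callers typically thread a strict-overflow
   flag through the query and only trust the result unconditionally
   when that flag stays false:

     bool sop = false;
     if (gimple_stmt_nonnegative_warnv_p (stmt, &sop, 0) && !sop)
       ... STMT's value is known non-negative with no assumptions ...  */
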
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}

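/* integer_valued_real_call_p covers the rounding family among others;
   trunc (x), floor (x), ceil (x), round (x), rint (x) and
   nearbyint (x) all produce integer values by definition.  */
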
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}

/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}

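/* A hedged sketch of a typical consumer, deciding whether a rounding
   call is redundant (DEF_STMT being the definition of the rounded
   argument):

     if (gimple_stmt_integer_valued_real_p (def_stmt, 0))
       ... trunc (x) == x here, so the trunc call can be elided ...  */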