/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"
#include "gimple-range.h"
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check whether we still have a
     definition or whether the definition is going to be output in
     another partition.  Bypass this when gimplifying; all needed
     functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions it may be tempting to not necessarily give up because
     the copy will be output elsewhere when the corresponding vtable
     is output.  This is however not possible - the ABI specifies that
     COMDATs are output in the units where they are used, and when the
     other unit was compiled with LTO it is possible that the vtable
     was kept public while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
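
/* For instance, an initializer such as

     static int a[4];
     static int *p = &a[0] + 2;

   reaches this function as the POINTER_PLUS_EXPR '&a p+ 8' and is
   rewritten above into the ADDR_EXPR '&MEM[(int *)&a + 8]', a form
   that is_gimple_min_invariant accepts.  */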
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
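
/* For instance, given

     static const int answer = 42;

   a load of 'answer' folds to the constant 42 here, and a 'const'
   qualified scalar with no initializer at all (that may not be
   overridden at link or run time) folds to zero via build_zero_cst.  */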
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
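
/* For instance, '__real__' of a COMPLEX_CST such as {3.0, 4.0} folds
   to 3.0 via fold_unary_loc above, and a BIT_FIELD_REF selecting an
   element of a VECTOR_CST folds through fold_ternary_loc; any result
   that is not a minimal invariant is discarded.  */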
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
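
/* For instance, '&a[i_5]' with SSA name i_5 is a valid rhs, while
   '&a[i_5 + 1]' is not: the embedded index 'i_5 + 1' is a full
   expression rather than a GIMPLE value, so the ADDR_EXPR case above
   rejects it and the caller must gimplify the folded tree first.  */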
/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				       "resolving virtual function address "
				       "reference to function %s\n",
				       targets.length () == 1
				       ? targets[0]->name ()
				       : "NULL");
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
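
/* As an example of the OBJ_TYPE_REF case above: when type inheritance
   analysis proves that a virtual call has a single possible target,
   say 'Derived::f', the loaded vtable slot folds directly to the
   invariant '&Derived::f' (or to a null pointer when the target set
   is provably empty).  */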
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
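
/* For instance, replacing a statement with VUSE .MEM_3 and VDEF .MEM_4
   by a sequence of two stores threads the virtual operands as

     store1: VUSE .MEM_3, VDEF .MEM_7 (a new name)
     store2: VUSE .MEM_7, VDEF .MEM_4 (reused from the old stmt)

   so that everything dominated by the old VDEF keeps seeing .MEM_4.  */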
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments, where the arguments in GIMPLE form follow the
   NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
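
/* Typical use, e.g. when folding one builtin into another:

     update_gimple_call (gsi, builtin_decl_implicit (BUILT_IN_MEMCPY),
			 3, dest, src, len);

   replaces the call at *GSI with 'memcpy (dest, src, len)' while
   preserving the lhs, virtual operands and location of the old call.  */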
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
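
/* E.g. folding 'n_2 = strlen ("abc")' calls
   replace_call_with_value (gsi, build_int_cst (size_type_node, 3)),
   turning the call into the plain assignment 'n_2 = 3' and releasing
   the no longer needed virtual definition.  */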
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
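
/* For example, if the range query proves that an unsigned size n_5
   satisfies n_5 == 0 || n_5 > SSIZE_MAX, the intersection with
   [0, SSIZE_MAX] computed above leaves only zero, and callers may
   fold e.g. 'memcpy (d, s, n_5)' down to 'd'.  */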
/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;
      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
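
/* As a concrete example of the inline path above: with

     int s, d;
     memcpy (&d, &s, sizeof (int));

   the four-byte, power-of-two sized copy becomes a single load and
   store,

     _1 = s;
     d = _1;

   and a memmove between accesses proven not to overlap is first
   turned back into memcpy and then folded the same way.  */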
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     that it's equivalent to memmove (not memcpy).  Transform bcopy (src,
     dest, len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
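
/* Worked example of the CVAL replication above: for

     int i;
     memset (&i, 0xab, sizeof (int));

   cval is built up as 0xab -> 0xabab -> 0xabababab and the call
   becomes the single store 'i = 0xabababab' (build_int_cst_type
   truncates the replicated value when ETYPE is narrower).  The final
   '(cval << 31) << 1' is split in two to avoid an undefined
   full-width shift on hosts where HOST_WIDE_INT is only 32 bits
   wide.  */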
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);
      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     LENDATA.DECL with size LENDATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_ref_flexible_size_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
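
/* For instance, given 'char buf[8]' with unknown contents, a
   SRK_LENRANGE query for 'strlen (buf)' yields PDATA->MINLEN = 0 and
   an upper bound of 7: the array size minus 1 for the terminating
   nul, widened to the enclosing object's size when only a tight
   subobject bound was computed.  */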
1841 /* For an ARG referencing one or more strings, try to obtain the range
1842 of their lengths, or the size of the largest array ARG referes to if
1843 the range of lengths cannot be determined, and store all in *PDATA.
1844 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1845 the maximum constant value.
1846 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1847 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1848 length or if we are unable to determine the length, return false.
1849 VISITED is a bitmap of visited variables.
1850 RKIND determines the kind of value or range to obtain (see
1851 strlen_range_kind).
1852 Set PDATA->DECL if ARG refers to an unterminated constant array.
1853 On input, set ELTSIZE to 1 for normal single byte character strings,
1854 and either 2 or 4 for wide character strings (the size of wchar_t).
1855 Return true if *PDATA was successfully populated and false otherwise. */
1857 static bool
1858 get_range_strlen (tree arg, bitmap visited,
1859 strlen_range_kind rkind,
1860 c_strlen_data *pdata, unsigned eltsize)
1863 if (TREE_CODE (arg) != SSA_NAME)
1864 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1866 /* If ARG is registered for SSA update we cannot look at its defining
1867 statement. */
1868 if (name_registered_for_update_p (arg))
1869 return false;
1871 /* If we were already here, break the infinite cycle. */
1872 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1873 return true;
1875 tree var = arg;
1876 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1878 switch (gimple_code (def_stmt))
1880 case GIMPLE_ASSIGN:
1881 /* The RHS of the statement defining VAR must either have a
1882 constant length or come from another SSA_NAME with a constant
1883 length. */
1884 if (gimple_assign_single_p (def_stmt)
1885 || gimple_assign_unary_nop_p (def_stmt))
1887 tree rhs = gimple_assign_rhs1 (def_stmt);
1888 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1890 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1892 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1893 gimple_assign_rhs3 (def_stmt) };
1895 for (unsigned int i = 0; i < 2; i++)
1896 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1898 if (rkind != SRK_LENRANGE)
1899 return false;
1900 /* Set the upper bound to the maximum to prevent
1901 it from being adjusted in the next iteration but
1902 leave MINLEN and the more conservative MAXBOUND
1903 determined so far alone (or leave them null if
1904 they haven't been set yet). That the MINLEN is
1905 in fact zero can be determined from MAXLEN being
1906 unbounded but the discovered minimum is used for
1907 diagnostics. */
1908 pdata->maxlen = build_all_ones_cst (size_type_node);
1910 return true;
1912 return false;
1914 case GIMPLE_PHI:
1915 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1916 must have a constant length. */
1917 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1919 tree arg = gimple_phi_arg (def_stmt, i)->def;
1921 /* If this PHI has itself as an argument, we cannot
1922 determine the string length of this argument. However,
1923 if we can find a constant string length for the other
1924 PHI args then we can still be sure that this is a
1925 constant string length. So be optimistic and just
1926 continue with the next argument. */
1927 if (arg == gimple_phi_result (def_stmt))
1928 continue;
1930 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1932 if (rkind != SRK_LENRANGE)
1933 return false;
1934 /* Set the upper bound to the maximum to prevent
1935 it from being adjusted in the next iteration but
1936 leave MINLEN and the more conservative MAXBOUND
1937 determined so far alone (or leave them null if
1938 they haven't been set yet). That the MINLEN is
1939 in fact zero can be determined from MAXLEN being
1940 unbounded but the discovered minimum is used for
1941 diagnostics. */
1942 pdata->maxlen = build_all_ones_cst (size_type_node);
1945 return true;
1947 default:
1948 return false;
1952 /* Try to obtain the range of the lengths of the string(s) referenced
1953 by ARG, or the size of the largest array ARG refers to if the range
1954 of lengths cannot be determined, and store all in *PDATA which must
1955 be zero-initialized on input except PDATA->MAXBOUND may be set to
1956 a non-null tree node other than INTEGER_CST to request to have it
1957 set to the length of the longest string in a PHI. ELTSIZE is
1958 the expected size of the string element in bytes: 1 for char and
1959 some power of 2 for wide characters.
1960 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1961 for optimization. Returning false means that a nonzero PDATA->MINLEN
1962 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1963 is -1 (in that case, the actual range is indeterminate, i.e.,
1964 [0, PTRDIFF_MAX - 2]). */
1966 bool
1967 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1969 auto_bitmap visited;
1970 tree maxbound = pdata->maxbound;
1972 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1974 /* On failure extend the length range to an impossible maximum
1975 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1976 members can stay unchanged regardless. */
1977 pdata->minlen = ssize_int (0);
1978 pdata->maxlen = build_all_ones_cst (size_type_node);
1980 else if (!pdata->minlen)
1981 pdata->minlen = ssize_int (0);
1983 /* If it's unchanged from its initial non-null value, set the conservative
1984 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1985 if (maxbound && pdata->maxbound == maxbound)
1986 pdata->maxbound = build_all_ones_cst (size_type_node);
1988 return !integer_all_onesp (pdata->maxlen);
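/* An illustrative sketch (hypothetical caller, not from this file):
   for

     char a[8];
     c_strlen_data data = { };
     get_range_strlen (ptr_to_a, &data, 1);

   with unknown array contents, DATA.MINLEN is set to 0 and
   DATA.MAXLEN to 7 (the array size less the terminating nul), while
   for a bare pointer of unknown provenance DATA.MAXLEN is set to all
   ones and the function returns false. */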
1991 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1992 For ARG of pointer types, NONSTR indicates if the caller is prepared
1993 to handle unterminated strings. For integer ARG and when RKIND ==
1994 SRK_INT_VALUE, NONSTR must be null.
1996 If an unterminated array is discovered and our caller handles
1997 unterminated arrays, then bubble up the offending DECL and
1998 return the maximum size. Otherwise return NULL. */
2000 static tree
2001 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2003 /* A non-null NONSTR is meaningless when determining the maximum
2004 value of an integer ARG. */
2005 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2006 /* ARG must have an integral type when RKIND says so. */
2007 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2009 auto_bitmap visited;
2011 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2012 is unbounded. */
2013 c_strlen_data lendata = { };
2014 if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2015 lendata.maxlen = NULL_TREE;
2016 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2017 lendata.maxlen = NULL_TREE;
2019 if (nonstr)
2021 /* For callers prepared to handle unterminated arrays set
2022 *NONSTR to point to the declaration of the array and return
2023 the maximum length/size. */
2024 *nonstr = lendata.decl;
2025 return lendata.maxlen;
2028 /* Fail if the constant array isn't nul-terminated. */
2029 return lendata.decl ? NULL_TREE : lendata.maxlen;
2032 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2033 true, strictly less than) the lower bound of SIZE at compile time and false
2034 otherwise. */
2036 static bool
2037 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2039 if (len == NULL_TREE)
2040 return false;
2042 wide_int size_range[2];
2043 wide_int len_range[2];
2044 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2046 if (strict)
2047 return wi::ltu_p (len_range[1], size_range[0]);
2048 else
2049 return wi::leu_p (len_range[1], size_range[0]);
2052 return false;
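/* A sketch of how known_lower resolves (the ranges are hypothetical):
   if LEN has the range [0, 4] and SIZE the range [8, 16] at STMT,
   the upper bound of LEN (4) is compared with the lower bound of
   SIZE (8), so the function returns true (with or without STRICT);
   for overlapping ranges such as [0, 12] and [8, 16] it
   conservatively returns false. */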
2055 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2056 Return false if no simplification can be made; on success the call
2057 is replaced and true is returned. */
2059 static bool
2060 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2061 tree dest, tree src)
2063 gimple *stmt = gsi_stmt (*gsi);
2064 location_t loc = gimple_location (stmt);
2065 tree fn;
2067 /* If SRC and DEST are the same (and not volatile), return DEST. */
2068 if (operand_equal_p (src, dest, 0))
2070 /* Issue -Wrestrict unless the pointers are null (those do
2071 not point to objects and so do not indicate an overlap;
2072 such calls could be the result of sanitization and jump
2073 threading). */
2074 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2076 tree func = gimple_call_fndecl (stmt);
2078 warning_at (loc, OPT_Wrestrict,
2079 "%qD source argument is the same as destination",
2080 func);
2083 replace_call_with_value (gsi, dest);
2084 return true;
2087 if (optimize_function_for_size_p (cfun))
2088 return false;
2090 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2091 if (!fn)
2092 return false;
2094 /* Set to non-null if ARG refers to an unterminated array. */
2095 tree nonstr = NULL;
2096 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2098 if (nonstr)
2100 /* Avoid folding calls with unterminated arrays. */
2101 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2102 warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2103 suppress_warning (stmt, OPT_Wstringop_overread);
2104 return false;
2107 if (!len)
2108 return false;
2110 len = fold_convert_loc (loc, size_type_node, len);
2111 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2112 len = force_gimple_operand_gsi (gsi, len, true,
2113 NULL_TREE, true, GSI_SAME_STMT);
2114 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2115 replace_call_with_call_and_fold (gsi, repl);
2116 return true;
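/* An illustrative transformation (hypothetical variables): with

     char d[8];
     strcpy (d, "abc");

   the source length is known to be 3, so the call is folded to

     memcpy (d, "abc", 4);

   copying the terminating nul along with the string. */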
2119 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2120 Return false if no simplification can be made; on success the call
2121 is replaced and true is returned. */
2123 static bool
2124 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2125 tree dest, tree src, tree len)
2127 gimple *stmt = gsi_stmt (*gsi);
2128 location_t loc = gimple_location (stmt);
2129 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2131 /* If the LEN parameter is zero, return DEST. */
2132 if (integer_zerop (len))
2134 /* Avoid warning if the destination refers to an array/pointer
2135 decorated with attribute nonstring. */
2136 if (!nonstring)
2138 tree fndecl = gimple_call_fndecl (stmt);
2140 /* Warn about the lack of nul termination: the result is not
2141 a (nul-terminated) string. */
2142 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2143 if (slen && !integer_zerop (slen))
2144 warning_at (loc, OPT_Wstringop_truncation,
2145 "%qD destination unchanged after copying no bytes "
2146 "from a string of length %E",
2147 fndecl, slen);
2148 else
2149 warning_at (loc, OPT_Wstringop_truncation,
2150 "%qD destination unchanged after copying no bytes",
2151 fndecl);
2154 replace_call_with_value (gsi, dest);
2155 return true;
2158 /* We can't compare slen with len as constants below if len is not a
2159 constant. */
2160 if (TREE_CODE (len) != INTEGER_CST)
2161 return false;
2163 /* Now, we must be passed a constant src ptr parameter. */
2164 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2165 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2166 return false;
2168 /* The size of the source string including the terminating nul. */
2169 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2171 /* We do not support simplification of this case, though we do
2172 support it when expanding trees into RTL. */
2173 /* FIXME: generate a call to __builtin_memset. */
2174 if (tree_int_cst_lt (ssize, len))
2175 return false;
2177 /* Diagnose truncation that leaves the copy unterminated. */
2178 maybe_diag_stxncpy_trunc (*gsi, src, len);
2180 /* OK, transform into builtin memcpy. */
2181 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2182 if (!fn)
2183 return false;
2185 len = fold_convert_loc (loc, size_type_node, len);
2186 len = force_gimple_operand_gsi (gsi, len, true,
2187 NULL_TREE, true, GSI_SAME_STMT);
2188 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2189 replace_call_with_call_and_fold (gsi, repl);
2191 return true;
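/* An illustrative transformation (hypothetical variables): with

     char d[8];
     strncpy (d, "ab", 3);

   the source size including the nul (3) does not exceed the bound,
   so the call becomes

     memcpy (d, "ab", 3);

   while strncpy (d, "ab", 8), which would have to pad with nuls, is
   left alone. */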
2194 /* Fold function call to builtin strchr or strrchr.
2195 If both arguments are constant, evaluate and fold the result,
2196 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2197 In general strlen is significantly faster than strchr
2198 due to being a simpler operation. */
2199 static bool
2200 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2202 gimple *stmt = gsi_stmt (*gsi);
2203 tree str = gimple_call_arg (stmt, 0);
2204 tree c = gimple_call_arg (stmt, 1);
2205 location_t loc = gimple_location (stmt);
2206 const char *p;
2207 char ch;
2209 if (!gimple_call_lhs (stmt))
2210 return false;
2212 /* Avoid folding if the first argument is not a nul-terminated array.
2213 Defer warning until later. */
2214 if (!check_nul_terminated_array (NULL_TREE, str))
2215 return false;
2217 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2219 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2221 if (p1 == NULL)
2223 replace_call_with_value (gsi, integer_zero_node);
2224 return true;
2227 tree len = build_int_cst (size_type_node, p1 - p);
2228 gimple_seq stmts = NULL;
2229 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2230 POINTER_PLUS_EXPR, str, len);
2231 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2232 gsi_replace_with_seq_vops (gsi, stmts);
2233 return true;
2236 if (!integer_zerop (c))
2237 return false;
2239 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2240 if (is_strrchr && optimize_function_for_size_p (cfun))
2242 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2244 if (strchr_fn)
2246 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2247 replace_call_with_call_and_fold (gsi, repl);
2248 return true;
2251 return false;
2254 tree len;
2255 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2257 if (!strlen_fn)
2258 return false;
2260 /* Create newstr = strlen (str). */
2261 gimple_seq stmts = NULL;
2262 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2263 gimple_set_location (new_stmt, loc);
2264 len = create_tmp_reg_or_ssa_name (size_type_node);
2265 gimple_call_set_lhs (new_stmt, len);
2266 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2268 /* Create (str p+ strlen (str)). */
2269 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2270 POINTER_PLUS_EXPR, str, len);
2271 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2272 gsi_replace_with_seq_vops (gsi, stmts);
2273 /* gsi now points at the assignment to the lhs, get a
2274 stmt iterator to the strlen.
2275 ??? We can't use gsi_for_stmt as that doesn't work when the
2276 CFG isn't built yet. */
2277 gimple_stmt_iterator gsi2 = *gsi;
2278 gsi_prev (&gsi2);
2279 fold_stmt (&gsi2);
2280 return true;
2283 /* Fold function call to builtin strstr.
2284 If both arguments are constant, evaluate and fold the result,
2285 additionally fold strstr (x, "") into x and strstr (x, "c")
2286 into strchr (x, 'c'). */
2287 static bool
2288 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2290 gimple *stmt = gsi_stmt (*gsi);
2291 if (!gimple_call_lhs (stmt))
2292 return false;
2294 tree haystack = gimple_call_arg (stmt, 0);
2295 tree needle = gimple_call_arg (stmt, 1);
2297 /* Avoid folding if either argument is not a nul-terminated array.
2298 Defer warning until later. */
2299 if (!check_nul_terminated_array (NULL_TREE, haystack)
2300 || !check_nul_terminated_array (NULL_TREE, needle))
2301 return false;
2303 const char *q = c_getstr (needle);
2304 if (q == NULL)
2305 return false;
2307 if (const char *p = c_getstr (haystack))
2309 const char *r = strstr (p, q);
2311 if (r == NULL)
2313 replace_call_with_value (gsi, integer_zero_node);
2314 return true;
2317 tree len = build_int_cst (size_type_node, r - p);
2318 gimple_seq stmts = NULL;
2319 gimple *new_stmt
2320 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2321 haystack, len);
2322 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2323 gsi_replace_with_seq_vops (gsi, stmts);
2324 return true;
2327 /* For strstr (x, "") return x. */
2328 if (q[0] == '\0')
2330 replace_call_with_value (gsi, haystack);
2331 return true;
2334 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2335 if (q[1] == '\0')
2337 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2338 if (strchr_fn)
2340 tree c = build_int_cst (integer_type_node, q[0]);
2341 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2342 replace_call_with_call_and_fold (gsi, repl);
2343 return true;
2347 return false;
2350 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2351 to the call.
2353 Return false if no simplification was possible; otherwise replace
2354 the call and return true.
2356 The replacement may be a constant or another expression which
2357 computes the same value, but in a more efficient manner (including
2358 calls to other builtin functions such as strlen and memcpy). */
2368 static bool
2369 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2371 gimple *stmt = gsi_stmt (*gsi);
2372 location_t loc = gimple_location (stmt);
2374 const char *p = c_getstr (src);
2376 /* If the string length is zero, return the dst parameter. */
2377 if (p && *p == '\0')
2379 replace_call_with_value (gsi, dst);
2380 return true;
2383 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2384 return false;
2386 /* See if we can store by pieces into (dst + strlen(dst)). */
2387 tree newdst;
2388 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2389 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2391 if (!strlen_fn || !memcpy_fn)
2392 return false;
2394 /* If the length of the source string isn't computable don't
2395 split strcat into strlen and memcpy. */
2396 tree len = get_maxval_strlen (src, SRK_STRLEN);
2397 if (! len)
2398 return false;
2400 /* Create strlen (dst). */
2401 gimple_seq stmts = NULL, stmts2;
2402 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2403 gimple_set_location (repl, loc);
2404 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2405 gimple_call_set_lhs (repl, newdst);
2406 gimple_seq_add_stmt_without_update (&stmts, repl);
2408 /* Create (dst p+ strlen (dst)). */
2409 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2410 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2411 gimple_seq_add_seq_without_update (&stmts, stmts2);
2413 len = fold_convert_loc (loc, size_type_node, len);
2414 len = size_binop_loc (loc, PLUS_EXPR, len,
2415 build_int_cst (size_type_node, 1));
2416 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2417 gimple_seq_add_seq_without_update (&stmts, stmts2);
2419 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2420 gimple_seq_add_stmt_without_update (&stmts, repl);
2421 if (gimple_call_lhs (stmt))
2423 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2424 gimple_seq_add_stmt_without_update (&stmts, repl);
2425 gsi_replace_with_seq_vops (gsi, stmts);
2426 /* gsi now points at the assignment to the lhs, get a
2427 stmt iterator to the memcpy call.
2428 ??? We can't use gsi_for_stmt as that doesn't work when the
2429 CFG isn't built yet. */
2430 gimple_stmt_iterator gsi2 = *gsi;
2431 gsi_prev (&gsi2);
2432 fold_stmt (&gsi2);
2434 else
2436 gsi_replace_with_seq_vops (gsi, stmts);
2437 fold_stmt (gsi);
2439 return true;
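/* An illustrative transformation (hypothetical variables): with

     char d[8];
     strcat (d, "ab");

   the source length is known to be 2, so the call is split into

     tmp = strlen (d);
     memcpy (d + tmp, "ab", 3);

   with the terminating nul stored by the memcpy. */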
2442 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2443 are the arguments to the call. */
2445 static bool
2446 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2448 gimple *stmt = gsi_stmt (*gsi);
2449 tree dest = gimple_call_arg (stmt, 0);
2450 tree src = gimple_call_arg (stmt, 1);
2451 tree size = gimple_call_arg (stmt, 2);
2452 tree fn;
2453 const char *p;
2456 p = c_getstr (src);
2457 /* If the SRC parameter is "", return DEST. */
2458 if (p && *p == '\0')
2460 replace_call_with_value (gsi, dest);
2461 return true;
2464 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2465 return false;
2467 /* If __builtin_strcat_chk is used, assume strcat is available. */
2468 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2469 if (!fn)
2470 return false;
2472 gimple *repl = gimple_build_call (fn, 2, dest, src);
2473 replace_call_with_call_and_fold (gsi, repl);
2474 return true;
2477 /* Simplify a call to the strncat builtin. */
2479 static bool
2480 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2482 gimple *stmt = gsi_stmt (*gsi);
2483 tree dst = gimple_call_arg (stmt, 0);
2484 tree src = gimple_call_arg (stmt, 1);
2485 tree len = gimple_call_arg (stmt, 2);
2486 tree src_len = c_strlen (src, 1);
2488 /* If the requested length is zero, or the src parameter string
2489 length is zero, return the dst parameter. */
2490 if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2492 replace_call_with_value (gsi, dst);
2493 return true;
2496 /* Return early if the requested len is less than the string length.
2497 Warnings will be issued elsewhere later. */
2498 if (!src_len || known_lower (stmt, len, src_len, true))
2499 return false;
2501 /* Warn on constant LEN. */
2502 if (TREE_CODE (len) == INTEGER_CST)
2504 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2505 tree dstsize;
2507 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2508 && TREE_CODE (dstsize) == INTEGER_CST)
2510 int cmpdst = tree_int_cst_compare (len, dstsize);
2512 if (cmpdst >= 0)
2514 tree fndecl = gimple_call_fndecl (stmt);
2516 /* Strncat copies (at most) LEN bytes and always appends
2517 the terminating NUL so the specified bound should never
2518 be equal to (or greater than) the size of the destination.
2519 If it is, the copy could overflow. */
2520 location_t loc = gimple_location (stmt);
2521 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2522 cmpdst == 0
2523 ? G_("%qD specified bound %E equals "
2524 "destination size")
2525 : G_("%qD specified bound %E exceeds "
2526 "destination size %E"),
2527 fndecl, len, dstsize);
2528 if (nowarn)
2529 suppress_warning (stmt, OPT_Wstringop_overflow_);
2533 if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2534 && tree_int_cst_compare (src_len, len) == 0)
2536 tree fndecl = gimple_call_fndecl (stmt);
2537 location_t loc = gimple_location (stmt);
2539 /* To avoid possible overflow the specified bound should also
2540 not be equal to the length of the source, even when the size
2541 of the destination is unknown (it's not an uncommon mistake
2542 to specify as the bound to strncat the length of the source). */
2543 if (warning_at (loc, OPT_Wstringop_overflow_,
2544 "%qD specified bound %E equals source length",
2545 fndecl, len))
2546 suppress_warning (stmt, OPT_Wstringop_overflow_);
2550 if (!known_lower (stmt, src_len, len))
2551 return false;
2553 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2555 /* If the replacement _DECL isn't initialized, don't do the
2556 transformation. */
2557 if (!fn)
2558 return false;
2560 /* Otherwise, emit a call to strcat. */
2561 gcall *repl = gimple_build_call (fn, 2, dst, src);
2562 replace_call_with_call_and_fold (gsi, repl);
2563 return true;
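/* An illustrative transformation (hypothetical variables): when the
   source length is known not to exceed the bound, as in

     strncat (d, "ab", 4);

   the bound cannot cause truncation and the call is simplified to

     strcat (d, "ab");

   bounds equal to the source length or to the destination size are
   instead diagnosed above. */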
2566 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2567 LEN, and SIZE. */
2569 static bool
2570 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2572 gimple *stmt = gsi_stmt (*gsi);
2573 tree dest = gimple_call_arg (stmt, 0);
2574 tree src = gimple_call_arg (stmt, 1);
2575 tree len = gimple_call_arg (stmt, 2);
2576 tree size = gimple_call_arg (stmt, 3);
2577 tree fn;
2578 const char *p;
2580 p = c_getstr (src);
2581 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2582 if ((p && *p == '\0')
2583 || integer_zerop (len))
2585 replace_call_with_value (gsi, dest);
2586 return true;
2589 if (! integer_all_onesp (size))
2591 tree src_len = c_strlen (src, 1);
2592 if (known_lower (stmt, src_len, len))
2594 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2595 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2596 if (!fn)
2597 return false;
2599 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2600 replace_call_with_call_and_fold (gsi, repl);
2601 return true;
2603 return false;
2606 /* If __builtin_strncat_chk is used, assume strncat is available. */
2607 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2608 if (!fn)
2609 return false;
2611 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2612 replace_call_with_call_and_fold (gsi, repl);
2613 return true;
2616 /* Build and append gimple statements to STMTS that load the first
2617 character of the memory location identified by STR. LOC is the
2618 location of the statement. */
2620 static tree
2621 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2623 tree var;
2625 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2626 tree cst_uchar_ptr_node
2627 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2628 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2630 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2631 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2632 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2634 gimple_assign_set_lhs (stmt, var);
2635 gimple_seq_add_stmt_without_update (stmts, stmt);
2637 return var;
2640 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
2642 static bool
2643 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2645 gimple *stmt = gsi_stmt (*gsi);
2646 tree callee = gimple_call_fndecl (stmt);
2647 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2649 tree type = integer_type_node;
2650 tree str1 = gimple_call_arg (stmt, 0);
2651 tree str2 = gimple_call_arg (stmt, 1);
2652 tree lhs = gimple_call_lhs (stmt);
2654 tree bound_node = NULL_TREE;
2655 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2657 /* Handle strncmp and strncasecmp functions. */
2658 if (gimple_call_num_args (stmt) == 3)
2660 bound_node = gimple_call_arg (stmt, 2);
2661 if (tree_fits_uhwi_p (bound_node))
2662 bound = tree_to_uhwi (bound_node);
2665 /* If the BOUND parameter is zero, return zero. */
2666 if (bound == 0)
2668 replace_call_with_value (gsi, integer_zero_node);
2669 return true;
2672 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2673 if (operand_equal_p (str1, str2, 0))
2675 replace_call_with_value (gsi, integer_zero_node);
2676 return true;
2679 /* Initially set to the number of characters, including the terminating
2680 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2681 the array Sx is not terminated by a nul.
2682 For nul-terminated strings LENx is then adjusted to the string's
2683 length so that LENx == NULPOSx holds. */
2684 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2685 const char *p1 = getbyterep (str1, &len1);
2686 const char *p2 = getbyterep (str2, &len2);
2688 /* The position of the terminating nul character if one exists, otherwise
2689 a value greater than LENx. */
2690 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2692 if (p1)
2694 size_t n = strnlen (p1, len1);
2695 if (n < len1)
2696 len1 = nulpos1 = n;
2699 if (p2)
2701 size_t n = strnlen (p2, len2);
2702 if (n < len2)
2703 len2 = nulpos2 = n;
2706 /* For known strings, return an immediate value. */
2707 if (p1 && p2)
2709 int r = 0;
2710 bool known_result = false;
2712 switch (fcode)
2714 case BUILT_IN_STRCMP:
2715 case BUILT_IN_STRCMP_EQ:
2716 if (len1 != nulpos1 || len2 != nulpos2)
2717 break;
2719 r = strcmp (p1, p2);
2720 known_result = true;
2721 break;
2723 case BUILT_IN_STRNCMP:
2724 case BUILT_IN_STRNCMP_EQ:
2726 if (bound == HOST_WIDE_INT_M1U)
2727 break;
2729 /* Reduce the bound to be no more than the length
2730 of the shorter of the two strings, or the sizes
2731 of the unterminated arrays. */
2732 unsigned HOST_WIDE_INT n = bound;
2734 if (len1 == nulpos1 && len1 < n)
2735 n = len1 + 1;
2736 if (len2 == nulpos2 && len2 < n)
2737 n = len2 + 1;
2739 if (MIN (nulpos1, nulpos2) + 1 < n)
2740 break;
2742 r = strncmp (p1, p2, n);
2743 known_result = true;
2744 break;
2746 /* The only handleable situation is where the strings are equal (result
2747 0), which is already handled by the operand_equal_p case. */
2748 case BUILT_IN_STRCASECMP:
2749 break;
2750 case BUILT_IN_STRNCASECMP:
2752 if (bound == HOST_WIDE_INT_M1U)
2753 break;
2754 r = strncmp (p1, p2, bound);
2755 if (r == 0)
2756 known_result = true;
2757 break;
2759 default:
2760 gcc_unreachable ();
2763 if (known_result)
2765 replace_call_with_value (gsi, build_cmp_result (type, r));
2766 return true;
2770 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2771 || fcode == BUILT_IN_STRCMP
2772 || fcode == BUILT_IN_STRCMP_EQ
2773 || fcode == BUILT_IN_STRCASECMP;
2775 location_t loc = gimple_location (stmt);
2777 /* If the second arg is "", return *(const unsigned char*)arg1. */
2778 if (p2 && *p2 == '\0' && nonzero_bound)
2780 gimple_seq stmts = NULL;
2781 tree var = gimple_load_first_char (loc, str1, &stmts);
2782 if (lhs)
2784 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2785 gimple_seq_add_stmt_without_update (&stmts, stmt);
2788 gsi_replace_with_seq_vops (gsi, stmts);
2789 return true;
2792 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2793 if (p1 && *p1 == '\0' && nonzero_bound)
2795 gimple_seq stmts = NULL;
2796 tree var = gimple_load_first_char (loc, str2, &stmts);
2798 if (lhs)
2800 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2801 stmt = gimple_build_assign (c, NOP_EXPR, var);
2802 gimple_seq_add_stmt_without_update (&stmts, stmt);
2804 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2805 gimple_seq_add_stmt_without_update (&stmts, stmt);
2808 gsi_replace_with_seq_vops (gsi, stmts);
2809 return true;
2812 /* If BOUND is one, return an expression corresponding to
2813 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2814 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2816 gimple_seq stmts = NULL;
2817 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2818 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2820 if (lhs)
2822 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2823 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2824 gimple_seq_add_stmt_without_update (&stmts, convert1);
2826 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2827 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2828 gimple_seq_add_stmt_without_update (&stmts, convert2);
2830 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2831 gimple_seq_add_stmt_without_update (&stmts, stmt);
2834 gsi_replace_with_seq_vops (gsi, stmts);
2835 return true;
2838 /* If BOUND is greater than the length of one constant string,
2839 and the other argument is also a nul-terminated string, replace
2840 strncmp with strcmp. */
2841 if (fcode == BUILT_IN_STRNCMP
2842 && bound > 0 && bound < HOST_WIDE_INT_M1U
2843 && ((p2 && len2 < bound && len2 == nulpos2)
2844 || (p1 && len1 < bound && len1 == nulpos1)))
2846 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2847 if (!fn)
2848 return false;
2849 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2850 replace_call_with_call_and_fold (gsi, repl);
2851 return true;
2854 return false;
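/* A few illustrative folds performed above (hypothetical variables):

     strcmp (s, s)         ->  0
     strcmp ("ab", "abc")  ->  negative constant
     strcmp (s, "")        ->  *(const unsigned char *) s
     strncmp (s, t, 1)     ->  *(const unsigned char *) s
                               - *(const unsigned char *) t
     strncmp (s, "ab", 8)  ->  strcmp (s, "ab")  */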
2857 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2859 static bool
2860 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2862 gimple *stmt = gsi_stmt (*gsi);
2863 tree lhs = gimple_call_lhs (stmt);
2864 tree arg1 = gimple_call_arg (stmt, 0);
2865 tree arg2 = gimple_call_arg (stmt, 1);
2866 tree len = gimple_call_arg (stmt, 2);
2868 /* If the LEN parameter is zero, return zero. */
2869 if (integer_zerop (len))
2871 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2872 return true;
2875 char c;
2876 if (TREE_CODE (arg2) != INTEGER_CST
2877 || !tree_fits_uhwi_p (len)
2878 || !target_char_cst_p (arg2, &c))
2879 return false;
2881 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2882 unsigned HOST_WIDE_INT string_length;
2883 const char *p1 = getbyterep (arg1, &string_length);
2885 if (p1)
2887 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2888 if (r == NULL)
2890 tree mem_size, offset_node;
2891 byte_representation (arg1, &offset_node, &mem_size, NULL);
2892 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2893 ? 0 : tree_to_uhwi (offset_node);
2894 /* MEM_SIZE is the size of the array the string literal
2895 is stored in. */
2896 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2897 gcc_checking_assert (string_length <= string_size);
2898 if (length <= string_size)
2900 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2901 return true;
2904 else
2906 unsigned HOST_WIDE_INT offset = r - p1;
2907 gimple_seq stmts = NULL;
2908 if (lhs != NULL_TREE)
2910 tree offset_cst = build_int_cst (sizetype, offset);
2911 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2912 arg1, offset_cst);
2913 gimple_seq_add_stmt_without_update (&stmts, stmt);
2915 else
2916 gimple_seq_add_stmt_without_update (&stmts,
2917 gimple_build_nop ());
2919 gsi_replace_with_seq_vops (gsi, stmts);
2920 return true;
2924 return false;
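/* Illustrative folds (hypothetical uses):

     memchr ("hello", 'l', 5)  ->  "hello" + 2
     memchr ("hello", 'z', 5)  ->  (void *) 0

   the latter only when the searched length is known not to read past
   the end of the underlying array. */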
2927 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2928 to the call. UNLOCKED is true if this is actually a call to
2929 fputs_unlocked. Return false if no simplification was possible,
2930 true otherwise. */
2934 static bool
2935 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2936 tree arg0, tree arg1,
2937 bool unlocked)
2939 gimple *stmt = gsi_stmt (*gsi);
2941 /* If we're using an unlocked function, assume the other unlocked
2942 functions exist explicitly. */
2943 tree const fn_fputc = (unlocked
2944 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2945 : builtin_decl_implicit (BUILT_IN_FPUTC));
2946 tree const fn_fwrite = (unlocked
2947 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2948 : builtin_decl_implicit (BUILT_IN_FWRITE));
2950 /* If the return value is used, don't do the transformation. */
2951 if (gimple_call_lhs (stmt))
2952 return false;
2954 /* Get the length of the string passed to fputs. If the length
2955 can't be determined, punt. */
2956 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2957 if (!len || TREE_CODE (len) != INTEGER_CST)
2958 return false;
2960 switch (compare_tree_int (len, 1))
2962 case -1: /* length is 0, delete the call entirely. */
2963 replace_call_with_value (gsi, integer_zero_node);
2964 return true;
2966 case 0: /* length is 1, call fputc. */
2968 const char *p = c_getstr (arg0);
2969 if (p != NULL)
2971 if (!fn_fputc)
2972 return false;
2974 gimple *repl
2975 = gimple_build_call (fn_fputc, 2,
2976 build_int_cst (integer_type_node, p[0]),
2977 arg1);
2978 replace_call_with_call_and_fold (gsi, repl);
2979 return true;
2982 /* FALLTHROUGH */
2983 case 1: /* length is greater than 1, call fwrite. */
2985 /* If optimizing for size keep fputs. */
2986 if (optimize_function_for_size_p (cfun))
2987 return false;
2988 /* New argument list transforming fputs(string, stream) to
2989 fwrite(string, 1, len, stream). */
2990 if (!fn_fwrite)
2991 return false;
2993 gimple *repl
2994 = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
2995 fold_convert (size_type_node, len), arg1);
2996 replace_call_with_call_and_fold (gsi, repl);
2997 return true;
2999 default:
3000 gcc_unreachable ();
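/* Illustrative folds when the return value is unused (hypothetical
   stream F):

     fputs ("", f)       ->  call removed
     fputs ("x", f)      ->  fputc ('x', f)
     fputs ("hello", f)  ->  fwrite ("hello", 1, 5, f)

   with the fwrite form suppressed when optimizing for size. */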
3004 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3005 DEST, SRC, LEN, and SIZE are the arguments to the call.
3006 FCODE is the BUILT_IN_* code of the builtin. Return false if no
3007 simplification was possible, true otherwise. */
3010 static bool
3011 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
3012 tree dest, tree src, tree len, tree size,
3013 enum built_in_function fcode)
3015 gimple *stmt = gsi_stmt (*gsi);
3016 location_t loc = gimple_location (stmt);
3017 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3018 tree fn;
3020 /* If SRC and DEST are the same (and not volatile), return DEST
3021 (resp. DEST+LEN for __mempcpy_chk). */
3022 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3024 if (fcode != BUILT_IN_MEMPCPY_CHK)
3026 replace_call_with_value (gsi, dest);
3027 return true;
3029 else
3031 gimple_seq stmts = NULL;
3032 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3033 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3034 TREE_TYPE (dest), dest, len);
3035 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3036 replace_call_with_value (gsi, temp);
3037 return true;
3041 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3042 if (! integer_all_onesp (size)
3043 && !known_lower (stmt, len, size)
3044 && !known_lower (stmt, maxlen, size))
3046 /* Neither MAXLEN nor LEN can be proved to be less than SIZE; at
3047 least try to optimize (void) __mempcpy_chk () into
3048 (void) __memcpy_chk (). */
3049 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3051 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3052 if (!fn)
3053 return false;
3055 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3056 replace_call_with_call_and_fold (gsi, repl);
3057 return true;
3059 return false;
3062 fn = NULL_TREE;
3063 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3064 mem{cpy,pcpy,move,set} is available. */
3065 switch (fcode)
3067 case BUILT_IN_MEMCPY_CHK:
3068 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3069 break;
3070 case BUILT_IN_MEMPCPY_CHK:
3071 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3072 break;
3073 case BUILT_IN_MEMMOVE_CHK:
3074 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3075 break;
3076 case BUILT_IN_MEMSET_CHK:
3077 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3078 break;
3079 default:
3080 break;
3083 if (!fn)
3084 return false;
3086 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3087 replace_call_with_call_and_fold (gsi, repl);
3088 return true;
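/* An illustrative transformation (hypothetical variables): when the
   length is known to fit in the destination, e.g.

     __memcpy_chk (d, s, 8, 16)  ->  memcpy (d, s, 8)

   and when it is not, only

     (void) __mempcpy_chk (d, s, n, sz)
       ->  (void) __memcpy_chk (d, s, n, sz)

   is attempted, keeping the runtime check. */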
3091 /* Print a message in the dump file recording transformation of FROM to TO. */
3093 static void
3094 dump_transformation (gcall *from, gcall *to)
3096 if (dump_enabled_p ())
3097 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3098 gimple_call_fn (from), gimple_call_fn (to));
3101 /* Fold a call to the __st[rp]cpy_chk builtin.
3102 DEST, SRC, and SIZE are the arguments to the call.
3103 FCODE is the BUILT_IN_* code of the builtin. Return false if no
3104 simplification was possible, true otherwise. */
3107 static bool
3108 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3109 tree dest,
3110 tree src, tree size,
3111 enum built_in_function fcode)
3113 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3114 location_t loc = gimple_location (stmt);
3115 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3116 tree len, fn;
3118 /* If SRC and DEST are the same (and not volatile), return DEST. */
3119 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3121 /* Issue -Wrestrict unless the pointers are null (those do
3122 not point to objects and so do not indicate an overlap;
3123 such calls could be the result of sanitization and jump
3124 threading). */
3125 if (!integer_zerop (dest)
3126 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3128 tree func = gimple_call_fndecl (stmt);
3130 warning_at (loc, OPT_Wrestrict,
3131 "%qD source argument is the same as destination",
3132 func);
3135 replace_call_with_value (gsi, dest);
3136 return true;
3139 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3140 if (! integer_all_onesp (size))
3142 len = c_strlen (src, 1);
3143 if (!known_lower (stmt, len, size, true)
3144 && !known_lower (stmt, maxlen, size, true))
3146 if (fcode == BUILT_IN_STPCPY_CHK)
3148 if (! ignore)
3149 return false;
3151 /* If return value of __stpcpy_chk is ignored,
3152 optimize into __strcpy_chk. */
3153 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3154 if (!fn)
3155 return false;
3157 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3158 replace_call_with_call_and_fold (gsi, repl);
3159 return true;
3162 if (! len || TREE_SIDE_EFFECTS (len))
3163 return false;
3165 /* If c_strlen returned something, but not provably less than size,
3166 transform __strcpy_chk into __memcpy_chk. */
3167 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3168 if (!fn)
3169 return false;
3171 gimple_seq stmts = NULL;
3172 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3173 len = gimple_convert (&stmts, loc, size_type_node, len);
3174 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3175 build_int_cst (size_type_node, 1));
3176 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3177 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3178 replace_call_with_call_and_fold (gsi, repl);
3179 return true;
3183 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3184 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3185 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3186 if (!fn)
3187 return false;
3189 gcall *repl = gimple_build_call (fn, 2, dest, src);
3190 dump_transformation (stmt, repl);
3191 replace_call_with_call_and_fold (gsi, repl);
3192 return true;
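/* An illustrative transformation (hypothetical variables): when the
   source is known to fit, e.g.

     char d[8];
     __strcpy_chk (d, "abc", 8)  ->  strcpy (d, "abc")

   and when the length is constant but cannot be proved smaller than
   SIZE, the call becomes __memcpy_chk (d, src, len + 1, 8), which
   preserves the runtime check. */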
3195 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3196 are the arguments to the call. FCODE is the BUILT_IN_* code of
3197 the builtin. Return false if no simplification was possible,
3198 true otherwise. */
3200 static bool
3201 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3202 tree dest, tree src,
3203 tree len, tree size,
3204 enum built_in_function fcode)
3206 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3207 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3208 tree fn;
3210 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3211 if (! integer_all_onesp (size)
3212 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3214 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3216 /* If return value of __stpncpy_chk is ignored,
3217 optimize into __strncpy_chk. */
3218 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3219 if (fn)
3221 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3222 replace_call_with_call_and_fold (gsi, repl);
3223 return true;
3226 return false;
3229 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3230 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3231 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3232 if (!fn)
3233 return false;
3235 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3236 dump_transformation (stmt, repl);
3237 replace_call_with_call_and_fold (gsi, repl);
3238 return true;
3241 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3242 Return false if no simplification can be made, true otherwise. */
3244 static bool
3245 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3247 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3248 location_t loc = gimple_location (stmt);
3249 tree dest = gimple_call_arg (stmt, 0);
3250 tree src = gimple_call_arg (stmt, 1);
3251 tree fn, lenp1;
3253 /* If the result is unused, replace stpcpy with strcpy. */
3254 if (gimple_call_lhs (stmt) == NULL_TREE)
3256 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3257 if (!fn)
3258 return false;
3259 gimple_call_set_fndecl (stmt, fn);
3260 fold_stmt (gsi);
3261 return true;
3264 /* Set to non-null if ARG refers to an unterminated array. */
3265 c_strlen_data data = { };
3266 /* The size of the unterminated array if SRC refers to one. */
3267 tree size;
3268 /* True if the size is exact/constant, false if it's the lower bound
3269 of a range. */
3270 bool exact;
3271 tree len = c_strlen (src, 1, &data, 1);
3272 if (!len
3273 || TREE_CODE (len) != INTEGER_CST)
3275 data.decl = unterminated_array (src, &size, &exact);
3276 if (!data.decl)
3277 return false;
3280 if (data.decl)
3282 /* Avoid folding calls with unterminated arrays. */
3283 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3284 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3285 exact);
3286 suppress_warning (stmt, OPT_Wstringop_overread);
3287 return false;
3290 if (optimize_function_for_size_p (cfun)
3291 /* If length is zero it's small enough. */
3292 && !integer_zerop (len))
3293 return false;
3295 /* If the source has a known length replace stpcpy with memcpy. */
3296 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3297 if (!fn)
3298 return false;
3300 gimple_seq stmts = NULL;
3301 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3302 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3303 tem, build_int_cst (size_type_node, 1));
3304 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3305 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3306 gimple_move_vops (repl, stmt);
3307 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3308 /* Replace the result with dest + len. */
3309 stmts = NULL;
3310 tem = gimple_convert (&stmts, loc, sizetype, len);
3311 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3312 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3313 POINTER_PLUS_EXPR, dest, tem);
3314 gsi_replace (gsi, ret, false);
3315 /* Finally fold the memcpy call. */
3316 gimple_stmt_iterator gsi2 = *gsi;
3317 gsi_prev (&gsi2);
3318 fold_stmt (&gsi2);
3319 return true;
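/* An illustrative transformation (hypothetical variables): with

     char d[8];
     p = stpcpy (d, "abc");

   the known source length of 3 turns the call into

     memcpy (d, "abc", 4);
     p = d + 3;

   while a call whose result is unused degrades to plain strcpy. */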
3322 /* Fold a call to __{,v}snprintf_chk at *GSI. Return false if a normal
3323 call should be emitted rather than folding the call inline. FCODE
3324 is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. */
3328 static bool
3329 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3330 enum built_in_function fcode)
3332 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3333 tree dest, size, len, fn, fmt, flag;
3334 const char *fmt_str;
3336 /* Verify the required arguments in the original call. */
3337 if (gimple_call_num_args (stmt) < 5)
3338 return false;
3340 dest = gimple_call_arg (stmt, 0);
3341 len = gimple_call_arg (stmt, 1);
3342 flag = gimple_call_arg (stmt, 2);
3343 size = gimple_call_arg (stmt, 3);
3344 fmt = gimple_call_arg (stmt, 4);
3346 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3347 if (! integer_all_onesp (size)
3348 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3349 return false;
3351 if (!init_target_chars ())
3352 return false;
3354 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3355 or if format doesn't contain % chars or is "%s". */
3356 if (! integer_zerop (flag))
3358 fmt_str = c_getstr (fmt);
3359 if (fmt_str == NULL)
3360 return false;
3361 if (strchr (fmt_str, target_percent) != NULL
3362 && strcmp (fmt_str, target_percent_s))
3363 return false;
3366 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3367 available. */
3368 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3369 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3370 if (!fn)
3371 return false;
3373 /* Replace the called function and the first 5 arguments by 3, retaining
3374 trailing varargs. */
3375 gimple_call_set_fndecl (stmt, fn);
3376 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3377 gimple_call_set_arg (stmt, 0, dest);
3378 gimple_call_set_arg (stmt, 1, len);
3379 gimple_call_set_arg (stmt, 2, fmt);
3380 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3381 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3382 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3383 fold_stmt (gsi);
3384 return true;
3387 /* Fold a call to __{,v}sprintf_chk at *GSI.
3388 Return false if a normal call should be emitted rather than
3389 folding the call inline. FCODE is either BUILT_IN_SPRINTF_CHK
3390 or BUILT_IN_VSPRINTF_CHK. */
3392 static bool
3393 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3394 enum built_in_function fcode)
3396 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3397 tree dest, size, len, fn, fmt, flag;
3398 const char *fmt_str;
3399 unsigned nargs = gimple_call_num_args (stmt);
3401 /* Verify the required arguments in the original call. */
3402 if (nargs < 4)
3403 return false;
3404 dest = gimple_call_arg (stmt, 0);
3405 flag = gimple_call_arg (stmt, 1);
3406 size = gimple_call_arg (stmt, 2);
3407 fmt = gimple_call_arg (stmt, 3);
3409 len = NULL_TREE;
3411 if (!init_target_chars ())
3412 return false;
3414 /* Check whether the format is a literal string constant. */
3415 fmt_str = c_getstr (fmt);
3416 if (fmt_str != NULL)
3418 /* If the format doesn't contain % args or %%, we know the size. */
3419 if (strchr (fmt_str, target_percent) == 0)
3421 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3422 len = build_int_cstu (size_type_node, strlen (fmt_str));
3424 /* If the format is "%s" and the first ... argument is a string literal,
3425 we know the size too. */
3426 else if (fcode == BUILT_IN_SPRINTF_CHK
3427 && strcmp (fmt_str, target_percent_s) == 0)
3429 tree arg;
3431 if (nargs == 5)
3433 arg = gimple_call_arg (stmt, 4);
3434 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3435 len = c_strlen (arg, 1);
3440 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3441 return false;
3443 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3444 or if format doesn't contain % chars or is "%s". */
3445 if (! integer_zerop (flag))
3447 if (fmt_str == NULL)
3448 return false;
3449 if (strchr (fmt_str, target_percent) != NULL
3450 && strcmp (fmt_str, target_percent_s))
3451 return false;
3454 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3455 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3456 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3457 if (!fn)
3458 return false;
3460 /* Replace the called function and the first 4 arguments by 2, retaining
3461 trailing varargs. */
3462 gimple_call_set_fndecl (stmt, fn);
3463 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3464 gimple_call_set_arg (stmt, 0, dest);
3465 gimple_call_set_arg (stmt, 1, fmt);
3466 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3467 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3468 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3469 fold_stmt (gsi);
3470 return true;
3473 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3474 ORIG may be null if this is a 2-argument call. We don't attempt to
3475 simplify calls with more than 3 arguments.
3477 Return true if simplification was possible, otherwise false. */
3479 bool
3480 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3482 gimple *stmt = gsi_stmt (*gsi);
3484 /* Verify the required arguments in the original call. We deal with two
3485 types of sprintf() calls: 'sprintf (str, fmt)' and
3486 'sprintf (dest, "%s", orig)'. */
3487 if (gimple_call_num_args (stmt) > 3)
3488 return false;
3490 tree orig = NULL_TREE;
3491 if (gimple_call_num_args (stmt) == 3)
3492 orig = gimple_call_arg (stmt, 2);
3494 /* Check whether the format is a literal string constant. */
3495 tree fmt = gimple_call_arg (stmt, 1);
3496 const char *fmt_str = c_getstr (fmt);
3497 if (fmt_str == NULL)
3498 return false;
3500 tree dest = gimple_call_arg (stmt, 0);
3502 if (!init_target_chars ())
3503 return false;
3505 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3506 if (!fn)
3507 return false;
3509 /* If the format doesn't contain % args or %%, use strcpy. */
3510 if (strchr (fmt_str, target_percent) == NULL)
3512 /* Don't optimize sprintf (buf, "abc", ptr++). */
3513 if (orig)
3514 return false;
3516 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3517 'format' is known to contain no % formats. */
3518 gimple_seq stmts = NULL;
3519 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3521 /* Propagate the NO_WARNING bit to avoid issuing the same
3522 warning more than once. */
3523 copy_warning (repl, stmt);
3525 gimple_seq_add_stmt_without_update (&stmts, repl);
3526 if (tree lhs = gimple_call_lhs (stmt))
3528 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3529 strlen (fmt_str)));
3530 gimple_seq_add_stmt_without_update (&stmts, repl);
3531 gsi_replace_with_seq_vops (gsi, stmts);
3532 /* gsi now points at the assignment to the lhs, get a
3533 stmt iterator to the strcpy call.
3534 ??? We can't use gsi_for_stmt as that doesn't work when the
3535 CFG isn't built yet. */
3536 gimple_stmt_iterator gsi2 = *gsi;
3537 gsi_prev (&gsi2);
3538 fold_stmt (&gsi2);
3540 else
3542 gsi_replace_with_seq_vops (gsi, stmts);
3543 fold_stmt (gsi);
3545 return true;
3548 /* If the format is "%s", convert the call to strcpy; a used result is set to the source length. */
3549 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3551 /* Don't crash on sprintf (str1, "%s"). */
3552 if (!orig)
3553 return false;
3555 /* Don't fold calls with source arguments of invalid (nonpointer)
3556 types. */
3557 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3558 return false;
3560 tree orig_len = NULL_TREE;
3561 if (gimple_call_lhs (stmt))
3563 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3564 if (!orig_len)
3565 return false;
3568 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3569 gimple_seq stmts = NULL;
3570 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3572 /* Propagate the NO_WARNING bit to avoid issuing the same
3573 warning more than once. */
3574 copy_warning (repl, stmt);
3576 gimple_seq_add_stmt_without_update (&stmts, repl);
3577 if (tree lhs = gimple_call_lhs (stmt))
3579 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3580 TREE_TYPE (orig_len)))
3581 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3582 repl = gimple_build_assign (lhs, orig_len);
3583 gimple_seq_add_stmt_without_update (&stmts, repl);
3584 gsi_replace_with_seq_vops (gsi, stmts);
3585 /* gsi now points at the assignment to the lhs, get a
3586 stmt iterator to the strcpy call.
3587 ??? We can't use gsi_for_stmt as that doesn't work when the
3588 CFG isn't built yet. */
3589 gimple_stmt_iterator gsi2 = *gsi;
3590 gsi_prev (&gsi2);
3591 fold_stmt (&gsi2);
3593 else
3595 gsi_replace_with_seq_vops (gsi, stmts);
3596 fold_stmt (gsi);
3598 return true;
3600 return false;
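/* Illustrative folds of the two shapes handled above (hypothetical
   variables):

     sprintf (d, "abc")    ->  strcpy (d, "abc"), any result set to 3
     sprintf (d, "%s", s)  ->  strcpy (d, s), any result set to the
                               known length of S

   formats containing other % directives are left alone. */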
3603 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3604 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3605 attempt to simplify calls with more than 4 arguments.
3607 Return true if simplification was possible, otherwise false. */
3609 bool
3610 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3612 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3613 tree dest = gimple_call_arg (stmt, 0);
3614 tree destsize = gimple_call_arg (stmt, 1);
3615 tree fmt = gimple_call_arg (stmt, 2);
3616 tree orig = NULL_TREE;
3617 const char *fmt_str = NULL;
3619 if (gimple_call_num_args (stmt) > 4)
3620 return false;
3622 if (gimple_call_num_args (stmt) == 4)
3623 orig = gimple_call_arg (stmt, 3);
3625 /* Check whether the format is a literal string constant. */
3626 fmt_str = c_getstr (fmt);
3627 if (fmt_str == NULL)
3628 return false;
3630 if (!init_target_chars ())
3631 return false;
3633 /* If the format doesn't contain % args or %%, use strcpy. */
3634 if (strchr (fmt_str, target_percent) == NULL)
3636 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3637 if (!fn)
3638 return false;
3640 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3641 if (orig)
3642 return false;
3644 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3646 /* We could expand this as
3647 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3648 or to
3649 memcpy (str, fmt_with_nul_at_cstm1, cst);
3650 but in the former case that might increase code size
3651 and in the latter case grow .rodata section too much.
3652 So punt for now. */
3653 if (!known_lower (stmt, len, destsize, true))
3654 return false;
3656 gimple_seq stmts = NULL;
3657 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3658 gimple_seq_add_stmt_without_update (&stmts, repl);
3659 if (tree lhs = gimple_call_lhs (stmt))
3661 repl = gimple_build_assign (lhs,
3662 fold_convert (TREE_TYPE (lhs), len));
3663 gimple_seq_add_stmt_without_update (&stmts, repl);
3664 gsi_replace_with_seq_vops (gsi, stmts);
3665 /* gsi now points at the assignment to the lhs, get a
3666 stmt iterator to the strcpy call.
3667 ??? We can't use gsi_for_stmt as that doesn't work when the
3668 CFG isn't built yet. */
3669 gimple_stmt_iterator gsi2 = *gsi;
3670 gsi_prev (&gsi2);
3671 fold_stmt (&gsi2);
3673 else
3675 gsi_replace_with_seq_vops (gsi, stmts);
3676 fold_stmt (gsi);
3678 return true;
3681 /* If the format is "%s", use strcpy if strlen (ORIG) is known to be smaller than DESTSIZE. */
3682 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3684 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3685 if (!fn)
3686 return false;
3688 /* Don't crash on snprintf (str1, cst, "%s"). */
3689 if (!orig)
3690 return false;
3692 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3694 /* We could expand this as
3695 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3696 or to
3697 memcpy (str1, str2_with_nul_at_cstm1, cst);
3698 but in the former case that might increase code size
3699 and in the latter case grow .rodata section too much.
3700 So punt for now. */
3701 if (!known_lower (stmt, orig_len, destsize, true))
3702 return false;
3704 /* Convert snprintf (str1, cst, "%s", str2) into
3705 strcpy (str1, str2) if strlen (str2) < cst. */
3706 gimple_seq stmts = NULL;
3707 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3708 gimple_seq_add_stmt_without_update (&stmts, repl);
3709 if (tree lhs = gimple_call_lhs (stmt))
3711 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3712 TREE_TYPE (orig_len)))
3713 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3714 repl = gimple_build_assign (lhs, orig_len);
3715 gimple_seq_add_stmt_without_update (&stmts, repl);
3716 gsi_replace_with_seq_vops (gsi, stmts);
3717 /* gsi now points at the assignment to the lhs, get a
3719 stmt iterator to the strcpy call.
3719 ??? We can't use gsi_for_stmt as that doesn't work when the
3720 CFG isn't built yet. */
3721 gimple_stmt_iterator gsi2 = *gsi;
3722 gsi_prev (&gsi2);
3723 fold_stmt (&gsi2);
3725 else
3727 gsi_replace_with_seq_vops (gsi, stmts);
3728 fold_stmt (gsi);
3730 return true;
3732 return false;
3735 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3736 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3737 more than 3 arguments, and ARG may be null in the 2-argument case.
3739 Return true if simplification was made, otherwise false.
3740 FCODE is the BUILT_IN_* code of the function to be
3741 simplified. */
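/* Illustrative examples of the folds performed below, all of which
   apply only when the fprintf return value is unused:

     fprintf (fp, "hello");   -> fputs ("hello", fp);
     fprintf (fp, "%s", s);   -> fputs (s, fp);
     fprintf (fp, "%c", c);   -> fputc (c, fp);  */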
3743 static bool
3744 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3745 tree fp, tree fmt, tree arg,
3746 enum built_in_function fcode)
3748 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3749 tree fn_fputc, fn_fputs;
3750 const char *fmt_str = NULL;
3752 /* If the return value is used, don't do the transformation. */
3753 if (gimple_call_lhs (stmt) != NULL_TREE)
3754 return false;
3756 /* Check whether the format is a literal string constant. */
3757 fmt_str = c_getstr (fmt);
3758 if (fmt_str == NULL)
3759 return false;
3761 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3763 /* If we're using an unlocked function, assume the other
3764 unlocked functions exist explicitly. */
3765 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3766 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3768 else
3770 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3771 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3774 if (!init_target_chars ())
3775 return false;
3777 /* If the format doesn't contain % args or %%, use fputs. */
3778 if (strchr (fmt_str, target_percent) == NULL)
3780 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3781 && arg)
3782 return false;
3784 /* If the format specifier was "", fprintf does nothing. */
3785 if (fmt_str[0] == '\0')
3787 replace_call_with_value (gsi, NULL_TREE);
3788 return true;
3791 /* When "string" doesn't contain %, replace all cases of
3792 fprintf (fp, string) with fputs (string, fp). The fputs
3793 builtin will take care of special cases like length == 1. */
3794 if (fn_fputs)
3796 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3797 replace_call_with_call_and_fold (gsi, repl);
3798 return true;
3802 /* The other optimizations can be done only on the non-va_list variants. */
3803 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3804 return false;
3806 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3807 else if (strcmp (fmt_str, target_percent_s) == 0)
3809 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3810 return false;
3811 if (fn_fputs)
3813 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3814 replace_call_with_call_and_fold (gsi, repl);
3815 return true;
3819 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3820 else if (strcmp (fmt_str, target_percent_c) == 0)
3822 if (!arg
3823 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3824 return false;
3825 if (fn_fputc)
3827 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3828 replace_call_with_call_and_fold (gsi, repl);
3829 return true;
3833 return false;
3836 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3837 FMT and ARG are the arguments to the call; we don't fold cases with
3838 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3840 Return true if simplification was made, otherwise false.
3841 FCODE is the BUILT_IN_* code of the function to be
3842 simplified. */
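/* Illustrative examples of the folds performed below, applied only
   when the printf return value is unused:

     printf ("x");        -> putchar ('x');
     printf ("hello\n");  -> puts ("hello");
     printf ("%s\n", s);  -> puts (s);
     printf ("%c", c);    -> putchar (c);  */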
3844 static bool
3845 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3846 tree arg, enum built_in_function fcode)
3848 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3849 tree fn_putchar, fn_puts, newarg;
3850 const char *fmt_str = NULL;
3852 /* If the return value is used, don't do the transformation. */
3853 if (gimple_call_lhs (stmt) != NULL_TREE)
3854 return false;
3856 /* Check whether the format is a literal string constant. */
3857 fmt_str = c_getstr (fmt);
3858 if (fmt_str == NULL)
3859 return false;
3861 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3863 /* If we're using an unlocked function, assume the other
3864 unlocked functions exist explicitly. */
3865 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3866 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3868 else
3870 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3871 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3874 if (!init_target_chars ())
3875 return false;
3877 if (strcmp (fmt_str, target_percent_s) == 0
3878 || strchr (fmt_str, target_percent) == NULL)
3880 const char *str;
3882 if (strcmp (fmt_str, target_percent_s) == 0)
3884 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3885 return false;
3887 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3888 return false;
3890 str = c_getstr (arg);
3891 if (str == NULL)
3892 return false;
3894 else
3896 /* The format specifier doesn't contain any '%' characters. */
3897 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3898 && arg)
3899 return false;
3900 str = fmt_str;
3903 /* If the string was "", printf does nothing. */
3904 if (str[0] == '\0')
3906 replace_call_with_value (gsi, NULL_TREE);
3907 return true;
3910 /* If the string has length 1, call putchar. */
3911 if (str[1] == '\0')
3913 /* Given printf ("c"), where c is any one character,
3914 convert "c"[0] to an int and pass that to the replacement
3915 function. */
3916 newarg = build_int_cst (integer_type_node, str[0]);
3917 if (fn_putchar)
3919 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3920 replace_call_with_call_and_fold (gsi, repl);
3921 return true;
3924 else
3926 /* If the string was "string\n", call puts("string"). */
3927 size_t len = strlen (str);
3928 if ((unsigned char)str[len - 1] == target_newline
3929 && (size_t) (int) len == len
3930 && (int) len > 0)
3932 char *newstr;
3934 /* Create a NUL-terminated string that's one char shorter
3935 than the original, stripping off the trailing '\n'. */
3936 newstr = xstrdup (str);
3937 newstr[len - 1] = '\0';
3938 newarg = build_string_literal (len, newstr);
3939 free (newstr);
3940 if (fn_puts)
3942 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3943 replace_call_with_call_and_fold (gsi, repl);
3944 return true;
3947 else
3948 /* We'd like to arrange to call fputs(string,stdout) here,
3949 but we need stdout and don't have a way to get it yet. */
3950 return false;
3954 /* The other optimizations can be done only on the non-va_list variants. */
3955 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3956 return false;
3958 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3959 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3961 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3962 return false;
3963 if (fn_puts)
3965 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3966 replace_call_with_call_and_fold (gsi, repl);
3967 return true;
3971 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3972 else if (strcmp (fmt_str, target_percent_c) == 0)
3974 if (!arg || ! useless_type_conversion_p (integer_type_node,
3975 TREE_TYPE (arg)))
3976 return false;
3977 if (fn_putchar)
3979 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3980 replace_call_with_call_and_fold (gsi, repl);
3981 return true;
3985 return false;
3990 /* Fold a call to __builtin_strlen; if the length is not constant, record its range instead. */
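/* E.g. (a sketch):

     n = strlen ("abc");             -> n = 3;
     char a[8]; ... n = strlen (a);  -> n kept, range set to [0, 7].  */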
3992 static bool
3993 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3995 gimple *stmt = gsi_stmt (*gsi);
3996 tree arg = gimple_call_arg (stmt, 0);
3998 wide_int minlen;
3999 wide_int maxlen;
4001 c_strlen_data lendata = { };
4002 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4003 && !lendata.decl
4004 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4005 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4007 /* The range of lengths refers to either a single constant
4008 string or to the longest and shortest constant string
4009 referenced by the argument of the strlen() call, or to
4010 the strings that can possibly be stored in the arrays
4011 the argument refers to. */
4012 minlen = wi::to_wide (lendata.minlen);
4013 maxlen = wi::to_wide (lendata.maxlen);
4015 else
4017 unsigned prec = TYPE_PRECISION (sizetype);
4019 minlen = wi::shwi (0, prec);
4020 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4023 if (minlen == maxlen)
4025 /* Fold the strlen call to a constant. */
4026 tree type = TREE_TYPE (lendata.minlen);
4027 tree len = force_gimple_operand_gsi (gsi,
4028 wide_int_to_tree (type, minlen),
4029 true, NULL, true, GSI_SAME_STMT);
4030 replace_call_with_value (gsi, len);
4031 return true;
4034 /* Set the strlen() range to [0, MAXLEN]. */
4035 if (tree lhs = gimple_call_lhs (stmt))
4036 set_strlen_range (lhs, minlen, maxlen);
4038 return false;
4041 /* Fold a call to __builtin_acc_on_device. */
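/* A sketch of the expansion emitted below, in source form; VAL_HOST
   and VAL_DEV stand for the val_host/val_dev codes chosen depending
   on whether this is the host or the accelerator compiler:

     r = acc_on_device (d);  ->  r = (d == VAL_HOST) | (d == VAL_DEV);  */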
4043 static bool
4044 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4046 /* Defer folding until we know which compiler we're in. */
4047 if (symtab->state != EXPANSION)
4048 return false;
4050 unsigned val_host = GOMP_DEVICE_HOST;
4051 unsigned val_dev = GOMP_DEVICE_NONE;
4053 #ifdef ACCEL_COMPILER
4054 val_host = GOMP_DEVICE_NOT_HOST;
4055 val_dev = ACCEL_COMPILER_acc_device;
4056 #endif
4058 location_t loc = gimple_location (gsi_stmt (*gsi));
4060 tree host_eq = make_ssa_name (boolean_type_node);
4061 gimple *host_ass = gimple_build_assign
4062 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4063 gimple_set_location (host_ass, loc);
4064 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4066 tree dev_eq = make_ssa_name (boolean_type_node);
4067 gimple *dev_ass = gimple_build_assign
4068 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4069 gimple_set_location (dev_ass, loc);
4070 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4072 tree result = make_ssa_name (boolean_type_node);
4073 gimple *result_ass = gimple_build_assign
4074 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4075 gimple_set_location (result_ass, loc);
4076 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4078 replace_call_with_value (gsi, result);
4080 return true;
4083 /* Fold realloc (0, n) -> malloc (n). */
4085 static bool
4086 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4088 gimple *stmt = gsi_stmt (*gsi);
4089 tree arg = gimple_call_arg (stmt, 0);
4090 tree size = gimple_call_arg (stmt, 1);
4092 if (operand_equal_p (arg, null_pointer_node, 0))
4094 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4095 if (fn_malloc)
4097 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4098 replace_call_with_call_and_fold (gsi, repl);
4099 return true;
4102 return false;
4105 /* Number of bytes into which any type other than an aggregate
4106 or vector type should fit. */
4107 static constexpr size_t clear_padding_unit
4108 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4109 /* Buffer size on which __builtin_clear_padding folding code works. */
4110 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4112 /* Data passed through __builtin_clear_padding folding. */
4113 struct clear_padding_struct {
4114 location_t loc;
4115 /* False during __builtin_clear_padding folding, true during
4116 clear_type_padding_in_mask. In the latter case, instead of clearing
4117 the non-padding bits in the union_ptr array, clear the padding bits. */
4118 bool clear_in_mask;
4119 tree base;
4120 tree alias_type;
4121 gimple_stmt_iterator *gsi;
4122 /* Alignment of buf->base + 0. */
4123 unsigned align;
4124 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4125 HOST_WIDE_INT off;
4126 /* Number of padding bytes before buf->off that don't have padding clear
4127 code emitted yet. */
4128 HOST_WIDE_INT padding_bytes;
4129 /* The size of the whole object. Never emit code to touch
4130 buf->base + buf->sz or following bytes. */
4131 HOST_WIDE_INT sz;
4132 /* Number of bytes recorded in buf->buf. */
4133 size_t size;
4134 /* When inside a union, instead of emitting code we AND the bits into
4135 the union_ptr array. */
4136 unsigned char *union_ptr;
4137 /* Set bits mean padding bits that need to be cleared by the builtin. */
4138 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4141 /* Emit code to clear padding requested in BUF->buf - set bits
4142 in there stand for padding that should be cleared. FULL is true
4143 if everything from the buffer should be flushed, otherwise
4144 it can leave up to 2 * clear_padding_unit bytes for further
4145 processing. */
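/* For example, assuming a typical 64-bit target where
   struct S { char c; int i; } occupies 8 bytes with 3 bytes of padding
   after C: buf->buf would hold 00 ff ff ff 00 00 00 00, and flushing
   emits a single store zeroing the 3 padding bytes at offset 1.  */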
4147 static void
4148 clear_padding_flush (clear_padding_struct *buf, bool full)
4150 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4151 if (!full && buf->size < 2 * clear_padding_unit)
4152 return;
4153 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4154 size_t end = buf->size;
4155 if (!full)
4156 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4157 * clear_padding_unit);
4158 size_t padding_bytes = buf->padding_bytes;
4159 if (buf->union_ptr)
4161 if (buf->clear_in_mask)
4163 /* During clear_type_padding_in_mask, clear the padding
4164 bits set in buf->buf in the buf->union_ptr mask. */
4165 for (size_t i = 0; i < end; i++)
4167 if (buf->buf[i] == (unsigned char) ~0)
4168 padding_bytes++;
4169 else
4171 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4172 0, padding_bytes);
4173 padding_bytes = 0;
4174 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4177 if (full)
4179 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4180 0, padding_bytes);
4181 buf->off = 0;
4182 buf->size = 0;
4183 buf->padding_bytes = 0;
4185 else
4187 memmove (buf->buf, buf->buf + end, buf->size - end);
4188 buf->off += end;
4189 buf->size -= end;
4190 buf->padding_bytes = padding_bytes;
4192 return;
4194 /* Inside of a union, instead of emitting any code,
4195 clear all bits in the union_ptr buffer that are clear
4196 in buf. Whole padding bytes don't clear anything. */
4197 for (size_t i = 0; i < end; i++)
4199 if (buf->buf[i] == (unsigned char) ~0)
4200 padding_bytes++;
4201 else
4203 padding_bytes = 0;
4204 buf->union_ptr[buf->off + i] &= buf->buf[i];
4207 if (full)
4209 buf->off = 0;
4210 buf->size = 0;
4211 buf->padding_bytes = 0;
4213 else
4215 memmove (buf->buf, buf->buf + end, buf->size - end);
4216 buf->off += end;
4217 buf->size -= end;
4218 buf->padding_bytes = padding_bytes;
4220 return;
4222 size_t wordsize = UNITS_PER_WORD;
4223 for (size_t i = 0; i < end; i += wordsize)
4225 size_t nonzero_first = wordsize;
4226 size_t nonzero_last = 0;
4227 size_t zero_first = wordsize;
4228 size_t zero_last = 0;
4229 bool all_ones = true, bytes_only = true;
4230 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4231 > (unsigned HOST_WIDE_INT) buf->sz)
4233 gcc_assert (wordsize > 1);
4234 wordsize /= 2;
4235 i -= wordsize;
4236 continue;
4238 for (size_t j = i; j < i + wordsize && j < end; j++)
4240 if (buf->buf[j])
4242 if (nonzero_first == wordsize)
4244 nonzero_first = j - i;
4245 nonzero_last = j - i;
4247 if (nonzero_last != j - i)
4248 all_ones = false;
4249 nonzero_last = j + 1 - i;
4251 else
4253 if (zero_first == wordsize)
4254 zero_first = j - i;
4255 zero_last = j + 1 - i;
4257 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4259 all_ones = false;
4260 bytes_only = false;
4263 size_t padding_end = i;
4264 if (padding_bytes)
4266 if (nonzero_first == 0
4267 && nonzero_last == wordsize
4268 && all_ones)
4270 /* All bits are padding and we had some padding
4271 before too. Just extend it. */
4272 padding_bytes += wordsize;
4273 continue;
4275 if (all_ones && nonzero_first == 0)
4277 padding_bytes += nonzero_last;
4278 padding_end += nonzero_last;
4279 nonzero_first = wordsize;
4280 nonzero_last = 0;
4282 else if (bytes_only && nonzero_first == 0)
4284 gcc_assert (zero_first && zero_first != wordsize);
4285 padding_bytes += zero_first;
4286 padding_end += zero_first;
4288 tree atype, src;
4289 if (padding_bytes == 1)
4291 atype = char_type_node;
4292 src = build_zero_cst (char_type_node);
4294 else
4296 atype = build_array_type_nelts (char_type_node, padding_bytes);
4297 src = build_constructor (atype, NULL);
4299 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4300 build_int_cst (buf->alias_type,
4301 buf->off + padding_end
4302 - padding_bytes));
4303 gimple *g = gimple_build_assign (dst, src);
4304 gimple_set_location (g, buf->loc);
4305 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4306 padding_bytes = 0;
4307 buf->padding_bytes = 0;
4309 if (nonzero_first == wordsize)
4310 /* All bits in a word are 0, there are no padding bits. */
4311 continue;
4312 if (all_ones && nonzero_last == wordsize)
4314 /* All bits between nonzero_first and end of word are padding
4315 bits, start counting padding_bytes. */
4316 padding_bytes = nonzero_last - nonzero_first;
4317 continue;
4319 if (bytes_only)
4321 /* If bitfields aren't involved in this word, prefer storing
4322 individual bytes or groups of them over performing an RMW
4323 operation on the whole word. */
4324 gcc_assert (i + zero_last <= end);
4325 for (size_t j = padding_end; j < i + zero_last; j++)
4327 if (buf->buf[j])
4329 size_t k;
4330 for (k = j; k < i + zero_last; k++)
4331 if (buf->buf[k] == 0)
4332 break;
4333 HOST_WIDE_INT off = buf->off + j;
4334 tree atype, src;
4335 if (k - j == 1)
4337 atype = char_type_node;
4338 src = build_zero_cst (char_type_node);
4340 else
4342 atype = build_array_type_nelts (char_type_node, k - j);
4343 src = build_constructor (atype, NULL);
4345 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4346 buf->base,
4347 build_int_cst (buf->alias_type, off));
4348 gimple *g = gimple_build_assign (dst, src);
4349 gimple_set_location (g, buf->loc);
4350 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4351 j = k;
4354 if (nonzero_last == wordsize)
4355 padding_bytes = nonzero_last - zero_last;
4356 continue;
4358 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4360 if (nonzero_last - nonzero_first <= eltsz
4361 && ((nonzero_first & ~(eltsz - 1))
4362 == ((nonzero_last - 1) & ~(eltsz - 1))))
4364 tree type;
4365 if (eltsz == 1)
4366 type = char_type_node;
4367 else
4368 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4370 size_t start = nonzero_first & ~(eltsz - 1);
4371 HOST_WIDE_INT off = buf->off + i + start;
4372 tree atype = type;
4373 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4374 atype = build_aligned_type (type, buf->align);
4375 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4376 build_int_cst (buf->alias_type, off));
4377 tree src;
4378 gimple *g;
4379 if (all_ones
4380 && nonzero_first == start
4381 && nonzero_last == start + eltsz)
4382 src = build_zero_cst (type);
4383 else
4385 src = make_ssa_name (type);
4386 tree tmp_dst = unshare_expr (dst);
4387 /* The folding introduces a read from tmp_dst; prevent the
4388 uninitialized-warning analysis from issuing a warning for
4389 such a fake read. In order to suppress the warning only
4390 for this expr, set the location of tmp_dst to
4391 UNKNOWN_LOCATION first; suppress_warning will then call
4392 set_no_warning_bit to set the no_warning flag only for
4393 tmp_dst. */
4394 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4395 suppress_warning (tmp_dst, OPT_Wuninitialized);
4396 g = gimple_build_assign (src, tmp_dst);
4397 gimple_set_location (g, buf->loc);
4398 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4399 tree mask = native_interpret_expr (type,
4400 buf->buf + i + start,
4401 eltsz);
4402 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4403 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4404 tree src_masked = make_ssa_name (type);
4405 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4406 src, mask);
4407 gimple_set_location (g, buf->loc);
4408 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4409 src = src_masked;
4411 g = gimple_build_assign (dst, src);
4412 gimple_set_location (g, buf->loc);
4413 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4414 break;
4418 if (full)
4420 if (padding_bytes)
4422 tree atype, src;
4423 if (padding_bytes == 1)
4425 atype = char_type_node;
4426 src = build_zero_cst (char_type_node);
4428 else
4430 atype = build_array_type_nelts (char_type_node, padding_bytes);
4431 src = build_constructor (atype, NULL);
4433 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4434 build_int_cst (buf->alias_type,
4435 buf->off + end
4436 - padding_bytes));
4437 gimple *g = gimple_build_assign (dst, src);
4438 gimple_set_location (g, buf->loc);
4439 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4441 size_t end_rem = end % UNITS_PER_WORD;
4442 buf->off += end - end_rem;
4443 buf->size = end_rem;
4444 memset (buf->buf, 0, buf->size);
4445 buf->padding_bytes = 0;
4447 else
4449 memmove (buf->buf, buf->buf + end, buf->size - end);
4450 buf->off += end;
4451 buf->size -= end;
4452 buf->padding_bytes = padding_bytes;
4456 /* Append PADDING_BYTES padding bytes. */
4458 static void
4459 clear_padding_add_padding (clear_padding_struct *buf,
4460 HOST_WIDE_INT padding_bytes)
4462 if (padding_bytes == 0)
4463 return;
4464 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4465 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4466 clear_padding_flush (buf, false);
4467 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4468 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4470 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4471 padding_bytes -= clear_padding_buf_size - buf->size;
4472 buf->size = clear_padding_buf_size;
4473 clear_padding_flush (buf, false);
4474 gcc_assert (buf->padding_bytes);
4475 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4476 are guaranteed to be all ones. */
4477 padding_bytes += buf->size;
4478 buf->size = padding_bytes % UNITS_PER_WORD;
4479 memset (buf->buf, ~0, buf->size);
4480 buf->off += padding_bytes - buf->size;
4481 buf->padding_bytes += padding_bytes - buf->size;
4483 else
4485 memset (buf->buf + buf->size, ~0, padding_bytes);
4486 buf->size += padding_bytes;
4490 static void clear_padding_type (clear_padding_struct *, tree,
4491 HOST_WIDE_INT, bool);
4493 /* Clear padding bits of union type TYPE. */
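/* For example (a sketch): for
     union U { char c; int i; };
   each member is processed in turn and only the bits that are padding
   in every member stay set; here I covers all 4 bytes, so nothing in
   U is treated as padding.  */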
4495 static void
4496 clear_padding_union (clear_padding_struct *buf, tree type,
4497 HOST_WIDE_INT sz, bool for_auto_init)
4499 clear_padding_struct *union_buf;
4500 HOST_WIDE_INT start_off = 0, next_off = 0;
4501 size_t start_size = 0;
4502 if (buf->union_ptr)
4504 start_off = buf->off + buf->size;
4505 next_off = start_off + sz;
4506 start_size = start_off % UNITS_PER_WORD;
4507 start_off -= start_size;
4508 clear_padding_flush (buf, true);
4509 union_buf = buf;
4511 else
4513 if (sz + buf->size > clear_padding_buf_size)
4514 clear_padding_flush (buf, false);
4515 union_buf = XALLOCA (clear_padding_struct);
4516 union_buf->loc = buf->loc;
4517 union_buf->clear_in_mask = buf->clear_in_mask;
4518 union_buf->base = NULL_TREE;
4519 union_buf->alias_type = NULL_TREE;
4520 union_buf->gsi = NULL;
4521 union_buf->align = 0;
4522 union_buf->off = 0;
4523 union_buf->padding_bytes = 0;
4524 union_buf->sz = sz;
4525 union_buf->size = 0;
4526 if (sz + buf->size <= clear_padding_buf_size)
4527 union_buf->union_ptr = buf->buf + buf->size;
4528 else
4529 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4530 memset (union_buf->union_ptr, ~0, sz);
4533 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4534 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4536 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4538 if (TREE_TYPE (field) == error_mark_node)
4539 continue;
4540 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4541 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4542 if (!buf->clear_in_mask && !for_auto_init)
4543 error_at (buf->loc, "flexible array member %qD does not have "
4544 "well defined padding bits for %qs",
4545 field, "__builtin_clear_padding");
4546 continue;
4548 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4549 gcc_assert (union_buf->size == 0);
4550 union_buf->off = start_off;
4551 union_buf->size = start_size;
4552 memset (union_buf->buf, ~0, start_size);
4553 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4554 clear_padding_add_padding (union_buf, sz - fldsz);
4555 clear_padding_flush (union_buf, true);
4558 if (buf == union_buf)
4560 buf->off = next_off;
4561 buf->size = next_off % UNITS_PER_WORD;
4562 buf->off -= buf->size;
4563 memset (buf->buf, ~0, buf->size);
4565 else if (sz + buf->size <= clear_padding_buf_size)
4566 buf->size += sz;
4567 else
4569 unsigned char *union_ptr = union_buf->union_ptr;
4570 while (sz)
4572 clear_padding_flush (buf, false);
4573 HOST_WIDE_INT this_sz
4574 = MIN ((unsigned HOST_WIDE_INT) sz,
4575 clear_padding_buf_size - buf->size);
4576 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4577 buf->size += this_sz;
4578 union_ptr += this_sz;
4579 sz -= this_sz;
4581 XDELETE (union_buf->union_ptr);
4585 /* The only known floating point formats with padding bits are the
4586 IEEE extended ones. */
4588 static bool
4589 clear_padding_real_needs_padding_p (tree type)
4591 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4592 return (fmt->b == 2
4593 && fmt->signbit_ro == fmt->signbit_rw
4594 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4597 /* Return true if TYPE might contain any padding bits. */
4599 bool
4600 clear_padding_type_may_have_padding_p (tree type)
4602 switch (TREE_CODE (type))
4604 case RECORD_TYPE:
4605 case UNION_TYPE:
4606 return true;
4607 case ARRAY_TYPE:
4608 case COMPLEX_TYPE:
4609 case VECTOR_TYPE:
4610 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4611 case REAL_TYPE:
4612 return clear_padding_real_needs_padding_p (type);
4613 default:
4614 return false;
4618 /* Emit a runtime loop:
4619 for (; buf.base != end; buf.base += sz)
4620 __builtin_clear_padding (buf.base); */
4622 static void
4623 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4624 tree end, bool for_auto_init)
4626 tree l1 = create_artificial_label (buf->loc);
4627 tree l2 = create_artificial_label (buf->loc);
4628 tree l3 = create_artificial_label (buf->loc);
4629 gimple *g = gimple_build_goto (l2);
4630 gimple_set_location (g, buf->loc);
4631 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4632 g = gimple_build_label (l1);
4633 gimple_set_location (g, buf->loc);
4634 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4635 clear_padding_type (buf, type, buf->sz, for_auto_init);
4636 clear_padding_flush (buf, true);
4637 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4638 size_int (buf->sz));
4639 gimple_set_location (g, buf->loc);
4640 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4641 g = gimple_build_label (l2);
4642 gimple_set_location (g, buf->loc);
4643 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4644 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4645 gimple_set_location (g, buf->loc);
4646 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4647 g = gimple_build_label (l3);
4648 gimple_set_location (g, buf->loc);
4649 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4652 /* Clear padding bits for TYPE. Called recursively from
4653 gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4654 the __builtin_clear_padding call was not written by the end user;
4655 instead, it was inserted by the compiler to initialize the
4656 padding of an automatic variable. Therefore, we should not
4657 emit error messages for flexible array members, which would
4658 only confuse the end user. */
4660 static void
4661 clear_padding_type (clear_padding_struct *buf, tree type,
4662 HOST_WIDE_INT sz, bool for_auto_init)
4664 switch (TREE_CODE (type))
4666 case RECORD_TYPE:
4667 HOST_WIDE_INT cur_pos;
4668 cur_pos = 0;
4669 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4670 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4672 tree ftype = TREE_TYPE (field);
4673 if (DECL_BIT_FIELD (field))
4675 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4676 if (fldsz == 0)
4677 continue;
4678 HOST_WIDE_INT pos = int_byte_position (field);
4679 if (pos >= sz)
4680 continue;
4681 HOST_WIDE_INT bpos
4682 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4683 bpos %= BITS_PER_UNIT;
4684 HOST_WIDE_INT end
4685 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4686 if (pos + end > cur_pos)
4688 clear_padding_add_padding (buf, pos + end - cur_pos);
4689 cur_pos = pos + end;
4691 gcc_assert (cur_pos > pos
4692 && ((unsigned HOST_WIDE_INT) buf->size
4693 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4694 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4695 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4696 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4697 " in %qs", "__builtin_clear_padding");
4698 else if (BYTES_BIG_ENDIAN)
4700 /* Big endian. */
4701 if (bpos + fldsz <= BITS_PER_UNIT)
4702 *p &= ~(((1 << fldsz) - 1)
4703 << (BITS_PER_UNIT - bpos - fldsz));
4704 else
4706 if (bpos)
4708 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4709 p++;
4710 fldsz -= BITS_PER_UNIT - bpos;
4712 memset (p, 0, fldsz / BITS_PER_UNIT);
4713 p += fldsz / BITS_PER_UNIT;
4714 fldsz %= BITS_PER_UNIT;
4715 if (fldsz)
4716 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4719 else
4721 /* Little endian. */
4722 if (bpos + fldsz <= BITS_PER_UNIT)
4723 *p &= ~(((1 << fldsz) - 1) << bpos);
4724 else
4726 if (bpos)
4728 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4729 p++;
4730 fldsz -= BITS_PER_UNIT - bpos;
4732 memset (p, 0, fldsz / BITS_PER_UNIT);
4733 p += fldsz / BITS_PER_UNIT;
4734 fldsz %= BITS_PER_UNIT;
4735 if (fldsz)
4736 *p &= ~((1 << fldsz) - 1);
4740 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4742 if (ftype == error_mark_node)
4743 continue;
4744 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4745 && !COMPLETE_TYPE_P (ftype));
4746 if (!buf->clear_in_mask && !for_auto_init)
4747 error_at (buf->loc, "flexible array member %qD does not "
4748 "have well defined padding bits for %qs",
4749 field, "__builtin_clear_padding");
4751 else if (is_empty_type (ftype))
4752 continue;
4753 else
4755 HOST_WIDE_INT pos = int_byte_position (field);
4756 if (pos >= sz)
4757 continue;
4758 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4759 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4760 clear_padding_add_padding (buf, pos - cur_pos);
4761 cur_pos = pos;
4762 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4763 ftype = asbase;
4764 clear_padding_type (buf, ftype, fldsz, for_auto_init);
4765 cur_pos += fldsz;
4768 gcc_assert (sz >= cur_pos);
4769 clear_padding_add_padding (buf, sz - cur_pos);
4770 break;
4771 case ARRAY_TYPE:
4772 HOST_WIDE_INT nelts, fldsz;
4773 fldsz = int_size_in_bytes (TREE_TYPE (type));
4774 if (fldsz == 0)
4775 break;
4776 nelts = sz / fldsz;
4777 if (nelts > 1
4778 && sz > 8 * UNITS_PER_WORD
4779 && buf->union_ptr == NULL
4780 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4782 /* For a sufficiently large array of more than one element,
4783 emit a runtime loop to keep code size manageable. */
4784 tree base = buf->base;
4785 unsigned int prev_align = buf->align;
4786 HOST_WIDE_INT off = buf->off + buf->size;
4787 HOST_WIDE_INT prev_sz = buf->sz;
4788 clear_padding_flush (buf, true);
4789 tree elttype = TREE_TYPE (type);
4790 buf->base = create_tmp_var (build_pointer_type (elttype));
4791 tree end = make_ssa_name (TREE_TYPE (buf->base));
4792 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4793 base, size_int (off));
4794 gimple_set_location (g, buf->loc);
4795 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4796 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4797 size_int (sz));
4798 gimple_set_location (g, buf->loc);
4799 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4800 buf->sz = fldsz;
4801 buf->align = TYPE_ALIGN (elttype);
4802 buf->off = 0;
4803 buf->size = 0;
4804 clear_padding_emit_loop (buf, elttype, end, for_auto_init);
4805 buf->base = base;
4806 buf->sz = prev_sz;
4807 buf->align = prev_align;
4808 buf->size = off % UNITS_PER_WORD;
4809 buf->off = off - buf->size;
4810 memset (buf->buf, 0, buf->size);
4811 break;
4813 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4814 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4815 break;
4816 case UNION_TYPE:
4817 clear_padding_union (buf, type, sz, for_auto_init);
4818 break;
4819 case REAL_TYPE:
4820 gcc_assert ((size_t) sz <= clear_padding_unit);
4821 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4822 clear_padding_flush (buf, false);
4823 if (clear_padding_real_needs_padding_p (type))
4825 /* Use native_interpret_real + native_encode_expr to figure out
4826 which bits are padding. */
4827 memset (buf->buf + buf->size, ~0, sz);
4828 tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4829 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4830 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4831 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4832 for (size_t i = 0; i < (size_t) sz; i++)
4833 buf->buf[buf->size + i] ^= ~0;
4835 else
4836 memset (buf->buf + buf->size, 0, sz);
4837 buf->size += sz;
4838 break;
4839 case COMPLEX_TYPE:
4840 fldsz = int_size_in_bytes (TREE_TYPE (type));
4841 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4842 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4843 break;
4844 case VECTOR_TYPE:
4845 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4846 fldsz = int_size_in_bytes (TREE_TYPE (type));
4847 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4848 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4849 break;
4850 case NULLPTR_TYPE:
4851 gcc_assert ((size_t) sz <= clear_padding_unit);
4852 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4853 clear_padding_flush (buf, false);
4854 memset (buf->buf + buf->size, ~0, sz);
4855 buf->size += sz;
4856 break;
4857 default:
4858 gcc_assert ((size_t) sz <= clear_padding_unit);
4859 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4860 clear_padding_flush (buf, false);
4861 memset (buf->buf + buf->size, 0, sz);
4862 buf->size += sz;
4863 break;
4867 /* Clear padding bits of TYPE in MASK. */
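/* For example (a sketch, assuming a typical 64-bit target): for
   struct S { char c; int i; }, an all-ones 8-byte MASK would get
   bytes 1..3 (the padding after C) cleared, leaving bits set only
   where S has data.  */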
4869 void
4870 clear_type_padding_in_mask (tree type, unsigned char *mask)
4872 clear_padding_struct buf;
4873 buf.loc = UNKNOWN_LOCATION;
4874 buf.clear_in_mask = true;
4875 buf.base = NULL_TREE;
4876 buf.alias_type = NULL_TREE;
4877 buf.gsi = NULL;
4878 buf.align = 0;
4879 buf.off = 0;
4880 buf.padding_bytes = 0;
4881 buf.sz = int_size_in_bytes (type);
4882 buf.size = 0;
4883 buf.union_ptr = mask;
4884 clear_padding_type (&buf, type, buf.sz, false);
4885 clear_padding_flush (&buf, true);
4888 /* Fold __builtin_clear_padding builtin. */
4890 static bool
4891 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4893 gimple *stmt = gsi_stmt (*gsi);
4894 gcc_assert (gimple_call_num_args (stmt) == 2);
4895 tree ptr = gimple_call_arg (stmt, 0);
4896 tree typearg = gimple_call_arg (stmt, 1);
4897 /* The value of the 2nd argument of __builtin_clear_padding is used
4898 to distinguish whether this call is made by the user or inserted
4899 by the compiler for automatic variable initialization. */
4900 bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4901 tree type = TREE_TYPE (TREE_TYPE (typearg));
4902 location_t loc = gimple_location (stmt);
4903 clear_padding_struct buf;
4904 gimple_stmt_iterator gsiprev = *gsi;
4905 /* This should be folded during the lower pass. */
4906 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4907 gcc_assert (COMPLETE_TYPE_P (type));
4908 gsi_prev (&gsiprev);
4910 buf.loc = loc;
4911 buf.clear_in_mask = false;
4912 buf.base = ptr;
4913 buf.alias_type = NULL_TREE;
4914 buf.gsi = gsi;
4915 buf.align = get_pointer_alignment (ptr);
4916 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4917 buf.align = MAX (buf.align, talign);
4918 buf.off = 0;
4919 buf.padding_bytes = 0;
4920 buf.size = 0;
4921 buf.sz = int_size_in_bytes (type);
4922 buf.union_ptr = NULL;
4923 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4924 sorry_at (loc, "%s not supported for variable length aggregates",
4925 "__builtin_clear_padding");
4926 /* The implementation currently assumes 8-bit host and target
4927 chars which is the case for all currently supported targets
4928 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4929 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4930 sorry_at (loc, "%s not supported on this target",
4931 "__builtin_clear_padding");
4932 else if (!clear_padding_type_may_have_padding_p (type))
4934 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4936 tree sz = TYPE_SIZE_UNIT (type);
4937 tree elttype = type;
4938 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4939 while (TREE_CODE (elttype) == ARRAY_TYPE
4940 && int_size_in_bytes (elttype) < 0)
4941 elttype = TREE_TYPE (elttype);
4942 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4943 gcc_assert (eltsz >= 0);
4944 if (eltsz)
4946 buf.base = create_tmp_var (build_pointer_type (elttype));
4947 tree end = make_ssa_name (TREE_TYPE (buf.base));
4948 gimple *g = gimple_build_assign (buf.base, ptr);
4949 gimple_set_location (g, loc);
4950 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4951 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4952 gimple_set_location (g, loc);
4953 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4954 buf.sz = eltsz;
4955 buf.align = TYPE_ALIGN (elttype);
4956 buf.alias_type = build_pointer_type (elttype);
4957 clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
4960 else
4962 if (!is_gimple_mem_ref_addr (buf.base))
4964 buf.base = make_ssa_name (TREE_TYPE (ptr));
4965 gimple *g = gimple_build_assign (buf.base, ptr);
4966 gimple_set_location (g, loc);
4967 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4969 buf.alias_type = build_pointer_type (type);
4970 clear_padding_type (&buf, type, buf.sz, for_auto_init);
4971 clear_padding_flush (&buf, true);
4974 gimple_stmt_iterator gsiprev2 = *gsi;
4975 gsi_prev (&gsiprev2);
4976 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4977 gsi_replace (gsi, gimple_build_nop (), true);
4978 else
4980 gsi_remove (gsi, true);
4981 *gsi = gsiprev2;
4983 return true;
4986 /* Fold the non-target builtin at *GSI and return whether any simplification
4987 was made. */
4989 static bool
4990 gimple_fold_builtin (gimple_stmt_iterator *gsi)
4992 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
4993 tree callee = gimple_call_fndecl (stmt);
4995 /* Give up for always_inline inline builtins until they are
4996 inlined. */
4997 if (avoid_folding_inline_builtin (callee))
4998 return false;
5000 unsigned n = gimple_call_num_args (stmt);
5001 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5002 switch (fcode)
5004 case BUILT_IN_BCMP:
5005 return gimple_fold_builtin_bcmp (gsi);
5006 case BUILT_IN_BCOPY:
5007 return gimple_fold_builtin_bcopy (gsi);
5008 case BUILT_IN_BZERO:
5009 return gimple_fold_builtin_bzero (gsi);
5011 case BUILT_IN_MEMSET:
5012 return gimple_fold_builtin_memset (gsi,
5013 gimple_call_arg (stmt, 1),
5014 gimple_call_arg (stmt, 2));
5015 case BUILT_IN_MEMCPY:
5016 case BUILT_IN_MEMPCPY:
5017 case BUILT_IN_MEMMOVE:
5018 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5019 gimple_call_arg (stmt, 1), fcode);
5020 case BUILT_IN_SPRINTF_CHK:
5021 case BUILT_IN_VSPRINTF_CHK:
5022 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5023 case BUILT_IN_STRCAT_CHK:
5024 return gimple_fold_builtin_strcat_chk (gsi);
5025 case BUILT_IN_STRNCAT_CHK:
5026 return gimple_fold_builtin_strncat_chk (gsi);
5027 case BUILT_IN_STRLEN:
5028 return gimple_fold_builtin_strlen (gsi);
5029 case BUILT_IN_STRCPY:
5030 return gimple_fold_builtin_strcpy (gsi,
5031 gimple_call_arg (stmt, 0),
5032 gimple_call_arg (stmt, 1));
5033 case BUILT_IN_STRNCPY:
5034 return gimple_fold_builtin_strncpy (gsi,
5035 gimple_call_arg (stmt, 0),
5036 gimple_call_arg (stmt, 1),
5037 gimple_call_arg (stmt, 2));
5038 case BUILT_IN_STRCAT:
5039 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5040 gimple_call_arg (stmt, 1));
5041 case BUILT_IN_STRNCAT:
5042 return gimple_fold_builtin_strncat (gsi);
5043 case BUILT_IN_INDEX:
5044 case BUILT_IN_STRCHR:
5045 return gimple_fold_builtin_strchr (gsi, false);
5046 case BUILT_IN_RINDEX:
5047 case BUILT_IN_STRRCHR:
5048 return gimple_fold_builtin_strchr (gsi, true);
5049 case BUILT_IN_STRSTR:
5050 return gimple_fold_builtin_strstr (gsi);
5051 case BUILT_IN_STRCMP:
5052 case BUILT_IN_STRCMP_EQ:
5053 case BUILT_IN_STRCASECMP:
5054 case BUILT_IN_STRNCMP:
5055 case BUILT_IN_STRNCMP_EQ:
5056 case BUILT_IN_STRNCASECMP:
5057 return gimple_fold_builtin_string_compare (gsi);
5058 case BUILT_IN_MEMCHR:
5059 return gimple_fold_builtin_memchr (gsi);
5060 case BUILT_IN_FPUTS:
5061 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5062 gimple_call_arg (stmt, 1), false);
5063 case BUILT_IN_FPUTS_UNLOCKED:
5064 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5065 gimple_call_arg (stmt, 1), true);
5066 case BUILT_IN_MEMCPY_CHK:
5067 case BUILT_IN_MEMPCPY_CHK:
5068 case BUILT_IN_MEMMOVE_CHK:
5069 case BUILT_IN_MEMSET_CHK:
5070 return gimple_fold_builtin_memory_chk (gsi,
5071 gimple_call_arg (stmt, 0),
5072 gimple_call_arg (stmt, 1),
5073 gimple_call_arg (stmt, 2),
5074 gimple_call_arg (stmt, 3),
5075 fcode);
5076 case BUILT_IN_STPCPY:
5077 return gimple_fold_builtin_stpcpy (gsi);
5078 case BUILT_IN_STRCPY_CHK:
5079 case BUILT_IN_STPCPY_CHK:
5080 return gimple_fold_builtin_stxcpy_chk (gsi,
5081 gimple_call_arg (stmt, 0),
5082 gimple_call_arg (stmt, 1),
5083 gimple_call_arg (stmt, 2),
5084 fcode);
5085 case BUILT_IN_STRNCPY_CHK:
5086 case BUILT_IN_STPNCPY_CHK:
5087 return gimple_fold_builtin_stxncpy_chk (gsi,
5088 gimple_call_arg (stmt, 0),
5089 gimple_call_arg (stmt, 1),
5090 gimple_call_arg (stmt, 2),
5091 gimple_call_arg (stmt, 3),
5092 fcode);
5093 case BUILT_IN_SNPRINTF_CHK:
5094 case BUILT_IN_VSNPRINTF_CHK:
5095 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5097 case BUILT_IN_FPRINTF:
5098 case BUILT_IN_FPRINTF_UNLOCKED:
5099 case BUILT_IN_VFPRINTF:
5100 if (n == 2 || n == 3)
5101 return gimple_fold_builtin_fprintf (gsi,
5102 gimple_call_arg (stmt, 0),
5103 gimple_call_arg (stmt, 1),
5104 n == 3
5105 ? gimple_call_arg (stmt, 2)
5106 : NULL_TREE,
5107 fcode);
5108 break;
5109 case BUILT_IN_FPRINTF_CHK:
5110 case BUILT_IN_VFPRINTF_CHK:
5111 if (n == 3 || n == 4)
5112 return gimple_fold_builtin_fprintf (gsi,
5113 gimple_call_arg (stmt, 0),
5114 gimple_call_arg (stmt, 2),
5115 n == 4
5116 ? gimple_call_arg (stmt, 3)
5117 : NULL_TREE,
5118 fcode);
5119 break;
5120 case BUILT_IN_PRINTF:
5121 case BUILT_IN_PRINTF_UNLOCKED:
5122 case BUILT_IN_VPRINTF:
5123 if (n == 1 || n == 2)
5124 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5125 n == 2
5126 ? gimple_call_arg (stmt, 1)
5127 : NULL_TREE, fcode);
5128 break;
5129 case BUILT_IN_PRINTF_CHK:
5130 case BUILT_IN_VPRINTF_CHK:
5131 if (n == 2 || n == 3)
5132 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5133 n == 3
5134 ? gimple_call_arg (stmt, 2)
5135 : NULL_TREE, fcode);
5136 break;
5137 case BUILT_IN_ACC_ON_DEVICE:
5138 return gimple_fold_builtin_acc_on_device (gsi,
5139 gimple_call_arg (stmt, 0));
5140 case BUILT_IN_REALLOC:
5141 return gimple_fold_builtin_realloc (gsi);
5143 case BUILT_IN_CLEAR_PADDING:
5144 return gimple_fold_builtin_clear_padding (gsi);
5146 default:;
5149 /* Try the generic builtin folder. */
5150 bool ignore = (gimple_call_lhs (stmt) == NULL);
5151 tree result = fold_call_stmt (stmt, ignore);
5152 if (result)
5154 if (ignore)
5155 STRIP_NOPS (result);
5156 else
5157 result = fold_convert (gimple_call_return_type (stmt), result);
5158 gimplify_and_update_call_from_tree (gsi, result);
5159 return true;
5162 return false;
5165 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5166 function calls to constants, where possible. */
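/* E.g. (a sketch): in an OpenACC offloaded function known to run with
   vector_length (32), IFN_GOACC_DIM_SIZE for the vector axis folds to
   32, and with num_workers (1) IFN_GOACC_DIM_POS for the worker axis
   folds to 0.  */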
5168 static tree
5169 fold_internal_goacc_dim (const gimple *call)
5171 int axis = oacc_get_ifn_dim_arg (call);
5172 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5173 tree result = NULL_TREE;
5174 tree type = TREE_TYPE (gimple_call_lhs (call));
5176 switch (gimple_call_internal_fn (call))
5178 case IFN_GOACC_DIM_POS:
5179 /* If the size is 1, we know the answer. */
5180 if (size == 1)
5181 result = build_int_cst (type, 0);
5182 break;
5183 case IFN_GOACC_DIM_SIZE:
5184 /* If the size is not dynamic, we know the answer. */
5185 if (size)
5186 result = build_int_cst (type, size);
5187 break;
5188 default:
5189 break;
5192 return result;
5195 /* Return true if STMT is a __atomic_compare_exchange_N call suitable
5196 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5197 &var where var is only addressable because of such calls. */
5199 bool
5200 optimize_atomic_compare_exchange_p (gimple *stmt)
5202 if (gimple_call_num_args (stmt) != 6
5203 || !flag_inline_atomics
5204 || !optimize
5205 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5206 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5207 || !gimple_vdef (stmt)
5208 || !gimple_vuse (stmt))
5209 return false;
5211 tree fndecl = gimple_call_fndecl (stmt);
5212 switch (DECL_FUNCTION_CODE (fndecl))
5214 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5215 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5216 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5217 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5218 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5219 break;
5220 default:
5221 return false;
5224 tree expected = gimple_call_arg (stmt, 1);
5225 if (TREE_CODE (expected) != ADDR_EXPR
5226 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5227 return false;
5229 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5230 if (!is_gimple_reg_type (etype)
5231 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5232 || TREE_THIS_VOLATILE (etype)
5233 || VECTOR_TYPE_P (etype)
5234 || TREE_CODE (etype) == COMPLEX_TYPE
5235 /* Don't optimize floating point expected vars; VIEW_CONVERT_EXPRs
5236 might not preserve all the bits. See PR71716. */
5237 || SCALAR_FLOAT_TYPE_P (etype)
5238 || maybe_ne (TYPE_PRECISION (etype),
5239 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5240 return false;
5242 tree weak = gimple_call_arg (stmt, 3);
5243 if (!integer_zerop (weak) && !integer_onep (weak))
5244 return false;
5246 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5247 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5248 machine_mode mode = TYPE_MODE (itype);
5250 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5251 == CODE_FOR_nothing
5252 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5253 return false;
5255 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5256 return false;
5258 return true;
5261 /* Fold
5262 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5263 into
5264 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5265 i = IMAGPART_EXPR <t>;
5266 r = (_Bool) i;
5267 e = REALPART_EXPR <t>; */
5269 void
5270 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5272 gimple *stmt = gsi_stmt (*gsi);
5273 tree fndecl = gimple_call_fndecl (stmt);
5274 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5275 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5276 tree ctype = build_complex_type (itype);
5277 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5278 bool throws = false;
5279 edge e = NULL;
5280 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5281 expected);
5282 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5283 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5284 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5286 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5287 build1 (VIEW_CONVERT_EXPR, itype,
5288 gimple_assign_lhs (g)));
5289 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5291 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5292 + int_size_in_bytes (itype);
5293 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5294 gimple_call_arg (stmt, 0),
5295 gimple_assign_lhs (g),
5296 gimple_call_arg (stmt, 2),
5297 build_int_cst (integer_type_node, flag),
5298 gimple_call_arg (stmt, 4),
5299 gimple_call_arg (stmt, 5));
5300 tree lhs = make_ssa_name (ctype);
5301 gimple_call_set_lhs (g, lhs);
5302 gimple_move_vops (g, stmt);
5303 tree oldlhs = gimple_call_lhs (stmt);
5304 if (stmt_can_throw_internal (cfun, stmt))
5306 throws = true;
5307 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5309 gimple_call_set_nothrow (as_a <gcall *> (g),
5310 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5311 gimple_call_set_lhs (stmt, NULL_TREE);
5312 gsi_replace (gsi, g, true);
5313 if (oldlhs)
5315 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5316 build1 (IMAGPART_EXPR, itype, lhs));
5317 if (throws)
5319 gsi_insert_on_edge_immediate (e, g);
5320 *gsi = gsi_for_stmt (g);
5322 else
5323 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5324 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5325 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5327 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5328 build1 (REALPART_EXPR, itype, lhs));
5329 if (throws && oldlhs == NULL_TREE)
5331 gsi_insert_on_edge_immediate (e, g);
5332 *gsi = gsi_for_stmt (g);
5334 else
5335 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5336 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5338 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5339 VIEW_CONVERT_EXPR,
5340 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5341 gimple_assign_lhs (g)));
5342 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5344 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5345 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5346 *gsi = gsiret;
5349 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
5350 signed precision, doesn't fit into TYPE. The overflow test is made
5351 regardless of -fwrapv, and even for unsigned types. */
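/* E.g. for a 16-bit unsigned TYPE, 60000 + 60000 needs 17 bits and
   therefore overflows, and 1 - 2 is negative and also counts as
   overflow, even though unsigned arithmetic would wrap.  */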
5353 bool
5354 arith_overflowed_p (enum tree_code code, const_tree type,
5355 const_tree arg0, const_tree arg1)
5357 widest2_int warg0 = widest2_int_cst (arg0);
5358 widest2_int warg1 = widest2_int_cst (arg1);
5359 widest2_int wres;
5360 switch (code)
5362 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5363 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5364 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5365 default: gcc_unreachable ();
5367 signop sign = TYPE_SIGN (type);
5368 if (sign == UNSIGNED && wi::neg_p (wres))
5369 return true;
5370 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5373 /* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
5374 return a MEM_REF for the memory it references, otherwise return null.
5375 VECTYPE is the type of the memory vector. MASK_P indicates it's for
5376 MASK if true, otherwise it's for LEN. */
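/* For example (a sketch): a load such as
     lhs = .MASK_LOAD (ptr, align, { -1, -1, -1, -1 });
   with an all-ones mask is really unconditional, and the helpers below
   rewrite it as a plain vector move
     lhs = *(vectype *) ptr;  */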
5378 static tree
5379 gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
5381 tree ptr = gimple_call_arg (call, 0);
5382 tree alias_align = gimple_call_arg (call, 1);
5383 if (!tree_fits_uhwi_p (alias_align))
5384 return NULL_TREE;
5386 if (mask_p)
5388 tree mask = gimple_call_arg (call, 2);
5389 if (!integer_all_onesp (mask))
5390 return NULL_TREE;
5392 else
5394 internal_fn ifn = gimple_call_internal_fn (call);
5395 int len_index = internal_fn_len_index (ifn);
5396 tree basic_len = gimple_call_arg (call, len_index);
5397 if (!poly_int_tree_p (basic_len))
5398 return NULL_TREE;
5399 tree bias = gimple_call_arg (call, len_index + 1);
5400 gcc_assert (TREE_CODE (bias) == INTEGER_CST);
5401 /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
5402 we don't fold when (bias + len) != VF. */
5403 if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
5404 GET_MODE_NUNITS (TYPE_MODE (vectype))))
5405 return NULL_TREE;
5407 /* For MASK_LEN_{LOAD,STORE}, we should also check whether
5408 the mask is an all-ones mask. */
5409 if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
5411 tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
5412 if (!integer_all_onesp (mask))
5413 return NULL_TREE;
5417 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5418 if (TYPE_ALIGN (vectype) != align)
5419 vectype = build_aligned_type (vectype, align);
5420 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5421 return fold_build2 (MEM_REF, vectype, ptr, offset);
5424 /* Try to fold IFN_{MASK,LEN}_LOAD call CALL. Return true on success.
5425 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5427 static bool
5428 gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
5430 tree lhs = gimple_call_lhs (call);
5431 if (!lhs)
5432 return false;
5434 if (tree rhs
5435 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs), mask_p))
5437 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5438 gimple_set_location (new_stmt, gimple_location (call));
5439 gimple_move_vops (new_stmt, call);
5440 gsi_replace (gsi, new_stmt, false);
5441 return true;
5443 return false;
5446 /* Try to fold IFN_{MASK,LEN}_STORE call CALL. Return true on success.
5447 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
5449 static bool
5450 gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
5451 bool mask_p)
5453 internal_fn ifn = gimple_call_internal_fn (call);
5454 tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
5455 if (tree lhs
5456 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
5458 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5459 gimple_set_location (new_stmt, gimple_location (call));
5460 gimple_move_vops (new_stmt, call);
5461 gsi_replace (gsi, new_stmt, false);
5462 return true;
5464 return false;
5467 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5468 The statement may be replaced by another statement, e.g., if the call
5469 simplifies to a constant value. Return true if any changes were made.
5470 It is assumed that the operands have been previously folded. */
5472 static bool
5473 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5475 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5476 tree callee;
5477 bool changed = false;
5479 /* Check for virtual calls that became direct calls. */
5480 callee = gimple_call_fn (stmt);
5481 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5483 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5485 if (dump_file && virtual_method_call_p (callee)
5486 && !possible_polymorphic_call_target_p
5487 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5488 (OBJ_TYPE_REF_EXPR (callee)))))
5490 fprintf (dump_file,
5491 "Type inheritance inconsistent devirtualization of ");
5492 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5493 fprintf (dump_file, " to ");
5494 print_generic_expr (dump_file, callee, TDF_SLIM);
5495 fprintf (dump_file, "\n");
5498 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5499 changed = true;
5501 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5503 bool final;
5504 vec <cgraph_node *>targets
5505 = possible_polymorphic_call_targets (callee, stmt, &final);
5506 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5508 tree lhs = gimple_call_lhs (stmt);
5509 if (dump_enabled_p ())
5511 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5512 "folding virtual function call to %s\n",
5513 targets.length () == 1
5514 ? targets[0]->name ()
5515 : "__builtin_unreachable");
5517 if (targets.length () == 1)
5519 tree fndecl = targets[0]->decl;
5520 gimple_call_set_fndecl (stmt, fndecl);
5521 changed = true;
5522 /* If changing the call to __cxa_pure_virtual
5523 or a similar noreturn function, adjust gimple_call_fntype
5524 too. */
5525 if (gimple_call_noreturn_p (stmt)
5526 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5527 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5528 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5529 == void_type_node))
5530 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5531 /* If the call becomes noreturn, remove the lhs. */
5532 if (lhs
5533 && gimple_call_noreturn_p (stmt)
5534 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5535 || should_remove_lhs_p (lhs)))
5537 if (TREE_CODE (lhs) == SSA_NAME)
5539 tree var = create_tmp_var (TREE_TYPE (lhs));
5540 tree def = get_or_create_ssa_default_def (cfun, var);
5541 gimple *new_stmt = gimple_build_assign (lhs, def);
5542 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5544 gimple_call_set_lhs (stmt, NULL_TREE);
5546 maybe_remove_unused_call_args (cfun, stmt);
5548 else
5550 location_t loc = gimple_location (stmt);
5551 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
5552 gimple_call_set_ctrl_altering (new_stmt, false);
5553 /* If the call had an SSA name as its lhs, morph that into
5554 an uninitialized value. */
5555 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5557 tree var = create_tmp_var (TREE_TYPE (lhs));
5558 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5559 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5560 set_ssa_default_def (cfun, var, lhs);
5562 gimple_move_vops (new_stmt, stmt);
5563 gsi_replace (gsi, new_stmt, false);
5564 return true;
5570 /* Check for indirect calls that became direct calls and thus
5571 no longer require a static chain. */
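/* Illustrative sketch (hypothetical names): a call to a C nested
   function that became direct, e.g.
     bar.0_1 (i_2, &FRAME.3);
   becomes
     bar (i_2);
   with the static chain operand dropped below, provided bar does not
   actually use its chain (!DECL_STATIC_CHAIN).  */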
5572 if (gimple_call_chain (stmt))
5574 tree fn = gimple_call_fndecl (stmt);
5575 if (fn && !DECL_STATIC_CHAIN (fn))
5577 gimple_call_set_chain (stmt, NULL);
5578 changed = true;
5582 if (inplace)
5583 return changed;
5585 /* Check for builtins that CCP can handle using information not
5586 available in the generic fold routines. */
5587 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5589 if (gimple_fold_builtin (gsi))
5590 changed = true;
5592 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5594 changed |= targetm.gimple_fold_builtin (gsi);
5596 else if (gimple_call_internal_p (stmt))
5598 enum tree_code subcode = ERROR_MARK;
5599 tree result = NULL_TREE;
5600 bool cplx_result = false;
5601 bool uaddc_usubc = false;
5602 tree overflow = NULL_TREE;
5603 switch (gimple_call_internal_fn (stmt))
5605 case IFN_BUILTIN_EXPECT:
5606 result = fold_builtin_expect (gimple_location (stmt),
5607 gimple_call_arg (stmt, 0),
5608 gimple_call_arg (stmt, 1),
5609 gimple_call_arg (stmt, 2),
5610 NULL_TREE);
5611 break;
5612 case IFN_UBSAN_OBJECT_SIZE:
5614 tree offset = gimple_call_arg (stmt, 1);
5615 tree objsize = gimple_call_arg (stmt, 2);
5616 if (integer_all_onesp (objsize)
5617 || (TREE_CODE (offset) == INTEGER_CST
5618 && TREE_CODE (objsize) == INTEGER_CST
5619 && tree_int_cst_le (offset, objsize)))
5621 replace_call_with_value (gsi, NULL_TREE);
5622 return true;
5625 break;
5626 case IFN_UBSAN_PTR:
5627 if (integer_zerop (gimple_call_arg (stmt, 1)))
5629 replace_call_with_value (gsi, NULL_TREE);
5630 return true;
5632 break;
5633 case IFN_UBSAN_BOUNDS:
5635 tree index = gimple_call_arg (stmt, 1);
5636 tree bound = gimple_call_arg (stmt, 2);
5637 if (TREE_CODE (index) == INTEGER_CST
5638 && TREE_CODE (bound) == INTEGER_CST)
5640 index = fold_convert (TREE_TYPE (bound), index);
5641 if (TREE_CODE (index) == INTEGER_CST
5642 && tree_int_cst_lt (index, bound))
5644 replace_call_with_value (gsi, NULL_TREE);
5645 return true;
5649 break;
5650 case IFN_GOACC_DIM_SIZE:
5651 case IFN_GOACC_DIM_POS:
5652 result = fold_internal_goacc_dim (stmt);
5653 break;
5654 case IFN_UBSAN_CHECK_ADD:
5655 subcode = PLUS_EXPR;
5656 break;
5657 case IFN_UBSAN_CHECK_SUB:
5658 subcode = MINUS_EXPR;
5659 break;
5660 case IFN_UBSAN_CHECK_MUL:
5661 subcode = MULT_EXPR;
5662 break;
5663 case IFN_ADD_OVERFLOW:
5664 subcode = PLUS_EXPR;
5665 cplx_result = true;
5666 break;
5667 case IFN_SUB_OVERFLOW:
5668 subcode = MINUS_EXPR;
5669 cplx_result = true;
5670 break;
5671 case IFN_MUL_OVERFLOW:
5672 subcode = MULT_EXPR;
5673 cplx_result = true;
5674 break;
5675 case IFN_UADDC:
5676 subcode = PLUS_EXPR;
5677 cplx_result = true;
5678 uaddc_usubc = true;
5679 break;
5680 case IFN_USUBC:
5681 subcode = MINUS_EXPR;
5682 cplx_result = true;
5683 uaddc_usubc = true;
5684 break;
5685 case IFN_MASK_LOAD:
5686 changed |= gimple_fold_partial_load (gsi, stmt, true);
5687 break;
5688 case IFN_MASK_STORE:
5689 changed |= gimple_fold_partial_store (gsi, stmt, true);
5690 break;
5691 case IFN_LEN_LOAD:
5692 case IFN_MASK_LEN_LOAD:
5693 changed |= gimple_fold_partial_load (gsi, stmt, false);
5694 break;
5695 case IFN_LEN_STORE:
5696 case IFN_MASK_LEN_STORE:
5697 changed |= gimple_fold_partial_store (gsi, stmt, false);
5698 break;
5699 default:
5700 break;
5702 if (subcode != ERROR_MARK)
5704 tree arg0 = gimple_call_arg (stmt, 0);
5705 tree arg1 = gimple_call_arg (stmt, 1);
5706 tree arg2 = NULL_TREE;
5707 tree type = TREE_TYPE (arg0);
5708 if (cplx_result)
5710 tree lhs = gimple_call_lhs (stmt);
5711 if (lhs == NULL_TREE)
5712 type = NULL_TREE;
5713 else
5714 type = TREE_TYPE (TREE_TYPE (lhs));
5715 if (uaddc_usubc)
5716 arg2 = gimple_call_arg (stmt, 2);
5718 if (type == NULL_TREE)
5720 else if (uaddc_usubc)
5722 if (!integer_zerop (arg2))
5724 /* x = y + 0 + 0; x = y - 0 - 0; */
5725 else if (integer_zerop (arg1))
5726 result = arg0;
5727 /* x = 0 + y + 0; */
5728 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5729 result = arg1;
5730 /* x = y - y - 0; */
5731 else if (subcode == MINUS_EXPR
5732 && operand_equal_p (arg0, arg1, 0))
5733 result = integer_zero_node;
5735 /* x = y + 0; x = y - 0; x = y * 0; */
5736 else if (integer_zerop (arg1))
5737 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5738 /* x = 0 + y; x = 0 * y; */
5739 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5740 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5741 /* x = y - y; */
5742 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5743 result = integer_zero_node;
5744 /* x = y * 1; x = 1 * y; */
5745 else if (subcode == MULT_EXPR && integer_onep (arg1))
5746 result = arg0;
5747 else if (subcode == MULT_EXPR && integer_onep (arg0))
5748 result = arg1;
5749 if (result)
5751 if (result == integer_zero_node)
5752 result = build_zero_cst (type);
5753 else if (cplx_result && TREE_TYPE (result) != type)
5755 if (TREE_CODE (result) == INTEGER_CST)
5757 if (arith_overflowed_p (PLUS_EXPR, type, result,
5758 integer_zero_node))
5759 overflow = build_one_cst (type);
5761 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5762 && TYPE_UNSIGNED (type))
5763 || (TYPE_PRECISION (type)
5764 < (TYPE_PRECISION (TREE_TYPE (result))
5765 + (TYPE_UNSIGNED (TREE_TYPE (result))
5766 && !TYPE_UNSIGNED (type)))))
5767 result = NULL_TREE;
5768 if (result)
5769 result = fold_convert (type, result);
5774 if (result)
5776 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5777 result = drop_tree_overflow (result);
5778 if (cplx_result)
5780 if (overflow == NULL_TREE)
5781 overflow = build_zero_cst (TREE_TYPE (result));
5782 tree ctype = build_complex_type (TREE_TYPE (result));
5783 if (TREE_CODE (result) == INTEGER_CST
5784 && TREE_CODE (overflow) == INTEGER_CST)
5785 result = build_complex (ctype, result, overflow);
5786 else
5787 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5788 ctype, result, overflow);
5790 gimplify_and_update_call_from_tree (gsi, result);
5791 changed = true;
5795 return changed;
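/* Illustrative sketch of the internal-function folding above
   (hypothetical names): a statement such as
     _3 = .ADD_OVERFLOW (x_1, 0);
   has result x_1 and overflow 0, so it is rewritten to
     _3 = COMPLEX_EXPR <x_1, 0>;
   by gimplify_and_update_call_from_tree.  */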
5799 /* Return true if NAME has a use on STMT. Note this can return
5800 false even though there is a use on STMT if SSA operands are not
5801 up-to-date. */
5803 static bool
5804 has_use_on_stmt (tree name, gimple *stmt)
5806 ssa_op_iter iter;
5807 tree op;
5808 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5809 if (op == name)
5810 return true;
5811 return false;
5814 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5815 gimple_simplify.
5817 Replaces *GSI with the simplification result in RES_OP
5818 and the associated statements in *SEQ. Does the replacement
5819 according to INPLACE and returns true if the operation succeeded. */
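/* Illustrative sketch (hypothetical operands): if gimple_simplify
   reduces the condition of
     if (a_1 != a_1)
   to the integer constant 0, the INTEGER_CST case below rewrites the
   statement via gimple_cond_make_false.  */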
5821 static bool
5822 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5823 gimple_match_op *res_op,
5824 gimple_seq *seq, bool inplace)
5826 gimple *stmt = gsi_stmt (*gsi);
5827 tree *ops = res_op->ops;
5828 unsigned int num_ops = res_op->num_ops;
5830 /* Play safe and do not allow abnormals to be mentioned in
5831 newly created statements. See also maybe_push_res_to_seq.
5832 As an exception allow such uses if there was a use of the
5833 same SSA name on the old stmt. */
5834 for (unsigned int i = 0; i < num_ops; ++i)
5835 if (TREE_CODE (ops[i]) == SSA_NAME
5836 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5837 && !has_use_on_stmt (ops[i], stmt))
5838 return false;
5840 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5841 for (unsigned int i = 0; i < 2; ++i)
5842 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5843 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5844 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5845 return false;
5847 /* Don't insert new statements when INPLACE is true, even if we could
5848 reuse STMT for the final statement. */
5849 if (inplace && !gimple_seq_empty_p (*seq))
5850 return false;
5852 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5854 gcc_assert (res_op->code.is_tree_code ());
5855 auto code = tree_code (res_op->code);
5856 if (TREE_CODE_CLASS (code) == tcc_comparison
5857 /* GIMPLE_CONDs condition may not throw. */
5858 && (!flag_exceptions
5859 || !cfun->can_throw_non_call_exceptions
5860 || !operation_could_trap_p (code,
5861 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5862 false, NULL_TREE)))
5863 gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5864 else if (code == SSA_NAME)
5865 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5866 build_zero_cst (TREE_TYPE (ops[0])));
5867 else if (code == INTEGER_CST)
5869 if (integer_zerop (ops[0]))
5870 gimple_cond_make_false (cond_stmt);
5871 else
5872 gimple_cond_make_true (cond_stmt);
5874 else if (!inplace)
5876 tree res = maybe_push_res_to_seq (res_op, seq);
5877 if (!res)
5878 return false;
5879 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5880 build_zero_cst (TREE_TYPE (res)));
5882 else
5883 return false;
5884 if (dump_file && (dump_flags & TDF_DETAILS))
5886 fprintf (dump_file, "gimple_simplified to ");
5887 if (!gimple_seq_empty_p (*seq))
5888 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5889 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5890 0, TDF_SLIM);
5892 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5893 return true;
5895 else if (is_gimple_assign (stmt)
5896 && res_op->code.is_tree_code ())
5898 auto code = tree_code (res_op->code);
5899 if (!inplace
5900 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5902 maybe_build_generic_op (res_op);
5903 gimple_assign_set_rhs_with_ops (gsi, code,
5904 res_op->op_or_null (0),
5905 res_op->op_or_null (1),
5906 res_op->op_or_null (2));
5907 if (dump_file && (dump_flags & TDF_DETAILS))
5909 fprintf (dump_file, "gimple_simplified to ");
5910 if (!gimple_seq_empty_p (*seq))
5911 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5912 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5913 0, TDF_SLIM);
5915 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5916 return true;
5919 else if (res_op->code.is_fn_code ()
5920 && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
5922 gcc_assert (num_ops == gimple_call_num_args (stmt));
5923 for (unsigned int i = 0; i < num_ops; ++i)
5924 gimple_call_set_arg (stmt, i, ops[i]);
5925 if (dump_file && (dump_flags & TDF_DETAILS))
5927 fprintf (dump_file, "gimple_simplified to ");
5928 if (!gimple_seq_empty_p (*seq))
5929 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5930 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5932 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5933 return true;
5935 else if (!inplace)
5937 if (gimple_has_lhs (stmt))
5939 tree lhs = gimple_get_lhs (stmt);
5940 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5941 return false;
5942 if (dump_file && (dump_flags & TDF_DETAILS))
5944 fprintf (dump_file, "gimple_simplified to ");
5945 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5947 gsi_replace_with_seq_vops (gsi, *seq);
5948 return true;
5950 else
5951 gcc_unreachable ();
5954 return false;
5957 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
5959 static bool
5960 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5962 bool res = false;
5963 tree *orig_t = t;
5965 if (TREE_CODE (*t) == ADDR_EXPR)
5966 t = &TREE_OPERAND (*t, 0);
5968 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5969 generic vector extension. The actual vector referenced is
5970 view-converted to an array type for this purpose. If the index
5971 is constant the canonical representation in the middle-end is a
5972 BIT_FIELD_REF so rewrite the former to the latter here. */
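/* Illustrative sketch: given "typedef int v4si
   __attribute__ ((vector_size (16))); v4si v;", the access v[2] is
   represented as ARRAY_REF <VIEW_CONVERT_EXPR <int[4]> (v), 2> and is
   rewritten below to BIT_FIELD_REF <v, 32, 64>, i.e. 32 bits at bit
   position 2 * 32.  */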
5973 if (TREE_CODE (*t) == ARRAY_REF
5974 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5975 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5976 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5978 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5979 if (VECTOR_TYPE_P (vtype))
5981 tree low = array_ref_low_bound (*t);
5982 if (TREE_CODE (low) == INTEGER_CST)
5984 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5986 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5987 wi::to_widest (low));
5988 idx = wi::mul (idx, wi::to_widest
5989 (TYPE_SIZE (TREE_TYPE (*t))));
5990 widest_int ext
5991 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5992 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5994 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5995 TREE_TYPE (*t),
5996 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5997 TYPE_SIZE (TREE_TYPE (*t)),
5998 wide_int_to_tree (bitsizetype, idx));
5999 res = true;
6006 while (handled_component_p (*t))
6007 t = &TREE_OPERAND (*t, 0);
6009 /* Canonicalize MEM [&foo.bar, 0], which appears after propagating
6010 invariant addresses into an SSA name MEM_REF address. */
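/* Illustrative sketch (hypothetical layout): after propagation,
     MEM[&s.f, 4]
   with field f at byte offset 8 in s is rewritten below to
     MEM[&s, 12]
   via get_addr_base_and_unit_offset and constant-folding the two
   offsets together.  */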
6011 if (TREE_CODE (*t) == MEM_REF
6012 || TREE_CODE (*t) == TARGET_MEM_REF)
6014 tree addr = TREE_OPERAND (*t, 0);
6015 if (TREE_CODE (addr) == ADDR_EXPR
6016 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
6017 || handled_component_p (TREE_OPERAND (addr, 0))))
6019 tree base;
6020 poly_int64 coffset;
6021 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
6022 &coffset);
6023 if (!base)
6025 if (is_debug)
6026 return false;
6027 gcc_unreachable ();
6030 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
6031 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
6032 TREE_OPERAND (*t, 1),
6033 size_int (coffset));
6034 res = true;
6036 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
6037 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
6040 /* Canonicalize back MEM_REFs to plain reference trees if the object
6041 accessed is a decl that has the same access semantics as the MEM_REF. */
6042 if (TREE_CODE (*t) == MEM_REF
6043 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
6044 && integer_zerop (TREE_OPERAND (*t, 1))
6045 && MR_DEPENDENCE_CLIQUE (*t) == 0)
6047 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6048 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6049 if (/* Same volatile qualification. */
6050 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6051 /* Same TBAA behavior with -fstrict-aliasing. */
6052 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6053 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6054 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6055 /* Same alignment. */
6056 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6057 /* We have to look out here to not drop a required conversion
6058 from the rhs to the lhs if *t appears on the lhs or vice-versa
6059 if it appears on the rhs. Thus require strict type
6060 compatibility. */
6061 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6063 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6064 res = true;
6068 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6069 && TREE_CODE (*t) == MEM_REF
6070 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6072 tree base;
6073 poly_int64 coffset;
6074 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6075 &coffset);
6076 if (base)
6078 gcc_assert (TREE_CODE (base) == MEM_REF);
6079 poly_int64 moffset;
6080 if (mem_ref_offset (base).to_shwi (&moffset))
6082 coffset += moffset;
6083 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6085 coffset += moffset;
6086 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6087 return true;
6093 /* Canonicalize TARGET_MEM_REF in particular with respect to
6094 the indexes becoming constant. */
6095 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6097 tree tem = maybe_fold_tmr (*t);
6098 if (tem)
6100 *t = tem;
6101 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6102 recompute_tree_invariant_for_addr_expr (*orig_t);
6103 res = true;
6107 return res;
6110 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6111 distinguishes the two cases. */
6113 static bool
6114 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6116 bool changed = false;
6117 gimple *stmt = gsi_stmt (*gsi);
6118 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6119 unsigned i;
6120 fold_defer_overflow_warnings ();
6122 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6123 after propagation.
6124 ??? This shouldn't be done in generic folding but in the
6125 propagation helpers which also know whether an address was
6126 propagated.
6127 Also canonicalize operand order. */
6128 switch (gimple_code (stmt))
6130 case GIMPLE_ASSIGN:
6131 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6133 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6134 if ((REFERENCE_CLASS_P (*rhs)
6135 || TREE_CODE (*rhs) == ADDR_EXPR)
6136 && maybe_canonicalize_mem_ref_addr (rhs))
6137 changed = true;
6138 tree *lhs = gimple_assign_lhs_ptr (stmt);
6139 if (REFERENCE_CLASS_P (*lhs)
6140 && maybe_canonicalize_mem_ref_addr (lhs))
6141 changed = true;
6142 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6143 This cannot be done in maybe_canonicalize_mem_ref_addr
6144 as the replacement ssa_n p+ CST has two operands
6145 rather than one, and for the same reason it cannot
6146 be done in place. */
6148 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6150 tree inner = TREE_OPERAND (*rhs, 0);
6151 if (TREE_CODE (inner) == MEM_REF
6152 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6153 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6155 tree ptr = TREE_OPERAND (inner, 0);
6156 tree addon = TREE_OPERAND (inner, 1);
6157 addon = fold_convert (sizetype, addon);
6158 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6159 ptr, addon);
6160 changed = true;
6161 stmt = gsi_stmt (*gsi);
6165 else
6167 /* Canonicalize operand order. */
6168 enum tree_code code = gimple_assign_rhs_code (stmt);
6169 if (TREE_CODE_CLASS (code) == tcc_comparison
6170 || commutative_tree_code (code)
6171 || commutative_ternary_tree_code (code))
6173 tree rhs1 = gimple_assign_rhs1 (stmt);
6174 tree rhs2 = gimple_assign_rhs2 (stmt);
6175 if (tree_swap_operands_p (rhs1, rhs2))
6177 gimple_assign_set_rhs1 (stmt, rhs2);
6178 gimple_assign_set_rhs2 (stmt, rhs1);
6179 if (TREE_CODE_CLASS (code) == tcc_comparison)
6180 gimple_assign_set_rhs_code (stmt,
6181 swap_tree_comparison (code));
6182 changed = true;
6186 break;
6187 case GIMPLE_CALL:
6189 gcall *call = as_a<gcall *> (stmt);
6190 for (i = 0; i < gimple_call_num_args (call); ++i)
6192 tree *arg = gimple_call_arg_ptr (call, i);
6193 if (REFERENCE_CLASS_P (*arg)
6194 && maybe_canonicalize_mem_ref_addr (arg))
6195 changed = true;
6197 tree *lhs = gimple_call_lhs_ptr (call);
6198 if (*lhs
6199 && REFERENCE_CLASS_P (*lhs)
6200 && maybe_canonicalize_mem_ref_addr (lhs))
6201 changed = true;
6202 if (*lhs)
6204 combined_fn cfn = gimple_call_combined_fn (call);
6205 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6206 int opno = first_commutative_argument (ifn);
6207 if (opno >= 0)
6209 tree arg1 = gimple_call_arg (call, opno);
6210 tree arg2 = gimple_call_arg (call, opno + 1);
6211 if (tree_swap_operands_p (arg1, arg2))
6213 gimple_call_set_arg (call, opno, arg2);
6214 gimple_call_set_arg (call, opno + 1, arg1);
6215 changed = true;
6219 break;
6221 case GIMPLE_ASM:
6223 gasm *asm_stmt = as_a <gasm *> (stmt);
6224 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6226 tree link = gimple_asm_output_op (asm_stmt, i);
6227 tree op = TREE_VALUE (link);
6228 if (REFERENCE_CLASS_P (op)
6229 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6230 changed = true;
6232 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6234 tree link = gimple_asm_input_op (asm_stmt, i);
6235 tree op = TREE_VALUE (link);
6236 if ((REFERENCE_CLASS_P (op)
6237 || TREE_CODE (op) == ADDR_EXPR)
6238 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6239 changed = true;
6242 break;
6243 case GIMPLE_DEBUG:
6244 if (gimple_debug_bind_p (stmt))
6246 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6247 if (*val
6248 && (REFERENCE_CLASS_P (*val)
6249 || TREE_CODE (*val) == ADDR_EXPR)
6250 && maybe_canonicalize_mem_ref_addr (val, true))
6251 changed = true;
6253 break;
6254 case GIMPLE_COND:
6256 /* Canonicalize operand order. */
6257 tree lhs = gimple_cond_lhs (stmt);
6258 tree rhs = gimple_cond_rhs (stmt);
6259 if (tree_swap_operands_p (lhs, rhs))
6261 gcond *gc = as_a <gcond *> (stmt);
6262 gimple_cond_set_lhs (gc, rhs);
6263 gimple_cond_set_rhs (gc, lhs);
6264 gimple_cond_set_code (gc,
6265 swap_tree_comparison (gimple_cond_code (gc)));
6266 changed = true;
6269 default:;
6272 /* Dispatch to pattern-based folding. */
6273 if (!inplace
6274 || is_gimple_assign (stmt)
6275 || gimple_code (stmt) == GIMPLE_COND)
6277 gimple_seq seq = NULL;
6278 gimple_match_op res_op;
6279 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6280 valueize, valueize))
6282 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6283 changed = true;
6284 else
6285 gimple_seq_discard (seq);
6289 stmt = gsi_stmt (*gsi);
6291 /* Fold the main computation performed by the statement. */
6292 switch (gimple_code (stmt))
6294 case GIMPLE_ASSIGN:
6296 /* For boolean-typed X, try to canonicalize the comparisons
6297 X == 0, X == 1, X != 0, and X != 1. */
6298 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6299 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6301 tree lhs = gimple_assign_lhs (stmt);
6302 tree op1 = gimple_assign_rhs1 (stmt);
6303 tree op2 = gimple_assign_rhs2 (stmt);
6304 tree type = TREE_TYPE (op1);
6306 /* Check whether the comparison operands are of the same boolean
6307 type as the result type.
6308 Check that the second operand is an integer constant with value
6309 one or zero. */
6310 if (TREE_CODE (op2) == INTEGER_CST
6311 && (integer_zerop (op2) || integer_onep (op2))
6312 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6314 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6315 bool is_logical_not = false;
6317 /* X == 0 and X != 1 is a logical-not of X;
6318 X == 1 and X != 0 is X. */
6319 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6320 || (cmp_code == NE_EXPR && integer_onep (op2)))
6321 is_logical_not = true;
6323 if (is_logical_not == false)
6324 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6325 /* Only for one-bit precision typed X is the transformation
6326 !X -> ~X valid. */
6327 else if (TYPE_PRECISION (type) == 1)
6328 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6329 /* Otherwise we use !X -> X ^ 1. */
6330 else
6331 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6332 build_int_cst (type, 1));
6333 changed = true;
6334 break;
6338 unsigned old_num_ops = gimple_num_ops (stmt);
6339 tree lhs = gimple_assign_lhs (stmt);
6340 tree new_rhs = fold_gimple_assign (gsi);
6341 if (new_rhs
6342 && !useless_type_conversion_p (TREE_TYPE (lhs),
6343 TREE_TYPE (new_rhs)))
6344 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6345 if (new_rhs
6346 && (!inplace
6347 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6349 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6350 changed = true;
6352 break;
6355 case GIMPLE_CALL:
6356 changed |= gimple_fold_call (gsi, inplace);
6357 break;
6359 case GIMPLE_DEBUG:
6360 if (gimple_debug_bind_p (stmt))
6362 tree val = gimple_debug_bind_get_value (stmt);
6363 if (val && REFERENCE_CLASS_P (val))
6365 tree tem = maybe_fold_reference (val);
6366 if (tem)
6368 gimple_debug_bind_set_value (stmt, tem);
6369 changed = true;
6373 break;
6375 case GIMPLE_RETURN:
6377 greturn *ret_stmt = as_a<greturn *> (stmt);
6378 tree ret = gimple_return_retval(ret_stmt);
6380 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6382 tree val = valueize (ret);
6383 if (val && val != ret
6384 && may_propagate_copy (ret, val))
6386 gimple_return_set_retval (ret_stmt, val);
6387 changed = true;
6391 break;
6393 default:;
6396 stmt = gsi_stmt (*gsi);
6398 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6399 return changed;
6402 /* Valueization callback that ends up not following SSA edges. */
6404 tree
6405 no_follow_ssa_edges (tree)
6407 return NULL_TREE;
6410 /* Valueization callback that ends up following single-use SSA edges only. */
6412 tree
6413 follow_single_use_edges (tree val)
6415 if (TREE_CODE (val) == SSA_NAME
6416 && !has_single_use (val))
6417 return NULL_TREE;
6418 return val;
6421 /* Valueization callback that follows all SSA edges. */
6423 tree
6424 follow_all_ssa_edges (tree val)
6426 return val;
6429 /* Fold the statement pointed to by GSI. In some cases, this function may
6430 replace the whole statement with a new one. Returns true iff folding
6431 makes any changes.
6432 The statement pointed to by GSI should be in valid gimple form but may
6433 be in unfolded state resulting, for example, from constant propagation,
6434 which can produce *&x = 0. */
6436 bool
6437 fold_stmt (gimple_stmt_iterator *gsi)
6439 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6442 bool
6443 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6445 return fold_stmt_1 (gsi, false, valueize);
6448 /* Perform the minimal folding on statement *GSI. Only operations like
6449 *&x created by constant propagation are handled. The statement cannot
6450 be replaced with a new one. Return true if the statement was
6451 changed, false otherwise.
6452 The statement *GSI should be in valid gimple form but may
6453 be in unfolded state resulting, for example, from constant propagation,
6454 which can produce *&x = 0. */
6456 bool
6457 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6459 gimple *stmt = gsi_stmt (*gsi);
6460 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6461 gcc_assert (gsi_stmt (*gsi) == stmt);
6462 return changed;
6465 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6466 if EXPR is null or we don't know how.
6467 If non-null, the result always has boolean type. */
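/* Illustrative sketch (hypothetical operands): canonicalize_bool
   (a_1 < b_2, true) yields a_1 >= b_2, and for a non-boolean SSA name
   canonicalize_bool (x_3, false) yields x_3 != 0; both results have
   boolean type.  */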
6469 static tree
6470 canonicalize_bool (tree expr, bool invert)
6472 if (!expr)
6473 return NULL_TREE;
6474 else if (invert)
6476 if (integer_nonzerop (expr))
6477 return boolean_false_node;
6478 else if (integer_zerop (expr))
6479 return boolean_true_node;
6480 else if (TREE_CODE (expr) == SSA_NAME)
6481 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6482 build_int_cst (TREE_TYPE (expr), 0));
6483 else if (COMPARISON_CLASS_P (expr))
6484 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6485 boolean_type_node,
6486 TREE_OPERAND (expr, 0),
6487 TREE_OPERAND (expr, 1));
6488 else
6489 return NULL_TREE;
6491 else
6493 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6494 return expr;
6495 if (integer_nonzerop (expr))
6496 return boolean_true_node;
6497 else if (integer_zerop (expr))
6498 return boolean_false_node;
6499 else if (TREE_CODE (expr) == SSA_NAME)
6500 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6501 build_int_cst (TREE_TYPE (expr), 0));
6502 else if (COMPARISON_CLASS_P (expr))
6503 return fold_build2 (TREE_CODE (expr),
6504 boolean_type_node,
6505 TREE_OPERAND (expr, 0),
6506 TREE_OPERAND (expr, 1));
6507 else
6508 return NULL_TREE;
6512 /* Check to see if a boolean expression EXPR is logically equivalent to the
6513 comparison (OP1 CODE OP2). Check for various identities involving
6514 SSA_NAMEs. */
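/* Illustrative sketch (hypothetical names): given the definition
     t_3 = a_1 < b_2;
   same_bool_comparison_p (t_3, LT_EXPR, a_1, b_2) returns true via
   the SSA_NAME definition check below.  */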
6516 static bool
6517 same_bool_comparison_p (const_tree expr, enum tree_code code,
6518 const_tree op1, const_tree op2)
6520 gimple *s;
6522 /* The obvious case. */
6523 if (TREE_CODE (expr) == code
6524 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6525 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6526 return true;
6528 /* Check for comparing (name, name != 0) and the case where expr
6529 is an SSA_NAME with a definition matching the comparison. */
6530 if (TREE_CODE (expr) == SSA_NAME
6531 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6533 if (operand_equal_p (expr, op1, 0))
6534 return ((code == NE_EXPR && integer_zerop (op2))
6535 || (code == EQ_EXPR && integer_nonzerop (op2)));
6536 s = SSA_NAME_DEF_STMT (expr);
6537 if (is_gimple_assign (s)
6538 && gimple_assign_rhs_code (s) == code
6539 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6540 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6541 return true;
6544 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6545 of name is a comparison, recurse. */
6546 if (TREE_CODE (op1) == SSA_NAME
6547 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6549 s = SSA_NAME_DEF_STMT (op1);
6550 if (is_gimple_assign (s)
6551 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6553 enum tree_code c = gimple_assign_rhs_code (s);
6554 if ((c == NE_EXPR && integer_zerop (op2))
6555 || (c == EQ_EXPR && integer_nonzerop (op2)))
6556 return same_bool_comparison_p (expr, c,
6557 gimple_assign_rhs1 (s),
6558 gimple_assign_rhs2 (s));
6559 if ((c == EQ_EXPR && integer_zerop (op2))
6560 || (c == NE_EXPR && integer_nonzerop (op2)))
6561 return same_bool_comparison_p (expr,
6562 invert_tree_comparison (c, false),
6563 gimple_assign_rhs1 (s),
6564 gimple_assign_rhs2 (s));
6567 return false;
6570 /* Check to see if two boolean expressions OP1 and OP2 are logically
6571 equivalent. */
6573 static bool
6574 same_bool_result_p (const_tree op1, const_tree op2)
6576 /* Simple cases first. */
6577 if (operand_equal_p (op1, op2, 0))
6578 return true;
6580 /* Check the cases where at least one of the operands is a comparison.
6581 These are a bit smarter than operand_equal_p in that they apply some
6582 identities on SSA_NAMEs. */
6583 if (COMPARISON_CLASS_P (op2)
6584 && same_bool_comparison_p (op1, TREE_CODE (op2),
6585 TREE_OPERAND (op2, 0),
6586 TREE_OPERAND (op2, 1)))
6587 return true;
6588 if (COMPARISON_CLASS_P (op1)
6589 && same_bool_comparison_p (op2, TREE_CODE (op1),
6590 TREE_OPERAND (op1, 0),
6591 TREE_OPERAND (op1, 1)))
6592 return true;
6594 /* Default case. */
6595 return false;
6598 /* Forward declarations for some mutually recursive functions. */
6600 static tree
6601 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6602 enum tree_code code2, tree op2a, tree op2b, basic_block);
6603 static tree
6604 and_var_with_comparison (tree type, tree var, bool invert,
6605 enum tree_code code2, tree op2a, tree op2b,
6606 basic_block);
6607 static tree
6608 and_var_with_comparison_1 (tree type, gimple *stmt,
6609 enum tree_code code2, tree op2a, tree op2b,
6610 basic_block);
6611 static tree
6612 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6613 enum tree_code code2, tree op2a, tree op2b,
6614 basic_block);
6615 static tree
6616 or_var_with_comparison (tree, tree var, bool invert,
6617 enum tree_code code2, tree op2a, tree op2b,
6618 basic_block);
6619 static tree
6620 or_var_with_comparison_1 (tree, gimple *stmt,
6621 enum tree_code code2, tree op2a, tree op2b,
6622 basic_block);
6624 /* Helper function for and_comparisons_1: try to simplify the AND of the
6625 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6626 If INVERT is true, invert the value of the VAR before doing the AND.
6627 Return NULL_TREE if we can't simplify this to a single expression. */
6629 static tree
6630 and_var_with_comparison (tree type, tree var, bool invert,
6631 enum tree_code code2, tree op2a, tree op2b,
6632 basic_block outer_cond_bb)
6634 tree t;
6635 gimple *stmt = SSA_NAME_DEF_STMT (var);
6637 /* We can only deal with variables whose definitions are assignments. */
6638 if (!is_gimple_assign (stmt))
6639 return NULL_TREE;
6641 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6642 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6643 Then we only have to consider the simpler non-inverted cases. */
6644 if (invert)
6645 t = or_var_with_comparison_1 (type, stmt,
6646 invert_tree_comparison (code2, false),
6647 op2a, op2b, outer_cond_bb);
6648 else
6649 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6650 outer_cond_bb);
6651 return canonicalize_bool (t, invert);
6654 /* Try to simplify the AND of the ssa variable defined by the assignment
6655 STMT with the comparison specified by (OP2A CODE2 OP2B).
6656 Return NULL_TREE if we can't simplify this to a single expression. */
6658 static tree
6659 and_var_with_comparison_1 (tree type, gimple *stmt,
6660 enum tree_code code2, tree op2a, tree op2b,
6661 basic_block outer_cond_bb)
6663 tree var = gimple_assign_lhs (stmt);
6664 tree true_test_var = NULL_TREE;
6665 tree false_test_var = NULL_TREE;
6666 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6668 /* Check for identities like (var AND (var == 0)) => false. */
6669 if (TREE_CODE (op2a) == SSA_NAME
6670 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6672 if ((code2 == NE_EXPR && integer_zerop (op2b))
6673 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6675 true_test_var = op2a;
6676 if (var == true_test_var)
6677 return var;
6679 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6680 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6682 false_test_var = op2a;
6683 if (var == false_test_var)
6684 return boolean_false_node;
6688 /* If the definition is a comparison, recurse on it. */
6689 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6691 tree t = and_comparisons_1 (type, innercode,
6692 gimple_assign_rhs1 (stmt),
6693 gimple_assign_rhs2 (stmt),
6694 code2,
6695 op2a,
6696 op2b, outer_cond_bb);
6697 if (t)
6698 return t;
6701 /* If the definition is an AND or OR expression, we may be able to
6702 simplify by reassociating. */
6703 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6704 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6706 tree inner1 = gimple_assign_rhs1 (stmt);
6707 tree inner2 = gimple_assign_rhs2 (stmt);
6708 gimple *s;
6709 tree t;
6710 tree partial = NULL_TREE;
6711 bool is_and = (innercode == BIT_AND_EXPR);
6713 /* Check for boolean identities that don't require recursive examination
6714 of inner1/inner2:
6715 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6716 inner1 AND (inner1 OR inner2) => inner1
6717 !inner1 AND (inner1 AND inner2) => false
6718 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6719 Likewise for similar cases involving inner2. */
6720 if (inner1 == true_test_var)
6721 return (is_and ? var : inner1);
6722 else if (inner2 == true_test_var)
6723 return (is_and ? var : inner2);
6724 else if (inner1 == false_test_var)
6725 return (is_and
6726 ? boolean_false_node
6727 : and_var_with_comparison (type, inner2, false, code2, op2a,
6728 op2b, outer_cond_bb));
6729 else if (inner2 == false_test_var)
6730 return (is_and
6731 ? boolean_false_node
6732 : and_var_with_comparison (type, inner1, false, code2, op2a,
6733 op2b, outer_cond_bb));
6735 /* Next, redistribute/reassociate the AND across the inner tests.
6736 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6737 if (TREE_CODE (inner1) == SSA_NAME
6738 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6739 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6740 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6741 gimple_assign_rhs1 (s),
6742 gimple_assign_rhs2 (s),
6743 code2, op2a, op2b,
6744 outer_cond_bb)))
6746 /* Handle the AND case, where we are reassociating:
6747 (inner1 AND inner2) AND (op2a code2 op2b)
6748 => (t AND inner2)
6749 If the partial result t is a constant, we win. Otherwise
6750 continue on to try reassociating with the other inner test. */
6751 if (is_and)
6753 if (integer_onep (t))
6754 return inner2;
6755 else if (integer_zerop (t))
6756 return boolean_false_node;
6759 /* Handle the OR case, where we are redistributing:
6760 (inner1 OR inner2) AND (op2a code2 op2b)
6761 => (t OR (inner2 AND (op2a code2 op2b))) */
6762 else if (integer_onep (t))
6763 return boolean_true_node;
6765 /* Save partial result for later. */
6766 partial = t;
6769 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6770 if (TREE_CODE (inner2) == SSA_NAME
6771 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6772 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6773 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6774 gimple_assign_rhs1 (s),
6775 gimple_assign_rhs2 (s),
6776 code2, op2a, op2b,
6777 outer_cond_bb)))
6779 /* Handle the AND case, where we are reassociating:
6780 (inner1 AND inner2) AND (op2a code2 op2b)
6781 => (inner1 AND t) */
6782 if (is_and)
6784 if (integer_onep (t))
6785 return inner1;
6786 else if (integer_zerop (t))
6787 return boolean_false_node;
6788 /* If both are the same, we can apply the identity
6789 (x AND x) == x. */
6790 else if (partial && same_bool_result_p (t, partial))
6791 return t;
6794 /* Handle the OR case, where we are redistributing:
6795 (inner1 OR inner2) AND (op2a code2 op2b)
6796 => (t OR (inner1 AND (op2a code2 op2b)))
6797 => (t OR partial) */
6798 else
6800 if (integer_onep (t))
6801 return boolean_true_node;
6802 else if (partial)
6804 /* We already got a simplification for the other
6805 operand to the redistributed OR expression. The
6806 interesting case is when at least one is false.
6807 Or, if both are the same, we can apply the identity
6808 (x OR x) == x. */
6809 if (integer_zerop (partial))
6810 return t;
6811 else if (integer_zerop (t))
6812 return partial;
6813 else if (same_bool_result_p (t, partial))
6814 return t;
6819 return NULL_TREE;
6822 /* Try to simplify the AND of two comparisons defined by
6823 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6824 If this can be done without constructing an intermediate value,
6825 return the resulting tree; otherwise NULL_TREE is returned.
6826 This function is deliberately asymmetric as it recurses on SSA_DEFs
6827 in the first comparison but not the second. */
6829 static tree
6830 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6831 enum tree_code code2, tree op2a, tree op2b,
6832 basic_block outer_cond_bb)
6834 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6836 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6837 if (operand_equal_p (op1a, op2a, 0)
6838 && operand_equal_p (op1b, op2b, 0))
6840 /* Result will be either NULL_TREE, or a combined comparison. */
6841 tree t = combine_comparisons (UNKNOWN_LOCATION,
6842 TRUTH_ANDIF_EXPR, code1, code2,
6843 truth_type, op1a, op1b);
6844 if (t)
6845 return t;
6848 /* Likewise the swapped case of the above. */
6849 if (operand_equal_p (op1a, op2b, 0)
6850 && operand_equal_p (op1b, op2a, 0))
6852 /* Result will be either NULL_TREE, or a combined comparison. */
6853 tree t = combine_comparisons (UNKNOWN_LOCATION,
6854 TRUTH_ANDIF_EXPR, code1,
6855 swap_tree_comparison (code2),
6856 truth_type, op1a, op1b);
6857 if (t)
6858 return t;
6861 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6862 NAME's definition is a truth value. See if there are any simplifications
6863 that can be done against the NAME's definition. */
6864 if (TREE_CODE (op1a) == SSA_NAME
6865 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6866 && (integer_zerop (op1b) || integer_onep (op1b)))
6868 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6869 || (code1 == NE_EXPR && integer_onep (op1b)));
6870 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6871 switch (gimple_code (stmt))
6873 case GIMPLE_ASSIGN:
6874 /* Try to simplify by copy-propagating the definition. */
6875 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6876 op2b, outer_cond_bb);
6878 case GIMPLE_PHI:
6879 /* If every argument to the PHI produces the same result when
6880 ANDed with the second comparison, we win.
6881 Do not do this unless the type is bool since we need a bool
6882 result here anyway. */
6883 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6885 tree result = NULL_TREE;
6886 unsigned i;
6887 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6889 tree arg = gimple_phi_arg_def (stmt, i);
6891 /* If this PHI has itself as an argument, ignore it.
6892 If all the other args produce the same result,
6893 we're still OK. */
6894 if (arg == gimple_phi_result (stmt))
6895 continue;
6896 else if (TREE_CODE (arg) == INTEGER_CST)
6898 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6900 if (!result)
6901 result = boolean_false_node;
6902 else if (!integer_zerop (result))
6903 return NULL_TREE;
6905 else if (!result)
6906 result = fold_build2 (code2, boolean_type_node,
6907 op2a, op2b);
6908 else if (!same_bool_comparison_p (result,
6909 code2, op2a, op2b))
6910 return NULL_TREE;
6912 else if (TREE_CODE (arg) == SSA_NAME
6913 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6915 tree temp;
6916 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6917 /* In simple cases we can look through PHI nodes,
6918 but we have to be careful with loops.
6919 See PR49073. */
6920 if (! dom_info_available_p (CDI_DOMINATORS)
6921 || gimple_bb (def_stmt) == gimple_bb (stmt)
6922 || dominated_by_p (CDI_DOMINATORS,
6923 gimple_bb (def_stmt),
6924 gimple_bb (stmt)))
6925 return NULL_TREE;
6926 temp = and_var_with_comparison (type, arg, invert, code2,
6927 op2a, op2b,
6928 outer_cond_bb);
6929 if (!temp)
6930 return NULL_TREE;
6931 else if (!result)
6932 result = temp;
6933 else if (!same_bool_result_p (result, temp))
6934 return NULL_TREE;
6936 else
6937 return NULL_TREE;
6939 return result;
6942 default:
6943 break;
6946 return NULL_TREE;
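/* Shared state for follow_outer_ssa_edges below: FOSA_BB is the basic
   block from which the simplification was initiated, and FOSA_UNWIND
   records SSA names whose flow-sensitive info was temporarily cleared
   so that maybe_fold_comparisons_from_match_pd can restore it
   afterwards.  */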
6949 static basic_block fosa_bb;
6950 static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
6951 static tree
6952 follow_outer_ssa_edges (tree val)
6954 if (TREE_CODE (val) == SSA_NAME
6955 && !SSA_NAME_IS_DEFAULT_DEF (val))
6957 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
6958 if (!def_bb
6959 || def_bb == fosa_bb
6960 || (dom_info_available_p (CDI_DOMINATORS)
6961 && (def_bb == fosa_bb
6962 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
6963 return val;
6964 /* We cannot temporarily rewrite stmts with undefined overflow
6965 behavior, so avoid expanding them. */
6966 if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
6967 || POINTER_TYPE_P (TREE_TYPE (val)))
6968 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
6969 return NULL_TREE;
6970 flow_sensitive_info_storage storage;
6971 storage.save_and_clear (val);
6972 /* If the definition does not dominate fosa_bb, temporarily reset
6973 flow-sensitive info. */
6974 fosa_unwind->safe_push (std::make_pair (val, storage));
6975 return val;
6977 return val;
6980 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
6981 try to simplify the AND/OR of the two comparisons specified by
6982 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd. Return NULL_TREE
6983 if we can't simplify this to a single expression. To keep the cost of
6984 building SSA names / gimple stmts low, we allocate them on the stack.
6985 This makes the code a bit ugly. */
6987 static tree
6988 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6989 enum tree_code code1,
6990 tree op1a, tree op1b,
6991 enum tree_code code2, tree op2a,
6992 tree op2b,
6993 basic_block outer_cond_bb)
6995 /* Allocate gimple stmt1 on the stack. */
6996 gassign *stmt1
6997 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6998 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6999 gimple_assign_set_rhs_code (stmt1, code1);
7000 gimple_assign_set_rhs1 (stmt1, op1a);
7001 gimple_assign_set_rhs2 (stmt1, op1b);
7002 gimple_set_bb (stmt1, NULL);
7004 /* Allocate gimple stmt2 on the stack. */
7005 gassign *stmt2
7006 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7007 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
7008 gimple_assign_set_rhs_code (stmt2, code2);
7009 gimple_assign_set_rhs1 (stmt2, op2a);
7010 gimple_assign_set_rhs2 (stmt2, op2b);
7011 gimple_set_bb (stmt2, NULL);
7013 /* Allocate the SSA name lhs1 on the stack. */
7014 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
7015 memset (lhs1, 0, sizeof (tree_ssa_name));
7016 TREE_SET_CODE (lhs1, SSA_NAME);
7017 TREE_TYPE (lhs1) = type;
7018 init_ssa_name_imm_use (lhs1);
7020 /* Allocate the SSA name lhs2 on the stack. */
7021 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
7022 memset (lhs2, 0, sizeof (tree_ssa_name));
7023 TREE_SET_CODE (lhs2, SSA_NAME);
7024 TREE_TYPE (lhs2) = type;
7025 init_ssa_name_imm_use (lhs2);
7027 gimple_assign_set_lhs (stmt1, lhs1);
7028 gimple_assign_set_lhs (stmt2, lhs2);
7030 gimple_match_op op (gimple_match_cond::UNCOND, code,
7031 type, gimple_assign_lhs (stmt1),
7032 gimple_assign_lhs (stmt2));
7033 fosa_bb = outer_cond_bb;
7034 auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
7035 fosa_unwind = &unwind_stack;
7036 if (op.resimplify (NULL, (!outer_cond_bb
7037 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
7039 fosa_unwind = NULL;
7040 for (auto p : unwind_stack)
7041 p.second.restore (p.first);
7042 if (gimple_simplified_result_is_gimple_val (&op))
7044 tree res = op.ops[0];
7045 if (res == lhs1)
7046 return build2 (code1, type, op1a, op1b);
7047 else if (res == lhs2)
7048 return build2 (code2, type, op2a, op2b);
7049 else
7050 return res;
7052 else if (op.code.is_tree_code ()
7053 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
7055 tree op0 = op.ops[0];
7056 tree op1 = op.ops[1];
7057 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
7058 return NULL_TREE; /* not simple */
7060 return build2 ((enum tree_code)op.code, op.type, op0, op1);
7063 fosa_unwind = NULL;
7064 for (auto p : unwind_stack)
7065 p.second.restore (p.first);
7067 return NULL_TREE;
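/* Note on the scheme above: the stack-allocated stmt1/stmt2 let lhs1
   and lhs2 look like ordinary SSA definitions so that the match.pd
   machinery can look through them. A simplified result that is exactly
   lhs1 or lhs2 is translated back to the corresponding original
   comparison; a comparison result still referring to them is rejected,
   since the temporaries must not escape this function.  */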
7070 /* Try to simplify the AND of two comparisons, specified by
7071 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7072 If this can be simplified to a single expression (without requiring
7073 introducing more SSA variables to hold intermediate values),
7074 return the resulting tree. Otherwise return NULL_TREE.
7075 If the result expression is non-null, it has boolean type. */
7077 tree
7078 maybe_fold_and_comparisons (tree type,
7079 enum tree_code code1, tree op1a, tree op1b,
7080 enum tree_code code2, tree op2a, tree op2b,
7081 basic_block outer_cond_bb)
7083 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7084 outer_cond_bb))
7085 return t;
7087 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7088 outer_cond_bb))
7089 return t;
7091 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7092 op1a, op1b, code2, op2a,
7093 op2b, outer_cond_bb))
7094 return t;
7096 return NULL_TREE;
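/* Illustrative sketch (hypothetical operands): a caller such as the
   ifcombine pass may ask for the AND of x_1 <= y_2 and x_1 >= y_2;
   and_comparisons_1 combines these via combine_comparisons into the
   single comparison x_1 == y_2.  */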
7099 /* Helper function for or_comparisons_1: try to simplify the OR of the
7100 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7101 If INVERT is true, invert the value of VAR before doing the OR.
7102 Return NULL_TREE if we can't simplify this to a single expression. */
7104 static tree
7105 or_var_with_comparison (tree type, tree var, bool invert,
7106 enum tree_code code2, tree op2a, tree op2b,
7107 basic_block outer_cond_bb)
7109 tree t;
7110 gimple *stmt = SSA_NAME_DEF_STMT (var);
7112 /* We can only deal with variables whose definitions are assignments. */
7113 if (!is_gimple_assign (stmt))
7114 return NULL_TREE;
7116 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7117 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7118 Then we only have to consider the simpler non-inverted cases. */
7119 if (invert)
7120 t = and_var_with_comparison_1 (type, stmt,
7121 invert_tree_comparison (code2, false),
7122 op2a, op2b, outer_cond_bb);
7123 else
7124 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7125 outer_cond_bb);
7126 return canonicalize_bool (t, invert);
7129 /* Try to simplify the OR of the ssa variable defined by the assignment
7130 STMT with the comparison specified by (OP2A CODE2 OP2B).
7131 Return NULL_TREE if we can't simplify this to a single expression. */
7133 static tree
7134 or_var_with_comparison_1 (tree type, gimple *stmt,
7135 enum tree_code code2, tree op2a, tree op2b,
7136 basic_block outer_cond_bb)
7138 tree var = gimple_assign_lhs (stmt);
7139 tree true_test_var = NULL_TREE;
7140 tree false_test_var = NULL_TREE;
7141 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7143 /* Check for identities like (var OR (var != 0)) => true. */
7144 if (TREE_CODE (op2a) == SSA_NAME
7145 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7147 if ((code2 == NE_EXPR && integer_zerop (op2b))
7148 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7150 true_test_var = op2a;
7151 if (var == true_test_var)
7152 return var;
7154 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7155 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7157 false_test_var = op2a;
7158 if (var == false_test_var)
7159 return boolean_true_node;
7163 /* If the definition is a comparison, recurse on it. */
7164 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7166 tree t = or_comparisons_1 (type, innercode,
7167 gimple_assign_rhs1 (stmt),
7168 gimple_assign_rhs2 (stmt),
7169 code2, op2a, op2b, outer_cond_bb);
7170 if (t)
7171 return t;
7174 /* If the definition is an AND or OR expression, we may be able to
7175 simplify by reassociating. */
7176 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7177 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7179 tree inner1 = gimple_assign_rhs1 (stmt);
7180 tree inner2 = gimple_assign_rhs2 (stmt);
7181 gimple *s;
7182 tree t;
7183 tree partial = NULL_TREE;
7184 bool is_or = (innercode == BIT_IOR_EXPR);
7186 /* Check for boolean identities that don't require recursive examination
7187 of inner1/inner2:
7188 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7189 inner1 OR (inner1 AND inner2) => inner1
7190 !inner1 OR (inner1 OR inner2) => true
7191 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
7193 if (inner1 == true_test_var)
7194 return (is_or ? var : inner1);
7195 else if (inner2 == true_test_var)
7196 return (is_or ? var : inner2);
7197 else if (inner1 == false_test_var)
7198 return (is_or
7199 ? boolean_true_node
7200 : or_var_with_comparison (type, inner2, false, code2, op2a,
7201 op2b, outer_cond_bb));
7202 else if (inner2 == false_test_var)
7203 return (is_or
7204 ? boolean_true_node
7205 : or_var_with_comparison (type, inner1, false, code2, op2a,
7206 op2b, outer_cond_bb));
7208 /* Next, redistribute/reassociate the OR across the inner tests.
7209 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7210 if (TREE_CODE (inner1) == SSA_NAME
7211 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7212 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7213 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7214 gimple_assign_rhs1 (s),
7215 gimple_assign_rhs2 (s),
7216 code2, op2a, op2b,
7217 outer_cond_bb)))
7219 /* Handle the OR case, where we are reassociating:
7220 (inner1 OR inner2) OR (op2a code2 op2b)
7221 => (t OR inner2)
7222 If the partial result t is a constant, we win. Otherwise
7223 continue on to try reassociating with the other inner test. */
7224 if (is_or)
7226 if (integer_onep (t))
7227 return boolean_true_node;
7228 else if (integer_zerop (t))
7229 return inner2;
7232 /* Handle the AND case, where we are redistributing:
7233 (inner1 AND inner2) OR (op2a code2 op2b)
7234 => (t AND (inner2 OR (op2a code op2b))) */
7235 else if (integer_zerop (t))
7236 return boolean_false_node;
7238 /* Save partial result for later. */
7239 partial = t;
7242 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7243 if (TREE_CODE (inner2) == SSA_NAME
7244 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7245 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7246 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7247 gimple_assign_rhs1 (s),
7248 gimple_assign_rhs2 (s),
7249 code2, op2a, op2b,
7250 outer_cond_bb)))
7252 /* Handle the OR case, where we are reassociating:
7253 (inner1 OR inner2) OR (op2a code2 op2b)
7254 => (inner1 OR t)
7255 => (t OR partial) */
7256 if (is_or)
7258 if (integer_zerop (t))
7259 return inner1;
7260 else if (integer_onep (t))
7261 return boolean_true_node;
7262 /* If both are the same, we can apply the identity
7263 (x OR x) == x. */
7264 else if (partial && same_bool_result_p (t, partial))
7265 return t;
7268 /* Handle the AND case, where we are redistributing:
7269 (inner1 AND inner2) OR (op2a code2 op2b)
7270 => (t AND (inner1 OR (op2a code2 op2b)))
7271 => (t AND partial) */
7272 else
7274 if (integer_zerop (t))
7275 return boolean_false_node;
7276 else if (partial)
7278 /* We already got a simplification for the other
7279 operand to the redistributed AND expression. The
7280 interesting case is when at least one is true.
7281 Or, if both are the same, we can apply the identity
7282 (x AND x) == x. */
7283 if (integer_onep (partial))
7284 return t;
7285 else if (integer_onep (t))
7286 return partial;
7287 else if (same_bool_result_p (t, partial))
7288 return t;
7293 return NULL_TREE;
7296 /* Try to simplify the OR of two comparisons defined by
7297 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7298 If this can be done without constructing an intermediate value,
7299 return the resulting tree; otherwise NULL_TREE is returned.
7300 This function is deliberately asymmetric as it recurses on SSA_DEFs
7301 in the first comparison but not the second. */
7303 static tree
7304 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7305 enum tree_code code2, tree op2a, tree op2b,
7306 basic_block outer_cond_bb)
7308 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7310 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7311 if (operand_equal_p (op1a, op2a, 0)
7312 && operand_equal_p (op1b, op2b, 0))
7314 /* Result will be either NULL_TREE, or a combined comparison. */
7315 tree t = combine_comparisons (UNKNOWN_LOCATION,
7316 TRUTH_ORIF_EXPR, code1, code2,
7317 truth_type, op1a, op1b);
7318 if (t)
7319 return t;
7322 /* Likewise the swapped case of the above. */
7323 if (operand_equal_p (op1a, op2b, 0)
7324 && operand_equal_p (op1b, op2a, 0))
7326 /* Result will be either NULL_TREE, or a combined comparison. */
7327 tree t = combine_comparisons (UNKNOWN_LOCATION,
7328 TRUTH_ORIF_EXPR, code1,
7329 swap_tree_comparison (code2),
7330 truth_type, op1a, op1b);
7331 if (t)
7332 return t;
7335 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7336 NAME's definition is a truth value. See if there are any simplifications
7337 that can be done against NAME's definition. */
7338 if (TREE_CODE (op1a) == SSA_NAME
7339 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7340 && (integer_zerop (op1b) || integer_onep (op1b)))
7342 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7343 || (code1 == NE_EXPR && integer_onep (op1b)));
7344 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7345 switch (gimple_code (stmt))
7347 case GIMPLE_ASSIGN:
7348 /* Try to simplify by copy-propagating the definition. */
7349 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7350 op2b, outer_cond_bb);
7352 case GIMPLE_PHI:
7353 /* If every argument to the PHI produces the same result when
7354 ORed with the second comparison, we win.
7355 Do not do this unless the type is bool since we need a bool
7356 result here anyway. */
7357 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7359 tree result = NULL_TREE;
7360 unsigned i;
7361 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7363 tree arg = gimple_phi_arg_def (stmt, i);
7365 /* If this PHI has itself as an argument, ignore it.
7366 If all the other args produce the same result,
7367 we're still OK. */
7368 if (arg == gimple_phi_result (stmt))
7369 continue;
7370 else if (TREE_CODE (arg) == INTEGER_CST)
7372 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7374 if (!result)
7375 result = boolean_true_node;
7376 else if (!integer_onep (result))
7377 return NULL_TREE;
7379 else if (!result)
7380 result = fold_build2 (code2, boolean_type_node,
7381 op2a, op2b);
7382 else if (!same_bool_comparison_p (result,
7383 code2, op2a, op2b))
7384 return NULL_TREE;
7386 else if (TREE_CODE (arg) == SSA_NAME
7387 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7389 tree temp;
7390 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7391 /* In simple cases we can look through PHI nodes,
7392 but we have to be careful with loops.
7393 See PR49073. */
7394 if (! dom_info_available_p (CDI_DOMINATORS)
7395 || gimple_bb (def_stmt) == gimple_bb (stmt)
7396 || dominated_by_p (CDI_DOMINATORS,
7397 gimple_bb (def_stmt),
7398 gimple_bb (stmt)))
7399 return NULL_TREE;
7400 temp = or_var_with_comparison (type, arg, invert, code2,
7401 op2a, op2b, outer_cond_bb);
7402 if (!temp)
7403 return NULL_TREE;
7404 else if (!result)
7405 result = temp;
7406 else if (!same_bool_result_p (result, temp))
7407 return NULL_TREE;
7409 else
7410 return NULL_TREE;
7412 return result;
7415 default:
7416 break;
7419 return NULL_TREE;
7422 /* Try to simplify the OR of two comparisons, specified by
7423 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7424 If this can be simplified to a single expression (without requiring
7425 introducing more SSA variables to hold intermediate values),
7426 return the resulting tree. Otherwise return NULL_TREE.
7427 If the result expression is non-null, it has boolean type. */
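/* Editor's illustration (not part of the original source; SSA names
   hypothetical): for identical operands the fold goes through
   combine_comparisons, e.g. for integer operands
     (a_1 < b_2) | (a_1 == b_2)  ==>  a_1 <= b_2
   so a call like
     maybe_fold_or_comparisons (boolean_type_node, LT_EXPR, a_1, b_2,
                                EQ_EXPR, a_1, b_2, bb)
   would return the tree for a_1 <= b_2.  */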
7429 tree
7430 maybe_fold_or_comparisons (tree type,
7431 enum tree_code code1, tree op1a, tree op1b,
7432 enum tree_code code2, tree op2a, tree op2b,
7433 basic_block outer_cond_bb)
7435 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7436 outer_cond_bb))
7437 return t;
7439 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7440 outer_cond_bb))
7441 return t;
7443 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7444 op1a, op1b, code2, op2a,
7445 op2b, outer_cond_bb))
7446 return t;
7448 return NULL_TREE;
7451 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7453 Either NULL_TREE, a simplified but non-constant value, or a constant
7454 is returned.
7456 ??? This should go into a gimple-fold-inline.h file to be eventually
7457 privatized with the single valueize function used in the various TUs
7458 to avoid the indirect function call overhead. */
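/* Editor's sketch of the intended use (hypothetical names): if VALUEIZE
   maps x_1 to the INTEGER_CST 3, then folding the statement
     y_2 = x_1 * 4;
   yields the INTEGER_CST 12; when the operands do not valueize to
   constants the result is a simplified non-constant or NULL_TREE.  */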
7460 tree
7461 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7462 tree (*gvalueize) (tree))
7464 gimple_match_op res_op;
7465 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7466 edges if there are intermediate VARYING defs. For this reason
7467 do not follow SSA edges here even though SCCVN can technically
7468 deal with that just fine. */
7469 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7471 tree res = NULL_TREE;
7472 if (gimple_simplified_result_is_gimple_val (&res_op))
7473 res = res_op.ops[0];
7474 else if (mprts_hook)
7475 res = mprts_hook (&res_op);
7476 if (res)
7478 if (dump_file && dump_flags & TDF_DETAILS)
7480 fprintf (dump_file, "Match-and-simplified ");
7481 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7482 fprintf (dump_file, " to ");
7483 print_generic_expr (dump_file, res);
7484 fprintf (dump_file, "\n");
7486 return res;
7490 location_t loc = gimple_location (stmt);
7491 switch (gimple_code (stmt))
7493 case GIMPLE_ASSIGN:
7495 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7497 switch (get_gimple_rhs_class (subcode))
7499 case GIMPLE_SINGLE_RHS:
7501 tree rhs = gimple_assign_rhs1 (stmt);
7502 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7504 if (TREE_CODE (rhs) == SSA_NAME)
7506 /* If the RHS is an SSA_NAME, return its known constant value,
7507 if any. */
7508 return (*valueize) (rhs);
7510 /* Handle propagating invariant addresses into address
7511 operations. */
7512 else if (TREE_CODE (rhs) == ADDR_EXPR
7513 && !is_gimple_min_invariant (rhs))
7515 poly_int64 offset = 0;
7516 tree base;
7517 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7518 &offset,
7519 valueize);
7520 if (base
7521 && (CONSTANT_CLASS_P (base)
7522 || decl_address_invariant_p (base)))
7523 return build_invariant_address (TREE_TYPE (rhs),
7524 base, offset);
7526 else if (TREE_CODE (rhs) == CONSTRUCTOR
7527 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7528 && known_eq (CONSTRUCTOR_NELTS (rhs),
7529 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7531 unsigned i, nelts;
7532 tree val;
7534 nelts = CONSTRUCTOR_NELTS (rhs);
7535 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7536 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7538 val = (*valueize) (val);
7539 if (TREE_CODE (val) == INTEGER_CST
7540 || TREE_CODE (val) == REAL_CST
7541 || TREE_CODE (val) == FIXED_CST)
7542 vec.quick_push (val);
7543 else
7544 return NULL_TREE;
7547 return vec.build ();
7549 if (subcode == OBJ_TYPE_REF)
7551 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7552 /* If callee is constant, we can fold away the wrapper. */
7553 if (is_gimple_min_invariant (val))
7554 return val;
7557 if (kind == tcc_reference)
7559 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7560 || TREE_CODE (rhs) == REALPART_EXPR
7561 || TREE_CODE (rhs) == IMAGPART_EXPR)
7562 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7564 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7565 return fold_unary_loc (EXPR_LOCATION (rhs),
7566 TREE_CODE (rhs),
7567 TREE_TYPE (rhs), val);
7569 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7570 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7572 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7573 return fold_ternary_loc (EXPR_LOCATION (rhs),
7574 TREE_CODE (rhs),
7575 TREE_TYPE (rhs), val,
7576 TREE_OPERAND (rhs, 1),
7577 TREE_OPERAND (rhs, 2));
7579 else if (TREE_CODE (rhs) == MEM_REF
7580 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7582 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7583 if (TREE_CODE (val) == ADDR_EXPR
7584 && is_gimple_min_invariant (val))
7586 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7587 unshare_expr (val),
7588 TREE_OPERAND (rhs, 1));
7589 if (tem)
7590 rhs = tem;
7593 return fold_const_aggregate_ref_1 (rhs, valueize);
7595 else if (kind == tcc_declaration)
7596 return get_symbol_constant_value (rhs);
7597 return rhs;
7600 case GIMPLE_UNARY_RHS:
7601 return NULL_TREE;
7603 case GIMPLE_BINARY_RHS:
7604 /* Translate &x + CST into an invariant form suitable for
7605 further propagation. */
7606 if (subcode == POINTER_PLUS_EXPR)
7608 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7609 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7610 if (TREE_CODE (op0) == ADDR_EXPR
7611 && TREE_CODE (op1) == INTEGER_CST)
7613 tree off = fold_convert (ptr_type_node, op1);
7614 return build1_loc
7615 (loc, ADDR_EXPR, TREE_TYPE (op0),
7616 fold_build2 (MEM_REF,
7617 TREE_TYPE (TREE_TYPE (op0)),
7618 unshare_expr (op0), off));
7621 /* Canonicalize bool != 0 and bool == 0 appearing after
7622 valueization. While gimple_simplify handles this
7623 it can get confused by the ~X == 1 -> X == 0 transform
7624 which we can't reduce to an SSA name or a constant
7625 (and we have no way to tell gimple_simplify to not
7626 consider those transforms in the first place). */
7627 else if (subcode == EQ_EXPR
7628 || subcode == NE_EXPR)
7630 tree lhs = gimple_assign_lhs (stmt);
7631 tree op0 = gimple_assign_rhs1 (stmt);
7632 if (useless_type_conversion_p (TREE_TYPE (lhs),
7633 TREE_TYPE (op0)))
7635 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7636 op0 = (*valueize) (op0);
7637 if (TREE_CODE (op0) == INTEGER_CST)
7638 std::swap (op0, op1);
7639 if (TREE_CODE (op1) == INTEGER_CST
7640 && ((subcode == NE_EXPR && integer_zerop (op1))
7641 || (subcode == EQ_EXPR && integer_onep (op1))))
7642 return op0;
7645 return NULL_TREE;
7647 case GIMPLE_TERNARY_RHS:
7649 /* Handle ternary operators that can appear in GIMPLE form. */
7650 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7651 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7652 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7653 return fold_ternary_loc (loc, subcode,
7654 TREE_TYPE (gimple_assign_lhs (stmt)),
7655 op0, op1, op2);
7658 default:
7659 gcc_unreachable ();
7663 case GIMPLE_CALL:
7665 tree fn;
7666 gcall *call_stmt = as_a <gcall *> (stmt);
7668 if (gimple_call_internal_p (stmt))
7670 enum tree_code subcode = ERROR_MARK;
7671 switch (gimple_call_internal_fn (stmt))
7673 case IFN_UBSAN_CHECK_ADD:
7674 subcode = PLUS_EXPR;
7675 break;
7676 case IFN_UBSAN_CHECK_SUB:
7677 subcode = MINUS_EXPR;
7678 break;
7679 case IFN_UBSAN_CHECK_MUL:
7680 subcode = MULT_EXPR;
7681 break;
7682 case IFN_BUILTIN_EXPECT:
7684 tree arg0 = gimple_call_arg (stmt, 0);
7685 tree op0 = (*valueize) (arg0);
7686 if (TREE_CODE (op0) == INTEGER_CST)
7687 return op0;
7688 return NULL_TREE;
7690 default:
7691 return NULL_TREE;
7693 tree arg0 = gimple_call_arg (stmt, 0);
7694 tree arg1 = gimple_call_arg (stmt, 1);
7695 tree op0 = (*valueize) (arg0);
7696 tree op1 = (*valueize) (arg1);
7698 if (TREE_CODE (op0) != INTEGER_CST
7699 || TREE_CODE (op1) != INTEGER_CST)
7701 switch (subcode)
7703 case MULT_EXPR:
7704 /* x * 0 = 0 * x = 0 without overflow. */
7705 if (integer_zerop (op0) || integer_zerop (op1))
7706 return build_zero_cst (TREE_TYPE (arg0));
7707 break;
7708 case MINUS_EXPR:
7709 /* y - y = 0 without overflow. */
7710 if (operand_equal_p (op0, op1, 0))
7711 return build_zero_cst (TREE_TYPE (arg0));
7712 break;
7713 default:
7714 break;
7717 tree res
7718 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7719 if (res
7720 && TREE_CODE (res) == INTEGER_CST
7721 && !TREE_OVERFLOW (res))
7722 return res;
7723 return NULL_TREE;
7726 fn = (*valueize) (gimple_call_fn (stmt));
7727 if (TREE_CODE (fn) == ADDR_EXPR
7728 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7729 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7730 && gimple_builtin_call_types_compatible_p (stmt,
7731 TREE_OPERAND (fn, 0)))
7733 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7734 tree retval;
7735 unsigned i;
7736 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7737 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7738 retval = fold_builtin_call_array (loc,
7739 gimple_call_return_type (call_stmt),
7740 fn, gimple_call_num_args (stmt), args);
7741 if (retval)
7743 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7744 STRIP_NOPS (retval);
7745 retval = fold_convert (gimple_call_return_type (call_stmt),
7746 retval);
7748 return retval;
7750 return NULL_TREE;
7753 default:
7754 return NULL_TREE;
7758 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7759 Returns NULL_TREE if folding to a constant is not possible, otherwise
7760 returns a constant according to is_gimple_min_invariant. */
7762 tree
7763 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7765 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7766 if (res && is_gimple_min_invariant (res))
7767 return res;
7768 return NULL_TREE;
7772 /* The following functions are supposed to fold references using
7773 their constant initializers. */
7775 /* See if we can find the constructor defining the value of BASE.
7776 When we know the constructor with a constant offset (such as when
7777 BASE is array[40] and we know the constructor of the array),
7778 BIT_OFFSET is adjusted accordingly.
7780 As a special case, return error_mark_node when constructor
7781 is not explicitly available, but it is known to be zero
7782 such as 'static const int a;'. */
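/* Editor's example (not from the original source): for
     static const int a;
   there is no explicit constructor but the value is known to be zero,
   so error_mark_node is returned and callers such as
   fold_const_aggregate_ref_1 fold the reference to 0.  */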
7783 static tree
7784 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7785 tree (*valueize)(tree))
7787 poly_int64 bit_offset2, size, max_size;
7788 bool reverse;
7790 if (TREE_CODE (base) == MEM_REF)
7792 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7793 if (!boff.to_shwi (bit_offset))
7794 return NULL_TREE;
7796 if (valueize
7797 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7798 base = valueize (TREE_OPERAND (base, 0));
7799 if (!base || TREE_CODE (base) != ADDR_EXPR)
7800 return NULL_TREE;
7801 base = TREE_OPERAND (base, 0);
7803 else if (valueize
7804 && TREE_CODE (base) == SSA_NAME)
7805 base = valueize (base);
7807 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7808 DECL_INITIAL. If BASE is a nested reference into another
7809 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7810 the inner reference. */
7811 switch (TREE_CODE (base))
7813 case VAR_DECL:
7814 case CONST_DECL:
7816 tree init = ctor_for_folding (base);
7818 /* Our semantics are the exact opposite of ctor_for_folding's:
7819 NULL means unknown, while error_mark_node means 0. */
7820 if (init == error_mark_node)
7821 return NULL_TREE;
7822 if (!init)
7823 return error_mark_node;
7824 return init;
7827 case VIEW_CONVERT_EXPR:
7828 return get_base_constructor (TREE_OPERAND (base, 0),
7829 bit_offset, valueize);
7831 case ARRAY_REF:
7832 case COMPONENT_REF:
7833 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7834 &reverse);
7835 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7836 return NULL_TREE;
7837 *bit_offset += bit_offset2;
7838 return get_base_constructor (base, bit_offset, valueize);
7840 case CONSTRUCTOR:
7841 return base;
7843 default:
7844 if (CONSTANT_CLASS_P (base))
7845 return base;
7847 return NULL_TREE;
7851 /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
7852 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
7853 the reference; otherwise the type of the referenced element is used instead.
7854 When SIZE is zero, attempt to fold a reference to the entire element OFFSET
7855 refers to. Increment *SUBOFF by the bit offset of the accessed element. */
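/* Editor's worked example (not from the original source): for
     static const int a[4] = { 1, 2, 3, 4 };
   with 32-bit int, a read with TYPE int, SIZE 32 and OFFSET 64 computes
   ELT_SIZE = 4, ACCESS_INDEX = (64 / 8) / 4 = 2 and INNER_OFFSET = 0,
   folding to the INTEGER_CST 3.  */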
7857 static tree
7858 fold_array_ctor_reference (tree type, tree ctor,
7859 unsigned HOST_WIDE_INT offset,
7860 unsigned HOST_WIDE_INT size,
7861 tree from_decl,
7862 unsigned HOST_WIDE_INT *suboff)
7864 offset_int low_bound;
7865 offset_int elt_size;
7866 offset_int access_index;
7867 tree domain_type = NULL_TREE;
7868 HOST_WIDE_INT inner_offset;
7870 /* Compute low bound and elt size. */
7871 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7872 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7873 if (domain_type && TYPE_MIN_VALUE (domain_type))
7875 /* Static constructors for variably sized objects make no sense. */
7876 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7877 return NULL_TREE;
7878 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7880 else
7881 low_bound = 0;
7882 /* Static constructors for variably sized objects make no sense. */
7883 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7884 return NULL_TREE;
7885 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7887 /* When TYPE is non-null, verify that it specifies a constant-sized
7888 access of a multiple of the array element size. Avoid division
7889 by zero below when ELT_SIZE is zero, such as with the result of
7890 an initializer for a zero-length array or an empty struct. */
7891 if (elt_size == 0
7892 || (type
7893 && (!TYPE_SIZE_UNIT (type)
7894 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7895 return NULL_TREE;
7897 /* Compute the array index we look for. */
7898 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7899 elt_size);
7900 access_index += low_bound;
7902 /* And offset within the access. */
7903 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7905 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7906 if (size > elt_sz * BITS_PER_UNIT)
7908 /* native_encode_expr constraints. */
7909 if (size > MAX_BITSIZE_MODE_ANY_MODE
7910 || size % BITS_PER_UNIT != 0
7911 || inner_offset % BITS_PER_UNIT != 0
7912 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7913 return NULL_TREE;
7915 unsigned ctor_idx;
7916 tree val = get_array_ctor_element_at_index (ctor, access_index,
7917 &ctor_idx);
7918 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7919 return build_zero_cst (type);
7921 /* native-encode adjacent ctor elements. */
7922 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7923 unsigned bufoff = 0;
7924 offset_int index = 0;
7925 offset_int max_index = access_index;
7926 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7927 if (!val)
7928 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7929 else if (!CONSTANT_CLASS_P (val))
7930 return NULL_TREE;
7931 if (!elt->index)
7933 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7935 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7936 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7938 else
7939 index = max_index = wi::to_offset (elt->index);
7940 index = wi::umax (index, access_index);
7943 if (bufoff + elt_sz > sizeof (buf))
7944 elt_sz = sizeof (buf) - bufoff;
7945 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7946 inner_offset / BITS_PER_UNIT);
7947 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7948 return NULL_TREE;
7949 inner_offset = 0;
7950 bufoff += len;
7952 access_index += 1;
7953 if (wi::cmpu (access_index, index) == 0)
7954 val = elt->value;
7955 else if (wi::cmpu (access_index, max_index) > 0)
7957 ctor_idx++;
7958 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7960 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7961 ++max_index;
7963 else
7965 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7966 index = 0;
7967 max_index = access_index;
7968 if (!elt->index)
7970 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7972 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7973 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7975 else
7976 index = max_index = wi::to_offset (elt->index);
7977 index = wi::umax (index, access_index);
7978 if (wi::cmpu (access_index, index) == 0)
7979 val = elt->value;
7980 else
7981 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7985 while (bufoff < size / BITS_PER_UNIT);
7986 *suboff += size;
7987 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7990 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7992 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7994 /* For the final reference to the entire accessed element
7995 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7996 may be null) in favor of the type of the element, and set
7997 SIZE to the size of the accessed element. */
7998 inner_offset = 0;
7999 type = TREE_TYPE (val);
8000 size = elt_sz * BITS_PER_UNIT;
8002 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
8003 && TREE_CODE (val) == CONSTRUCTOR
8004 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
8005 /* If this isn't the last element in the CTOR, is itself a CTOR,
8006 and does not cover the whole object we are requesting, give up,
8007 since we're not set up for combining from multiple CTORs. */
8008 return NULL_TREE;
8010 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
8011 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
8012 suboff);
8015 /* Memory not explicitly mentioned in constructor is 0 (or
8016 the reference is out of range). */
8017 return type ? build_zero_cst (type) : NULL_TREE;
8020 /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
8021 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
8022 the reference; otherwise the type of the referenced member is used instead.
8023 When SIZE is zero, attempt to fold a reference to the entire member OFFSET
8024 refers to. Increment *SUBOFF by the bit offset of the accessed member. */
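/* Editor's worked example (not from the original source): for
     static const struct { int i; int j; } s = { 1, 2 };
   with 32-bit int, a read of SIZE 32 at OFFSET 32 overlaps field 'j'
   at bits [32, 64), so *SUBOFF is incremented by 32 and the recursive
   fold_ctor_reference call on its value returns the INTEGER_CST 2.  */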
8026 static tree
8027 fold_nonarray_ctor_reference (tree type, tree ctor,
8028 unsigned HOST_WIDE_INT offset,
8029 unsigned HOST_WIDE_INT size,
8030 tree from_decl,
8031 unsigned HOST_WIDE_INT *suboff)
8033 unsigned HOST_WIDE_INT cnt;
8034 tree cfield, cval;
8036 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
8038 tree byte_offset = DECL_FIELD_OFFSET (cfield);
8039 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
8040 tree field_size = DECL_SIZE (cfield);
8042 if (!field_size)
8044 /* Determine the size of the flexible array member from
8045 the size of the initializer provided for it. */
8046 field_size = TYPE_SIZE (TREE_TYPE (cval));
8049 /* Variable-sized objects in static constructors make no sense,
8050 but field_size can be NULL for flexible array members. */
8051 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
8052 && TREE_CODE (byte_offset) == INTEGER_CST
8053 && (field_size != NULL_TREE
8054 ? TREE_CODE (field_size) == INTEGER_CST
8055 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
8057 /* Compute bit offset of the field. */
8058 offset_int bitoffset
8059 = (wi::to_offset (field_offset)
8060 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
8061 /* Compute bit offset where the field ends. */
8062 offset_int bitoffset_end;
8063 if (field_size != NULL_TREE)
8064 bitoffset_end = bitoffset + wi::to_offset (field_size);
8065 else
8066 bitoffset_end = 0;
8068 /* Compute the bit offset of the end of the desired access.
8069 As a special case, if the size of the desired access is
8070 zero, assume the access is to the entire field (and let
8071 the caller make any necessary adjustments by storing
8072 the actual bounds of the field in FIELDBOUNDS). */
8073 offset_int access_end = offset_int (offset);
8074 if (size)
8075 access_end += size;
8076 else
8077 access_end = bitoffset_end;
8079 /* Is there any overlap between the desired access at
8080 [OFFSET, OFFSET+SIZE) and the offset of the field within
8081 the object at [BITOFFSET, BITOFFSET_END)? */
8082 if (wi::cmps (access_end, bitoffset) > 0
8083 && (field_size == NULL_TREE
8084 || wi::lts_p (offset, bitoffset_end)))
8086 *suboff += bitoffset.to_uhwi ();
8088 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8090 /* For the final reference to the entire accessed member
8091 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8092 be null) in favor of the type of the member, and set
8093 SIZE to the size of the accessed member. */
8094 offset = bitoffset.to_uhwi ();
8095 type = TREE_TYPE (cval);
8096 size = (bitoffset_end - bitoffset).to_uhwi ();
8099 /* We do have overlap. Now see if the field is large enough
8100 to cover the access. Give up for accesses that extend
8101 beyond the end of the object or that span multiple fields. */
8102 if (wi::cmps (access_end, bitoffset_end) > 0)
8103 return NULL_TREE;
8104 if (offset < bitoffset)
8105 return NULL_TREE;
8107 offset_int inner_offset = offset_int (offset) - bitoffset;
8109 /* Integral bit-fields are left-justified on big-endian targets, so
8110 we must arrange for native_encode_int to start at their MSB. */
8111 if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
8113 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8114 return NULL_TREE;
8115 const unsigned int encoding_size
8116 = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
8117 if (BYTES_BIG_ENDIAN)
8118 inner_offset += encoding_size - wi::to_offset (field_size);
8121 return fold_ctor_reference (type, cval,
8122 inner_offset.to_uhwi (), size,
8123 from_decl, suboff);
8127 if (!type)
8128 return NULL_TREE;
8130 return build_zero_cst (type);
8133 /* CTOR is a value initializing memory. Fold a reference of TYPE and
8134 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8135 is zero, attempt to fold a reference to the entire subobject
8136 which POLY_OFFSET refers to. This is used when folding accesses to
8137 string members of aggregates. When non-null, set *SUBOFF to
8138 the bit offset of the accessed subobject. */
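/* Editor's note (a sketch of the byte-level fallback used below): a
   constructor such as the one for
     static const unsigned char c[4] = { 0x78, 0x56, 0x34, 0x12 };
   read as a 32-bit integer on a little-endian target can be folded via
   native_encode_initializer/native_interpret_expr to 0x12345678,
   provided the offset and size are byte-aligned.  */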
8140 tree
8141 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8142 const poly_uint64 &poly_size, tree from_decl,
8143 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8145 tree ret;
8147 /* We found the field with an exact match. */
8148 if (type
8149 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8150 && known_eq (poly_offset, 0U))
8151 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8153 /* The remaining optimizations need a constant size and offset. */
8154 unsigned HOST_WIDE_INT size, offset;
8155 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8156 return NULL_TREE;
8158 /* We are at the end of the walk; see if we can view-convert the
8159 result. */
8160 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8161 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8162 && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
8163 && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
8165 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8166 if (ret)
8168 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8169 if (ret)
8170 STRIP_USELESS_TYPE_CONVERSION (ret);
8172 return ret;
8175 /* For constants and byte-aligned/sized reads, try to go through
8176 native_encode/interpret. */
8177 if (CONSTANT_CLASS_P (ctor)
8178 && BITS_PER_UNIT == 8
8179 && offset % BITS_PER_UNIT == 0
8180 && offset / BITS_PER_UNIT <= INT_MAX
8181 && size % BITS_PER_UNIT == 0
8182 && size <= MAX_BITSIZE_MODE_ANY_MODE
8183 && can_native_interpret_type_p (type))
8185 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8186 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8187 offset / BITS_PER_UNIT);
8188 if (len > 0)
8189 return native_interpret_expr (type, buf, len);
8192 /* For constructors, first try recursive local processing; in any case
8193 this requires the native storage order. */
8194 if (TREE_CODE (ctor) == CONSTRUCTOR
8195 && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8196 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
8198 unsigned HOST_WIDE_INT dummy = 0;
8199 if (!suboff)
8200 suboff = &dummy;
8202 tree ret;
8203 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8204 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8205 ret = fold_array_ctor_reference (type, ctor, offset, size,
8206 from_decl, suboff);
8207 else
8208 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8209 from_decl, suboff);
8211 /* Otherwise fall back to native_encode_initializer. This may be done
8212 only from the outermost fold_ctor_reference call (because it itself
8213 recurses into CONSTRUCTORs and doesn't update suboff). */
8214 if (ret == NULL_TREE
8215 && suboff == &dummy
8216 && BITS_PER_UNIT == 8
8217 && offset % BITS_PER_UNIT == 0
8218 && offset / BITS_PER_UNIT <= INT_MAX
8219 && size % BITS_PER_UNIT == 0
8220 && size <= MAX_BITSIZE_MODE_ANY_MODE
8221 && can_native_interpret_type_p (type))
8223 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8224 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8225 offset / BITS_PER_UNIT);
8226 if (len > 0)
8227 return native_interpret_expr (type, buf, len);
8230 return ret;
8233 return NULL_TREE;
8236 /* Return the tree representing the element referenced by T if T is an
8237 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
8238 names using VALUEIZE. Return NULL_TREE otherwise. */
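/* Editor's example (illustrative only, names hypothetical): given
     static const int a[2] = { 7, 9 };
   and a load from a[i_1] where VALUEIZE maps i_1 to 1, the ARRAY_REF
   case below computes a bit offset of 32 (with 32-bit int), finds the
   constructor of 'a' and folds the reference to the INTEGER_CST 9.  */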
8240 tree
8241 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8243 tree ctor, idx, base;
8244 poly_int64 offset, size, max_size;
8245 tree tem;
8246 bool reverse;
8248 if (TREE_THIS_VOLATILE (t))
8249 return NULL_TREE;
8251 if (DECL_P (t))
8252 return get_symbol_constant_value (t);
8254 tem = fold_read_from_constant_string (t);
8255 if (tem)
8256 return tem;
8258 switch (TREE_CODE (t))
8260 case ARRAY_REF:
8261 case ARRAY_RANGE_REF:
8262 /* Constant indexes are handled well by get_base_constructor.
8263 Only special case variable offsets.
8264 FIXME: This code can't handle nested references with variable indexes
8265 (they will be handled only by iteration of ccp). Perhaps we can bring
8266 get_ref_base_and_extent here and make it use a valueize callback. */
8267 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8268 && valueize
8269 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8270 && poly_int_tree_p (idx))
8272 tree low_bound, unit_size;
8274 /* If the resulting bit-offset is constant, track it. */
8275 if ((low_bound = array_ref_low_bound (t),
8276 poly_int_tree_p (low_bound))
8277 && (unit_size = array_ref_element_size (t),
8278 tree_fits_uhwi_p (unit_size)))
8280 poly_offset_int woffset
8281 = wi::sext (wi::to_poly_offset (idx)
8282 - wi::to_poly_offset (low_bound),
8283 TYPE_PRECISION (sizetype));
8284 woffset *= tree_to_uhwi (unit_size);
8285 woffset *= BITS_PER_UNIT;
8286 if (woffset.to_shwi (&offset))
8288 base = TREE_OPERAND (t, 0);
8289 ctor = get_base_constructor (base, &offset, valueize);
8290 /* Empty constructor. Always fold to 0. */
8291 if (ctor == error_mark_node)
8292 return build_zero_cst (TREE_TYPE (t));
8293 /* Out-of-bounds array access. Value is undefined,
8294 but don't fold. */
8295 if (maybe_lt (offset, 0))
8296 return NULL_TREE;
8297 /* We cannot determine ctor. */
8298 if (!ctor)
8299 return NULL_TREE;
8300 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8301 tree_to_uhwi (unit_size)
8302 * BITS_PER_UNIT,
8303 base);
8307 /* Fallthru. */
8309 case COMPONENT_REF:
8310 case BIT_FIELD_REF:
8311 case TARGET_MEM_REF:
8312 case MEM_REF:
8313 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8314 ctor = get_base_constructor (base, &offset, valueize);
8316 /* Empty constructor. Always fold to 0. */
8317 if (ctor == error_mark_node)
8318 return build_zero_cst (TREE_TYPE (t));
8319 /* We do not know precise address. */
8320 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8321 return NULL_TREE;
8322 /* We cannot determine ctor. */
8323 if (!ctor)
8324 return NULL_TREE;
8326 /* Out-of-bounds array access. Value is undefined, but don't fold. */
8327 if (maybe_lt (offset, 0))
8328 return NULL_TREE;
8330 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8331 if (tem)
8332 return tem;
8334 /* For bit field reads try to read the representative and
8335 adjust. */
8336 if (TREE_CODE (t) == COMPONENT_REF
8337 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8338 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8340 HOST_WIDE_INT csize, coffset;
8341 tree field = TREE_OPERAND (t, 1);
8342 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8343 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8344 && size.is_constant (&csize)
8345 && offset.is_constant (&coffset)
8346 && (coffset % BITS_PER_UNIT != 0
8347 || csize % BITS_PER_UNIT != 0)
8348 && !reverse
8349 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8351 poly_int64 bitoffset;
8352 poly_uint64 field_offset, repr_offset;
8353 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8354 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8355 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8356 else
8357 bitoffset = 0;
8358 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8359 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8360 HOST_WIDE_INT bitoff;
8361 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8362 - TYPE_PRECISION (TREE_TYPE (field)));
8363 if (bitoffset.is_constant (&bitoff)
8364 && bitoff >= 0
8365 && bitoff <= diff)
8367 offset -= bitoff;
8368 size = tree_to_uhwi (DECL_SIZE (repr));
8370 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8371 size, base);
8372 if (tem && TREE_CODE (tem) == INTEGER_CST)
8374 if (!BYTES_BIG_ENDIAN)
8375 tem = wide_int_to_tree (TREE_TYPE (field),
8376 wi::lrshift (wi::to_wide (tem),
8377 bitoff));
8378 else
8379 tem = wide_int_to_tree (TREE_TYPE (field),
8380 wi::lrshift (wi::to_wide (tem),
8381 diff - bitoff));
8382 return tem;
8387 break;
8389 case REALPART_EXPR:
8390 case IMAGPART_EXPR:
8392 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8393 if (c && TREE_CODE (c) == COMPLEX_CST)
8394 return fold_build1_loc (EXPR_LOCATION (t),
8395 TREE_CODE (t), TREE_TYPE (t), c);
8396 break;
8399 default:
8400 break;
8403 return NULL_TREE;
8406 tree
8407 fold_const_aggregate_ref (tree t)
8409 return fold_const_aggregate_ref_1 (t, NULL);
8412 /* Look up the virtual method with index TOKEN in the virtual table V
8413 at OFFSET.
8414 If CAN_REFER is non-NULL, set it to false if the method
8415 is not referable or if the virtual table is ill-formed (such as one
8416 rewritten by a non-C++-produced symbol). Otherwise just return NULL in that case. */
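/* Editor's worked example of the index arithmetic below (values
   hypothetical): with 64-bit vtable slots (SIZE = 64, ELT_SIZE = 8),
   TOKEN = 2 and a byte OFFSET of 16 give a bit offset of
   16 * 8 + 2 * 64 = 256 and thus ACCESS_INDEX = 256 / 8 / 8 = 4.  */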
8418 tree
8419 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8420 tree v,
8421 unsigned HOST_WIDE_INT offset,
8422 bool *can_refer)
8424 tree vtable = v, init, fn;
8425 unsigned HOST_WIDE_INT size;
8426 unsigned HOST_WIDE_INT elt_size, access_index;
8427 tree domain_type;
8429 if (can_refer)
8430 *can_refer = true;
8432 /* First of all, double-check that we have a virtual table. */
8433 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8435 /* Pass down that we lost track of the target. */
8436 if (can_refer)
8437 *can_refer = false;
8438 return NULL_TREE;
8441 init = ctor_for_folding (v);
8443 /* The virtual tables should always be born with constructors
8444 and we should always assume that they are available for
8445 folding. At the moment we do not stream them in all cases,
8446 but it should never happen that the ctor seems unreachable. */
8447 gcc_assert (init);
8448 if (init == error_mark_node)
8450 /* Pass down that we lost track of the target. */
8451 if (can_refer)
8452 *can_refer = false;
8453 return NULL_TREE;
8455 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8456 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8457 offset *= BITS_PER_UNIT;
8458 offset += token * size;
8460 /* Look up the value in the constructor that is assumed to be an array.
8461 This is equivalent to
8462 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8463 offset, size, NULL);
8464 but in constant time. We expect that the frontend produced a simple
8465 array without indexed initializers. */
8467 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8468 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8469 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8470 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8472 access_index = offset / BITS_PER_UNIT / elt_size;
8473 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8475 /* The C++ FE can now produce indexed fields, and we check if the indexes
8476 match. */
8477 if (access_index < CONSTRUCTOR_NELTS (init))
8479 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8480 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8481 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8482 STRIP_NOPS (fn);
8484 else
8485 fn = NULL;
8487 /* For a type-inconsistent program we may end up looking up a virtual
8488 method in a virtual table that does not contain TOKEN entries. We may overrun
8489 the virtual table and pick up a constant or RTTI info pointer.
8490 In any case the call is undefined. */
8491 if (!fn
8492 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8493 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8494 fn = builtin_decl_unreachable ();
8495 else
8497 fn = TREE_OPERAND (fn, 0);
8499 /* When the cgraph node is missing and the function is not public, we
8500 cannot devirtualize. This can happen in WHOPR when the actual method
8501 ends up in another partition, because we found the devirtualization
8502 possibility too late. */
8503 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8505 if (can_refer)
8507 *can_refer = false;
8508 return fn;
8510 return NULL_TREE;
8514 /* Make sure we create a cgraph node for functions we'll reference.
8515 They can be non-existent if the reference comes from an entry
8516 of an external vtable for example. */
8517 cgraph_node::get_create (fn);
8519 return fn;
8522 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8523 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8524 KNOWN_BINFO carries the binfo describing the true type of
8525 OBJ_TYPE_REF_OBJECT(REF).
8526 If CAN_REFER is non-NULL, set it to false if the method
8527 is not referable or if the virtual table is ill-formed (such as one
8528 rewritten by a non-C++-produced symbol). Otherwise just return NULL in that case. */
8530 tree
8531 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8532 bool *can_refer)
8534 unsigned HOST_WIDE_INT offset;
8535 tree v;
8537 v = BINFO_VTABLE (known_binfo);
8538 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8539 if (!v)
8540 return NULL_TREE;
8542 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8544 if (can_refer)
8545 *can_refer = false;
8546 return NULL_TREE;
8548 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8551 /* Given a pointer value T, return a simplified version of an
8552 indirection through T, or NULL_TREE if no simplification is
8553 possible. Note that the resulting type may be different from
8554 the type pointed to in the sense that it is still compatible
8555 from the langhooks point of view. */
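/* Editor's illustration (folds restated from the comments below):
     *&p                ==>  p
     *(foo *)&fooarray  ==>  fooarray[0]
     *(p + CST)         ==>  MEM_REF <p, CST>
   each guarded by the type-compatibility checks in the code.  */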
8557 tree
8558 gimple_fold_indirect_ref (tree t)
8560 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8561 tree sub = t;
8562 tree subtype;
8564 STRIP_NOPS (sub);
8565 subtype = TREE_TYPE (sub);
8566 if (!POINTER_TYPE_P (subtype)
8567 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8568 return NULL_TREE;
8570 if (TREE_CODE (sub) == ADDR_EXPR)
8572 tree op = TREE_OPERAND (sub, 0);
8573 tree optype = TREE_TYPE (op);
8574 /* *&p => p */
8575 if (useless_type_conversion_p (type, optype))
8576 return op;
8578 /* *(foo *)&fooarray => fooarray[0] */
8579 if (TREE_CODE (optype) == ARRAY_TYPE
8580 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8581 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8583 tree type_domain = TYPE_DOMAIN (optype);
8584 tree min_val = size_zero_node;
8585 if (type_domain && TYPE_MIN_VALUE (type_domain))
8586 min_val = TYPE_MIN_VALUE (type_domain);
8587 if (TREE_CODE (min_val) == INTEGER_CST)
8588 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8590 /* *(foo *)&complexfoo => __real__ complexfoo */
8591 else if (TREE_CODE (optype) == COMPLEX_TYPE
8592 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8593 return fold_build1 (REALPART_EXPR, type, op);
8594 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8595 else if (TREE_CODE (optype) == VECTOR_TYPE
8596 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8598 tree part_width = TYPE_SIZE (type);
8599 tree index = bitsize_int (0);
8600 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8604 /* *(p + CST) -> ... */
8605 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8606 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8608 tree addr = TREE_OPERAND (sub, 0);
8609 tree off = TREE_OPERAND (sub, 1);
8610 tree addrtype;
8612 STRIP_NOPS (addr);
8613 addrtype = TREE_TYPE (addr);
8615 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8616 if (TREE_CODE (addr) == ADDR_EXPR
8617 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8618 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8619 && tree_fits_uhwi_p (off))
8621 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8622 tree part_width = TYPE_SIZE (type);
8623 unsigned HOST_WIDE_INT part_widthi
8624 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8625 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8626 tree index = bitsize_int (indexi);
8627 if (known_lt (offset / part_widthi,
8628 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8629 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8630 part_width, index);
8633 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8634 if (TREE_CODE (addr) == ADDR_EXPR
8635 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8636 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8638 tree size = TYPE_SIZE_UNIT (type);
8639 if (tree_int_cst_equal (size, off))
8640 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8643 /* *(p + CST) -> MEM_REF <p, CST>. */
8644 if (TREE_CODE (addr) != ADDR_EXPR
8645 || DECL_P (TREE_OPERAND (addr, 0)))
8646 return fold_build2 (MEM_REF, type,
8647 addr,
8648 wide_int_to_tree (ptype, wi::to_wide (off)));
8651 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8652 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8653 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8654 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8656 tree type_domain;
8657 tree min_val = size_zero_node;
8658 tree osub = sub;
8659 sub = gimple_fold_indirect_ref (sub);
8660 if (! sub)
8661 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8662 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8663 if (type_domain && TYPE_MIN_VALUE (type_domain))
8664 min_val = TYPE_MIN_VALUE (type_domain);
8665 if (TREE_CODE (min_val) == INTEGER_CST)
8666 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8669 return NULL_TREE;
8672 /* Return true if CODE is an operation that when operating on signed
8673 integer types involves undefined behavior on overflow and the
8674 operation can be expressed with unsigned arithmetic. */
8676 bool
8677 arith_code_with_undefined_signed_overflow (tree_code code)
8679 switch (code)
8681 case ABS_EXPR:
8682 case PLUS_EXPR:
8683 case MINUS_EXPR:
8684 case MULT_EXPR:
8685 case NEGATE_EXPR:
8686 case POINTER_PLUS_EXPR:
8687 return true;
8688 default:
8689 return false;
8693 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8694 operation that can be transformed to unsigned arithmetic by converting
8695 its operands, carrying out the operation in the corresponding unsigned
8696 type and converting the result back to the original type.
8698 If IN_PLACE is true, adjust the stmt in place and return NULL.
8699 Otherwise returns a sequence of statements that replace STMT and also
8700 contain a modified form of STMT itself. */
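/* Editor's sketch of the transform (SSA names hypothetical): a signed
   addition with undefined overflow such as
     c_1 = a_2 + b_3;
   is rewritten to the overflow-defined form
     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     c_1 = (int) _6;
   POINTER_PLUS_EXPR is likewise turned into PLUS_EXPR and ABS_EXPR
   into ABSU_EXPR.  */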
8702 gimple_seq
8703 rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
8705 if (dump_file && (dump_flags & TDF_DETAILS))
8707 fprintf (dump_file, "rewriting stmt with undefined signed "
8708 "overflow ");
8709 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8712 tree lhs = gimple_assign_lhs (stmt);
8713 tree type = unsigned_type_for (TREE_TYPE (lhs));
8714 gimple_seq stmts = NULL;
8715 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8716 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8717 else
8718 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8720 tree op = gimple_op (stmt, i);
8721 op = gimple_convert (&stmts, type, op);
8722 gimple_set_op (stmt, i, op);
8724 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8725 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8726 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8727 gimple_set_modified (stmt, true);
8728 if (in_place)
8730 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8731 if (stmts)
8732 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
8733 stmts = NULL;
8735 else
8736 gimple_seq_add_stmt (&stmts, stmt);
8737 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8738 if (in_place)
8740 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8741 gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
8742 update_stmt (stmt);
8744 else
8745 gimple_seq_add_stmt (&stmts, cvt);
8747 return stmts;
8751 /* The valueization hook we use for the gimple_build API simplification.
8752 This makes us match fold_buildN behavior by only combining with
8753 statements in the sequence(s) we are currently building. */
8755 static tree
8756 gimple_build_valueize (tree op)
8758 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8759 return op;
8760 return NULL_TREE;
8763 /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8765 static inline void
8766 gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8767 bool before, gsi_iterator_update update,
8768 gimple_seq seq)
8770 if (before)
8772 if (gsi->bb)
8773 gsi_insert_seq_before (gsi, seq, update);
8774 else
8775 gsi_insert_seq_before_without_update (gsi, seq, update);
8777 else
8779 if (gsi->bb)
8780 gsi_insert_seq_after (gsi, seq, update);
8781 else
8782 gsi_insert_seq_after_without_update (gsi, seq, update);
8786 /* Build the expression CODE OP0 of type TYPE with location LOC,
8787 simplifying it first if possible. Returns the built
8788 expression value and inserts statements possibly defining it
8789 before GSI if BEFORE is true or after GSI if false, advancing
8790 the iterator accordingly.
8791 If GSI refers to a basic block, simplification is allowed to look
8792 at all SSA defs; when it does not, it is restricted to
8793 SSA defs that are not yet associated with a basic block,
8794 indicating that they belong to the sequence currently being built. */
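/* Editor's usage sketch (hypothetical call site): a call such as
     tree v = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
                            NEGATE_EXPR, integer_type_node, op0);
   either returns a simplified value directly (no statement emitted,
   e.g. a constant when OP0 is constant) or emits a new assignment
   before GSI and returns its SSA name result.  */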
8796 tree
8797 gimple_build (gimple_stmt_iterator *gsi,
8798 bool before, gsi_iterator_update update,
8799 location_t loc, enum tree_code code, tree type, tree op0)
8801 gimple_seq seq = NULL;
8802 tree res
8803 = gimple_simplify (code, type, op0, &seq,
8804 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8805 if (!res)
8807 res = create_tmp_reg_or_ssa_name (type);
8808 gimple *stmt;
8809 if (code == REALPART_EXPR
8810 || code == IMAGPART_EXPR
8811 || code == VIEW_CONVERT_EXPR)
8812 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8813 else
8814 stmt = gimple_build_assign (res, code, op0);
8815 gimple_set_location (stmt, loc);
8816 gimple_seq_add_stmt_without_update (&seq, stmt);
8818 gimple_build_insert_seq (gsi, before, update, seq);
8819 return res;
8822 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8823 simplifying it first if possible. Returns the built
8824 expression value inserting any new statements at GSI honoring BEFORE
8825 and UPDATE. */
8827 tree
8828 gimple_build (gimple_stmt_iterator *gsi,
8829 bool before, gsi_iterator_update update,
8830 location_t loc, enum tree_code code, tree type,
8831 tree op0, tree op1)
8833 gimple_seq seq = NULL;
8834 tree res
8835 = gimple_simplify (code, type, op0, op1, &seq,
8836 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8837 if (!res)
8839 res = create_tmp_reg_or_ssa_name (type);
8840 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8841 gimple_set_location (stmt, loc);
8842 gimple_seq_add_stmt_without_update (&seq, stmt);
8844 gimple_build_insert_seq (gsi, before, update, seq);
8845 return res;
8848 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8849 simplifying it first if possible. Returns the built
8850 expression value inserting any new statements at GSI honoring BEFORE
8851 and UPDATE. */
8853 tree
8854 gimple_build (gimple_stmt_iterator *gsi,
8855 bool before, gsi_iterator_update update,
8856 location_t loc, enum tree_code code, tree type,
8857 tree op0, tree op1, tree op2)
8860 gimple_seq seq = NULL;
8861 tree res
8862 = gimple_simplify (code, type, op0, op1, op2, &seq,
8863 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8864 if (!res)
8866 res = create_tmp_reg_or_ssa_name (type);
8867 gimple *stmt;
8868 if (code == BIT_FIELD_REF)
8869 stmt = gimple_build_assign (res, code,
8870 build3 (code, type, op0, op1, op2));
8871 else
8872 stmt = gimple_build_assign (res, code, op0, op1, op2);
8873 gimple_set_location (stmt, loc);
8874 gimple_seq_add_stmt_without_update (&seq, stmt);
8876 gimple_build_insert_seq (gsi, before, update, seq);
8877 return res;
8880 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8881 void) with a location LOC. Returns the built expression value (or NULL_TREE
8882 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8883 and UPDATE. */
8885 tree
8886 gimple_build (gimple_stmt_iterator *gsi,
8887 bool before, gsi_iterator_update update,
8888 location_t loc, combined_fn fn, tree type)
8890 tree res = NULL_TREE;
8891 gimple_seq seq = NULL;
8892 gcall *stmt;
8893 if (internal_fn_p (fn))
8894 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8895 else
8897 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8898 stmt = gimple_build_call (decl, 0);
8900 if (!VOID_TYPE_P (type))
8902 res = create_tmp_reg_or_ssa_name (type);
8903 gimple_call_set_lhs (stmt, res);
8905 gimple_set_location (stmt, loc);
8906 gimple_seq_add_stmt_without_update (&seq, stmt);
8907 gimple_build_insert_seq (gsi, before, update, seq);
8908 return res;
8911 /* Build the call FN (ARG0) with a result of type TYPE
8912 (or no result if TYPE is void) with location LOC,
8913 simplifying it first if possible. Returns the built
8914 expression value (or NULL_TREE if TYPE is void) inserting any new
8915 statements at GSI honoring BEFORE and UPDATE. */
8917 tree
8918 gimple_build (gimple_stmt_iterator *gsi,
8919 bool before, gsi_iterator_update update,
8920 location_t loc, combined_fn fn,
8921 tree type, tree arg0)
8923 gimple_seq seq = NULL;
8924 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
8925 if (!res)
8927 gcall *stmt;
8928 if (internal_fn_p (fn))
8929 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8930 else
8932 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8933 stmt = gimple_build_call (decl, 1, arg0);
8935 if (!VOID_TYPE_P (type))
8937 res = create_tmp_reg_or_ssa_name (type);
8938 gimple_call_set_lhs (stmt, res);
8940 gimple_set_location (stmt, loc);
8941 gimple_seq_add_stmt_without_update (&seq, stmt);
8943 gimple_build_insert_seq (gsi, before, update, seq);
8944 return res;
8947 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8948 (or no result if TYPE is void) with location LOC,
8949 simplifying it first if possible. Returns the built
8950 expression value (or NULL_TREE if TYPE is void) inserting any new
8951 statements at GSI honoring BEFORE and UPDATE. */
8953 tree
8954 gimple_build (gimple_stmt_iterator *gsi,
8955 bool before, gsi_iterator_update update,
8956 location_t loc, combined_fn fn,
8957 tree type, tree arg0, tree arg1)
8959 gimple_seq seq = NULL;
8960 tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
8961 gimple_build_valueize);
8962 if (!res)
8964 gcall *stmt;
8965 if (internal_fn_p (fn))
8966 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8967 else
8969 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8970 stmt = gimple_build_call (decl, 2, arg0, arg1);
8972 if (!VOID_TYPE_P (type))
8974 res = create_tmp_reg_or_ssa_name (type);
8975 gimple_call_set_lhs (stmt, res);
8977 gimple_set_location (stmt, loc);
8978 gimple_seq_add_stmt_without_update (&seq, stmt);
8980 gimple_build_insert_seq (gsi, before, update, seq);
8981 return res;
8984 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8985 (or no result if TYPE is void) with location LOC,
8986 simplifying it first if possible. Returns the built
8987 expression value (or NULL_TREE if TYPE is void) inserting any new
8988 statements at GSI honoring BEFORE and UPDATE. */
8990 tree
8991 gimple_build (gimple_stmt_iterator *gsi,
8992 bool before, gsi_iterator_update update,
8993 location_t loc, combined_fn fn,
8994 tree type, tree arg0, tree arg1, tree arg2)
8996 gimple_seq seq = NULL;
8997 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8998 &seq, gimple_build_valueize);
8999 if (!res)
9001 gcall *stmt;
9002 if (internal_fn_p (fn))
9003 stmt = gimple_build_call_internal (as_internal_fn (fn),
9004 3, arg0, arg1, arg2);
9005 else
9007 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9008 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
9010 if (!VOID_TYPE_P (type))
9012 res = create_tmp_reg_or_ssa_name (type);
9013 gimple_call_set_lhs (stmt, res);
9015 gimple_set_location (stmt, loc);
9016 gimple_seq_add_stmt_without_update (&seq, stmt);
9018 gimple_build_insert_seq (gsi, before, update, seq);
9019 return res;
9022 /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
9023 void) with location LOC, simplifying it first if possible. Returns the
9024 built expression value (or NULL_TREE if TYPE is void) inserting any new
9025 statements at GSI honoring BEFORE and UPDATE. */
9027 tree
9028 gimple_build (gimple_stmt_iterator *gsi,
9029 bool before, gsi_iterator_update update,
9030 location_t loc, code_helper code, tree type, tree op0)
9032 if (code.is_tree_code ())
9033 return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
9034 return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
9037 /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
9038 void) with location LOC, simplifying it first if possible. Returns the
9039 built expression value (or NULL_TREE if TYPE is void) inserting any new
9040 statements at GSI honoring BEFORE and UPDATE. */
9042 tree
9043 gimple_build (gimple_stmt_iterator *gsi,
9044 bool before, gsi_iterator_update update,
9045 location_t loc, code_helper code, tree type, tree op0, tree op1)
9047 if (code.is_tree_code ())
9048 return gimple_build (gsi, before, update,
9049 loc, tree_code (code), type, op0, op1);
9050 return gimple_build (gsi, before, update,
9051 loc, combined_fn (code), type, op0, op1);
9054 /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
9055 is void) with location LOC, simplifying it first if possible. Returns the
9056 built expression value (or NULL_TREE if TYPE is void) inserting any new
9057 statements at GSI honoring BEFORE and UPDATE. */
9059 tree
9060 gimple_build (gimple_stmt_iterator *gsi,
9061 bool before, gsi_iterator_update update,
9062 location_t loc, code_helper code,
9063 tree type, tree op0, tree op1, tree op2)
9065 if (code.is_tree_code ())
9066 return gimple_build (gsi, before, update,
9067 loc, tree_code (code), type, op0, op1, op2);
9068 return gimple_build (gsi, before, update,
9069 loc, combined_fn (code), type, op0, op1, op2);
9072 /* Build the conversion (TYPE) OP with a result of type TYPE
9073 with location LOC if such conversion is necessary in GIMPLE,
9074 simplifying it first.
9075 Returns the built expression inserting any new statements
9076 at GSI honoring BEFORE and UPDATE. */
9078 tree
9079 gimple_convert (gimple_stmt_iterator *gsi,
9080 bool before, gsi_iterator_update update,
9081 location_t loc, tree type, tree op)
9083 if (useless_type_conversion_p (type, TREE_TYPE (op)))
9084 return op;
9085 return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
9088 /* Build the conversion (ptrofftype) OP with a result of a type
9089 compatible with ptrofftype with location LOC if such conversion
9090 is necessary in GIMPLE, simplifying it first.
9091 Returns the built expression value inserting any new statements
9092 at GSI honoring BEFORE and UPDATE. */
9094 tree
9095 gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
9096 bool before, gsi_iterator_update update,
9097 location_t loc, tree op)
9099 if (ptrofftype_p (TREE_TYPE (op)))
9100 return op;
9101 return gimple_convert (gsi, before, update, loc, sizetype, op);
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
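
/* Usage sketch (illustrative, assuming VECTYPE is a 4 x int vector type
   and SCALAR an int value):

     tree splat = gimple_build_vector_from_val (gsi, true, GSI_SAME_STMT,
						loc, vectype, scalar);

   A constant SCALAR folds directly to a VECTOR_CST with no statements
   emitted; a variable one becomes a CONSTRUCTOR assignment, or a
   VEC_DUPLICATE_EXPR when the element count is not a compile-time
   constant (variable-length vectors).  */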
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new statements at GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  return builder->build ();
}
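
/* Usage sketch (illustrative): building { x, 0, 0, 0 } for an assumed
   4 x int vector type VECTYPE, where X may or may not be constant:

     tree_vector_builder builder (vectype, 4, 1);
     builder.quick_push (x);
     for (int j = 0; j < 3; ++j)
       builder.quick_push (integer_zero_node);
     tree vec = gimple_build_vector (gsi, true, GSI_SAME_STMT, loc,
				     &builder);

   If every pushed element is constant the result is a VECTOR_CST and no
   statements are emitted; otherwise the CONSTRUCTOR path above runs.  */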
/* Emit gimple statements at GSI (honoring BEFORE and UPDATE) that take
   the value given in OLD_SIZE and compute a value guaranteed to be
   rounded upwards to ALIGN.

   Return the tree node representing this size; it is of tree type TYPE.  */

tree
gimple_build_round_up (gimple_stmt_iterator *gsi,
		       bool before, gsi_iterator_update update,
		       location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
}
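
/* Worked example: with ALIGN == 16, TG_MASK is 15 and the second mask
   is -16 (i.e. ~15).  For OLD_SIZE == 10 the emitted statements compute

     _1 = old_size + 15;	== 25
     _2 = _1 & -16;		== 16

   while an already aligned OLD_SIZE == 32 gives (32 + 15) & -16 == 32.
   ALIGN must be a power of two for the mask arithmetic to be valid,
   which callers are assumed to guarantee.  */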
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0) : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
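
/* Illustrative example (assumed GIMPLE, not code in this file): for

     # x_3 = PHI <5(2), y_2(3)>

   the PHI result is non-negative iff every argument is, so x_3 is known
   non-negative exactly when y_2 is.  The depth + 1 in the recursion
   above is what bounds chains of such argument queries.  */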
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree type = gimple_range_type (stmt);
  if (type && frange::supports_p (type))
    {
      frange r;
      bool sign;
      if (get_global_range_query ()->range_of_stmt (r, stmt)
	  && r.signbit_p (sign))
	return !sign;
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
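
/* Usage sketch (hypothetical caller, for illustration): a pass that
   wants to drop a redundant ABS_EXPR on the result of DEF_STMT could
   query

     bool strict_ovf = false;
     bool nonneg = gimple_stmt_nonnegative_warnv_p (def_stmt,
						    &strict_ovf, 0);

   and only simplify when NONNEG is true, additionally noting that a
   set STRICT_OVF means the conclusion relies on signed overflow being
   undefined.  */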
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
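
/* Illustrative example (assumed GIMPLE): a statement like

     x_2 = __builtin_trunc (y_1);

   is recognized as integer-valued regardless of y_1, since trunc always
   produces an integral result, whereas  x_2 = __builtin_sqrt (y_1);
   is not.  */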
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}