c, c++: attribute format on a ctor with a vbase [PR101833, PR47634]
[official-gcc.git] / gcc / gimple-fold.cc
blob7baec119ba35cc95cca5e05e591bb390769dc403
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2022 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.cc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
36 #include "stmt.h"
37 #include "expr.h"
38 #include "stor-layout.h"
39 #include "dumpfile.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "tree-into-ssa.h"
44 #include "tree-dfa.h"
45 #include "tree-object-size.h"
46 #include "tree-ssa.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
52 #include "dbgcnt.h"
53 #include "builtins.h"
54 #include "tree-eh.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
69 #include "varasm.h"
70 #include "internal-fn.h"
72 enum strlen_range_kind {
73 /* Compute the exact constant string length. */
74 SRK_STRLEN,
75 /* Compute the maximum constant string length. */
76 SRK_STRLENMAX,
77 /* Compute a range of string lengths bounded by object sizes. When
78 the length of a string cannot be determined, consider as the upper
79 bound the size of the enclosing object the string may be a member
80 or element of. Also determine the size of the largest character
81 array the string may refer to. */
82 SRK_LENRANGE,
83 /* Determine the integer value of the argument (not string length). */
84 SRK_INT_VALUE
87 static bool
88 get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
90 /* Return true when DECL can be referenced from current unit.
91 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
92 We can get declarations that are not possible to reference for various
93 reasons:
95 1) When analyzing C++ virtual tables.
96 C++ virtual tables do have known constructors even
97 when they are keyed to other compilation unit.
98 Those tables can contain pointers to methods and vars
99 in other units. Those methods have both STATIC and EXTERNAL
100 set.
101 2) In WHOPR mode devirtualization might lead to reference
102 to method that was partitioned elsehwere.
103 In this case we have static VAR_DECL or FUNCTION_DECL
104 that has no corresponding callgraph/varpool node
105 declaring the body.
106 3) COMDAT functions referred by external vtables that
107 we devirtualize only during final compilation stage.
108 At this time we already decided that we will not output
109 the function body and thus we can't reference the symbol
110 directly. */
112 static bool
113 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
115 varpool_node *vnode;
116 struct cgraph_node *node;
117 symtab_node *snode;
119 if (DECL_ABSTRACT_P (decl))
120 return false;
122 /* We are concerned only about static/external vars and functions. */
123 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
124 || !VAR_OR_FUNCTION_DECL_P (decl))
125 return true;
127 /* Static objects can be referred only if they are defined and not optimized
128 out yet. */
129 if (!TREE_PUBLIC (decl))
131 if (DECL_EXTERNAL (decl))
132 return false;
133 /* Before we start optimizing unreachable code we can be sure all
134 static objects are defined. */
135 if (symtab->function_flags_ready)
136 return true;
137 snode = symtab_node::get (decl);
138 if (!snode || !snode->definition)
139 return false;
140 node = dyn_cast <cgraph_node *> (snode);
141 return !node || !node->inlined_to;
144 /* We will later output the initializer, so we can refer to it.
145 So we are concerned only when DECL comes from initializer of
146 external var or var that has been optimized out. */
147 if (!from_decl
148 || !VAR_P (from_decl)
149 || (!DECL_EXTERNAL (from_decl)
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->definition)
152 || (flag_ltrans
153 && (vnode = varpool_node::get (from_decl)) != NULL
154 && vnode->in_other_partition))
155 return true;
156 /* We are folding reference from external vtable. The vtable may reffer
157 to a symbol keyed to other compilation unit. The other compilation
158 unit may be in separate DSO and the symbol may be hidden. */
159 if (DECL_VISIBILITY_SPECIFIED (decl)
160 && DECL_EXTERNAL (decl)
161 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
162 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
163 return false;
164 /* When function is public, we always can introduce new reference.
165 Exception are the COMDAT functions where introducing a direct
166 reference imply need to include function body in the curren tunit. */
167 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
168 return true;
169 /* We have COMDAT. We are going to check if we still have definition
170 or if the definition is going to be output in other partition.
171 Bypass this when gimplifying; all needed functions will be produced.
173 As observed in PR20991 for already optimized out comdat virtual functions
174 it may be tempting to not necessarily give up because the copy will be
175 output elsewhere when corresponding vtable is output.
176 This is however not possible - ABI specify that COMDATs are output in
177 units where they are used and when the other unit was compiled with LTO
178 it is possible that vtable was kept public while the function itself
179 was privatized. */
180 if (!symtab->function_flags_ready)
181 return true;
183 snode = symtab_node::get (decl);
184 if (!snode
185 || ((!snode->definition || DECL_EXTERNAL (decl))
186 && (!snode->in_other_partition
187 || (!snode->forced_by_abi && !snode->force_output))))
188 return false;
189 node = dyn_cast <cgraph_node *> (snode);
190 return !node || !node->inlined_to;
193 /* Create a temporary for TYPE for a statement STMT. If the current function
194 is in SSA form, a SSA name is created. Otherwise a temporary register
195 is made. */
197 tree
198 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
200 if (gimple_in_ssa_p (cfun))
201 return make_ssa_name (type, stmt);
202 else
203 return create_tmp_reg (type);
206 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
207 acceptable form for is_gimple_min_invariant.
208 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
210 tree
211 canonicalize_constructor_val (tree cval, tree from_decl)
213 if (CONSTANT_CLASS_P (cval))
214 return cval;
216 tree orig_cval = cval;
217 STRIP_NOPS (cval);
218 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
219 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
221 tree ptr = TREE_OPERAND (cval, 0);
222 if (is_gimple_min_invariant (ptr))
223 cval = build1_loc (EXPR_LOCATION (cval),
224 ADDR_EXPR, TREE_TYPE (ptr),
225 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
226 ptr,
227 fold_convert (ptr_type_node,
228 TREE_OPERAND (cval, 1))));
230 if (TREE_CODE (cval) == ADDR_EXPR)
232 tree base = NULL_TREE;
233 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
235 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
236 if (base)
237 TREE_OPERAND (cval, 0) = base;
239 else
240 base = get_base_address (TREE_OPERAND (cval, 0));
241 if (!base)
242 return NULL_TREE;
244 if (VAR_OR_FUNCTION_DECL_P (base)
245 && !can_refer_decl_in_current_unit_p (base, from_decl))
246 return NULL_TREE;
247 if (TREE_TYPE (base) == error_mark_node)
248 return NULL_TREE;
249 if (VAR_P (base))
250 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
251 but since the use can be in a debug stmt we can't. */
253 else if (TREE_CODE (base) == FUNCTION_DECL)
255 /* Make sure we create a cgraph node for functions we'll reference.
256 They can be non-existent if the reference comes from an entry
257 of an external vtable for example. */
258 cgraph_node::get_create (base);
260 /* Fixup types in global initializers. */
261 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
262 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
264 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
265 cval = fold_convert (TREE_TYPE (orig_cval), cval);
266 return cval;
268 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
269 if (TREE_CODE (cval) == INTEGER_CST)
271 if (TREE_OVERFLOW_P (cval))
272 cval = drop_tree_overflow (cval);
273 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
274 cval = fold_convert (TREE_TYPE (orig_cval), cval);
275 return cval;
277 return orig_cval;
280 /* If SYM is a constant variable with known value, return the value.
281 NULL_TREE is returned otherwise. */
283 tree
284 get_symbol_constant_value (tree sym)
286 tree val = ctor_for_folding (sym);
287 if (val != error_mark_node)
289 if (val)
291 val = canonicalize_constructor_val (unshare_expr (val), sym);
292 if (val
293 && is_gimple_min_invariant (val)
294 && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
295 return val;
296 else
297 return NULL_TREE;
299 /* Variables declared 'const' without an initializer
300 have zero as the initializer if they may not be
301 overridden at link or run time. */
302 if (!val
303 && is_gimple_reg_type (TREE_TYPE (sym)))
304 return build_zero_cst (TREE_TYPE (sym));
307 return NULL_TREE;
312 /* Subroutine of fold_stmt. We perform constant folding of the
313 memory reference tree EXPR. */
315 static tree
316 maybe_fold_reference (tree expr)
318 tree result = NULL_TREE;
320 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
321 || TREE_CODE (expr) == REALPART_EXPR
322 || TREE_CODE (expr) == IMAGPART_EXPR)
323 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
324 result = fold_unary_loc (EXPR_LOCATION (expr),
325 TREE_CODE (expr),
326 TREE_TYPE (expr),
327 TREE_OPERAND (expr, 0));
328 else if (TREE_CODE (expr) == BIT_FIELD_REF
329 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
330 result = fold_ternary_loc (EXPR_LOCATION (expr),
331 TREE_CODE (expr),
332 TREE_TYPE (expr),
333 TREE_OPERAND (expr, 0),
334 TREE_OPERAND (expr, 1),
335 TREE_OPERAND (expr, 2));
336 else
337 result = fold_const_aggregate_ref (expr);
339 if (result && is_gimple_min_invariant (result))
340 return result;
342 return NULL_TREE;
345 /* Return true if EXPR is an acceptable right-hand-side for a
346 GIMPLE assignment. We validate the entire tree, not just
347 the root node, thus catching expressions that embed complex
348 operands that are not permitted in GIMPLE. This function
349 is needed because the folding routines in fold-const.cc
350 may return such expressions in some cases, e.g., an array
351 access with an embedded index addition. It may make more
352 sense to have folding routines that are sensitive to the
353 constraints on GIMPLE operands, rather than abandoning any
354 any attempt to fold if the usual folding turns out to be too
355 aggressive. */
357 bool
358 valid_gimple_rhs_p (tree expr)
360 enum tree_code code = TREE_CODE (expr);
362 switch (TREE_CODE_CLASS (code))
364 case tcc_declaration:
365 if (!is_gimple_variable (expr))
366 return false;
367 break;
369 case tcc_constant:
370 /* All constants are ok. */
371 break;
373 case tcc_comparison:
374 /* GENERIC allows comparisons with non-boolean types, reject
375 those for GIMPLE. Let vector-typed comparisons pass - rules
376 for GENERIC and GIMPLE are the same here. */
377 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
378 && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
379 || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
380 && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
381 return false;
383 /* Fallthru. */
384 case tcc_binary:
385 if (!is_gimple_val (TREE_OPERAND (expr, 0))
386 || !is_gimple_val (TREE_OPERAND (expr, 1)))
387 return false;
388 break;
390 case tcc_unary:
391 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
392 return false;
393 break;
395 case tcc_expression:
396 switch (code)
398 case ADDR_EXPR:
400 tree t;
401 if (is_gimple_min_invariant (expr))
402 return true;
403 t = TREE_OPERAND (expr, 0);
404 while (handled_component_p (t))
406 /* ??? More checks needed, see the GIMPLE verifier. */
407 if ((TREE_CODE (t) == ARRAY_REF
408 || TREE_CODE (t) == ARRAY_RANGE_REF)
409 && !is_gimple_val (TREE_OPERAND (t, 1)))
410 return false;
411 t = TREE_OPERAND (t, 0);
413 if (!is_gimple_id (t))
414 return false;
416 break;
418 default:
419 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
421 if ((code == COND_EXPR
422 ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
423 : !is_gimple_val (TREE_OPERAND (expr, 0)))
424 || !is_gimple_val (TREE_OPERAND (expr, 1))
425 || !is_gimple_val (TREE_OPERAND (expr, 2)))
426 return false;
427 break;
429 return false;
431 break;
433 case tcc_vl_exp:
434 return false;
436 case tcc_exceptional:
437 if (code == CONSTRUCTOR)
439 unsigned i;
440 tree elt;
441 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
442 if (!is_gimple_val (elt))
443 return false;
444 return true;
446 if (code != SSA_NAME)
447 return false;
448 break;
450 case tcc_reference:
451 if (code == BIT_FIELD_REF)
452 return is_gimple_val (TREE_OPERAND (expr, 0));
453 return false;
455 default:
456 return false;
459 return true;
463 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
464 replacement rhs for the statement or NULL_TREE if no simplification
465 could be made. It is assumed that the operands have been previously
466 folded. */
468 static tree
469 fold_gimple_assign (gimple_stmt_iterator *si)
471 gimple *stmt = gsi_stmt (*si);
472 enum tree_code subcode = gimple_assign_rhs_code (stmt);
473 location_t loc = gimple_location (stmt);
475 tree result = NULL_TREE;
477 switch (get_gimple_rhs_class (subcode))
479 case GIMPLE_SINGLE_RHS:
481 tree rhs = gimple_assign_rhs1 (stmt);
483 if (TREE_CLOBBER_P (rhs))
484 return NULL_TREE;
486 if (REFERENCE_CLASS_P (rhs))
487 return maybe_fold_reference (rhs);
489 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
491 tree val = OBJ_TYPE_REF_EXPR (rhs);
492 if (is_gimple_min_invariant (val))
493 return val;
494 else if (flag_devirtualize && virtual_method_call_p (rhs))
496 bool final;
497 vec <cgraph_node *>targets
498 = possible_polymorphic_call_targets (rhs, stmt, &final);
499 if (final && targets.length () <= 1 && dbg_cnt (devirt))
501 if (dump_enabled_p ())
503 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
504 "resolving virtual function address "
505 "reference to function %s\n",
506 targets.length () == 1
507 ? targets[0]->name ()
508 : "NULL");
510 if (targets.length () == 1)
512 val = fold_convert (TREE_TYPE (val),
513 build_fold_addr_expr_loc
514 (loc, targets[0]->decl));
515 STRIP_USELESS_TYPE_CONVERSION (val);
517 else
518 /* We cannot use __builtin_unreachable here because it
519 cannot have address taken. */
520 val = build_int_cst (TREE_TYPE (val), 0);
521 return val;
526 else if (TREE_CODE (rhs) == ADDR_EXPR)
528 tree ref = TREE_OPERAND (rhs, 0);
529 if (TREE_CODE (ref) == MEM_REF
530 && integer_zerop (TREE_OPERAND (ref, 1)))
532 result = TREE_OPERAND (ref, 0);
533 if (!useless_type_conversion_p (TREE_TYPE (rhs),
534 TREE_TYPE (result)))
535 result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
536 return result;
540 else if (TREE_CODE (rhs) == CONSTRUCTOR
541 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
543 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
544 unsigned i;
545 tree val;
547 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
548 if (! CONSTANT_CLASS_P (val))
549 return NULL_TREE;
551 return build_vector_from_ctor (TREE_TYPE (rhs),
552 CONSTRUCTOR_ELTS (rhs));
555 else if (DECL_P (rhs)
556 && is_gimple_reg_type (TREE_TYPE (rhs)))
557 return get_symbol_constant_value (rhs);
559 break;
561 case GIMPLE_UNARY_RHS:
562 break;
564 case GIMPLE_BINARY_RHS:
565 break;
567 case GIMPLE_TERNARY_RHS:
568 result = fold_ternary_loc (loc, subcode,
569 TREE_TYPE (gimple_assign_lhs (stmt)),
570 gimple_assign_rhs1 (stmt),
571 gimple_assign_rhs2 (stmt),
572 gimple_assign_rhs3 (stmt));
574 if (result)
576 STRIP_USELESS_TYPE_CONVERSION (result);
577 if (valid_gimple_rhs_p (result))
578 return result;
580 break;
582 case GIMPLE_INVALID_RHS:
583 gcc_unreachable ();
586 return NULL_TREE;
590 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
591 adjusting the replacement stmts location and virtual operands.
592 If the statement has a lhs the last stmt in the sequence is expected
593 to assign to that lhs. */
595 static void
596 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
598 gimple *stmt = gsi_stmt (*si_p);
600 if (gimple_has_location (stmt))
601 annotate_all_with_location (stmts, gimple_location (stmt));
603 /* First iterate over the replacement statements backward, assigning
604 virtual operands to their defining statements. */
605 gimple *laststore = NULL;
606 for (gimple_stmt_iterator i = gsi_last (stmts);
607 !gsi_end_p (i); gsi_prev (&i))
609 gimple *new_stmt = gsi_stmt (i);
610 if ((gimple_assign_single_p (new_stmt)
611 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
612 || (is_gimple_call (new_stmt)
613 && (gimple_call_flags (new_stmt)
614 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
616 tree vdef;
617 if (!laststore)
618 vdef = gimple_vdef (stmt);
619 else
620 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
621 gimple_set_vdef (new_stmt, vdef);
622 if (vdef && TREE_CODE (vdef) == SSA_NAME)
623 SSA_NAME_DEF_STMT (vdef) = new_stmt;
624 laststore = new_stmt;
628 /* Second iterate over the statements forward, assigning virtual
629 operands to their uses. */
630 tree reaching_vuse = gimple_vuse (stmt);
631 for (gimple_stmt_iterator i = gsi_start (stmts);
632 !gsi_end_p (i); gsi_next (&i))
634 gimple *new_stmt = gsi_stmt (i);
635 /* If the new statement possibly has a VUSE, update it with exact SSA
636 name we know will reach this one. */
637 if (gimple_has_mem_ops (new_stmt))
638 gimple_set_vuse (new_stmt, reaching_vuse);
639 gimple_set_modified (new_stmt, true);
640 if (gimple_vdef (new_stmt))
641 reaching_vuse = gimple_vdef (new_stmt);
644 /* If the new sequence does not do a store release the virtual
645 definition of the original statement. */
646 if (reaching_vuse
647 && reaching_vuse == gimple_vuse (stmt))
649 tree vdef = gimple_vdef (stmt);
650 if (vdef
651 && TREE_CODE (vdef) == SSA_NAME)
653 unlink_stmt_vdef (stmt);
654 release_ssa_name (vdef);
658 /* Finally replace the original statement with the sequence. */
659 gsi_replace_with_seq (si_p, stmts, false);
662 /* Helper function for update_gimple_call and
663 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
664 with GIMPLE_CALL NEW_STMT. */
666 static void
667 finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
668 gimple *stmt)
670 tree lhs = gimple_call_lhs (stmt);
671 gimple_call_set_lhs (new_stmt, lhs);
672 if (lhs && TREE_CODE (lhs) == SSA_NAME)
673 SSA_NAME_DEF_STMT (lhs) = new_stmt;
674 gimple_move_vops (new_stmt, stmt);
675 gimple_set_location (new_stmt, gimple_location (stmt));
676 if (gimple_block (new_stmt) == NULL_TREE)
677 gimple_set_block (new_stmt, gimple_block (stmt));
678 gsi_replace (si_p, new_stmt, false);
681 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
682 with number of arguments NARGS, where the arguments in GIMPLE form
683 follow NARGS argument. */
685 bool
686 update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
688 va_list ap;
689 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
691 gcc_assert (is_gimple_call (stmt));
692 va_start (ap, nargs);
693 new_stmt = gimple_build_call_valist (fn, nargs, ap);
694 finish_update_gimple_call (si_p, new_stmt, stmt);
695 va_end (ap);
696 return true;
699 /* Return true if EXPR is a CALL_EXPR suitable for representation
700 as a single GIMPLE_CALL statement. If the arguments require
701 further gimplification, return false. */
703 static bool
704 valid_gimple_call_p (tree expr)
706 unsigned i, nargs;
708 if (TREE_CODE (expr) != CALL_EXPR)
709 return false;
711 nargs = call_expr_nargs (expr);
712 for (i = 0; i < nargs; i++)
714 tree arg = CALL_EXPR_ARG (expr, i);
715 if (is_gimple_reg_type (TREE_TYPE (arg)))
717 if (!is_gimple_val (arg))
718 return false;
720 else
721 if (!is_gimple_lvalue (arg))
722 return false;
725 return true;
728 /* Convert EXPR into a GIMPLE value suitable for substitution on the
729 RHS of an assignment. Insert the necessary statements before
730 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
731 is replaced. If the call is expected to produces a result, then it
732 is replaced by an assignment of the new RHS to the result variable.
733 If the result is to be ignored, then the call is replaced by a
734 GIMPLE_NOP. A proper VDEF chain is retained by making the first
735 VUSE and the last VDEF of the whole sequence be the same as the replaced
736 statement and using new SSA names for stores in between. */
738 void
739 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
741 tree lhs;
742 gimple *stmt, *new_stmt;
743 gimple_stmt_iterator i;
744 gimple_seq stmts = NULL;
746 stmt = gsi_stmt (*si_p);
748 gcc_assert (is_gimple_call (stmt));
750 if (valid_gimple_call_p (expr))
752 /* The call has simplified to another call. */
753 tree fn = CALL_EXPR_FN (expr);
754 unsigned i;
755 unsigned nargs = call_expr_nargs (expr);
756 vec<tree> args = vNULL;
757 gcall *new_stmt;
759 if (nargs > 0)
761 args.create (nargs);
762 args.safe_grow_cleared (nargs, true);
764 for (i = 0; i < nargs; i++)
765 args[i] = CALL_EXPR_ARG (expr, i);
768 new_stmt = gimple_build_call_vec (fn, args);
769 finish_update_gimple_call (si_p, new_stmt, stmt);
770 args.release ();
771 return;
774 lhs = gimple_call_lhs (stmt);
775 if (lhs == NULL_TREE)
777 push_gimplify_context (gimple_in_ssa_p (cfun));
778 gimplify_and_add (expr, &stmts);
779 pop_gimplify_context (NULL);
781 /* We can end up with folding a memcpy of an empty class assignment
782 which gets optimized away by C++ gimplification. */
783 if (gimple_seq_empty_p (stmts))
785 if (gimple_in_ssa_p (cfun))
787 unlink_stmt_vdef (stmt);
788 release_defs (stmt);
790 gsi_replace (si_p, gimple_build_nop (), false);
791 return;
794 else
796 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
797 new_stmt = gimple_build_assign (lhs, tmp);
798 i = gsi_last (stmts);
799 gsi_insert_after_without_update (&i, new_stmt,
800 GSI_CONTINUE_LINKING);
803 gsi_replace_with_seq_vops (si_p, stmts);
807 /* Replace the call at *GSI with the gimple value VAL. */
809 void
810 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
812 gimple *stmt = gsi_stmt (*gsi);
813 tree lhs = gimple_call_lhs (stmt);
814 gimple *repl;
815 if (lhs)
817 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
818 val = fold_convert (TREE_TYPE (lhs), val);
819 repl = gimple_build_assign (lhs, val);
821 else
822 repl = gimple_build_nop ();
823 tree vdef = gimple_vdef (stmt);
824 if (vdef && TREE_CODE (vdef) == SSA_NAME)
826 unlink_stmt_vdef (stmt);
827 release_ssa_name (vdef);
829 gsi_replace (gsi, repl, false);
832 /* Replace the call at *GSI with the new call REPL and fold that
833 again. */
835 static void
836 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
838 gimple *stmt = gsi_stmt (*gsi);
839 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
840 gimple_set_location (repl, gimple_location (stmt));
841 gimple_move_vops (repl, stmt);
842 gsi_replace (gsi, repl, false);
843 fold_stmt (gsi);
846 /* Return true if VAR is a VAR_DECL or a component thereof. */
848 static bool
849 var_decl_component_p (tree var)
851 tree inner = var;
852 while (handled_component_p (inner))
853 inner = TREE_OPERAND (inner, 0);
854 return (DECL_P (inner)
855 || (TREE_CODE (inner) == MEM_REF
856 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
859 /* Return TRUE if the SIZE argument, representing the size of an
860 object, is in a range of values of which exactly zero is valid. */
862 static bool
863 size_must_be_zero_p (tree size)
865 if (integer_zerop (size))
866 return true;
868 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
869 return false;
871 tree type = TREE_TYPE (size);
872 int prec = TYPE_PRECISION (type);
874 /* Compute the value of SSIZE_MAX, the largest positive value that
875 can be stored in ssize_t, the signed counterpart of size_t. */
876 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
877 value_range valid_range (build_int_cst (type, 0),
878 wide_int_to_tree (type, ssize_max));
879 value_range vr;
880 if (cfun)
881 get_range_query (cfun)->range_of_expr (vr, size);
882 else
883 get_global_range_query ()->range_of_expr (vr, size);
884 if (vr.undefined_p ())
885 vr.set_varying (TREE_TYPE (size));
886 vr.intersect (valid_range);
887 return vr.zero_p ();
890 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
891 diagnose (otherwise undefined) overlapping copies without preventing
892 folding. When folded, GCC guarantees that overlapping memcpy has
893 the same semantics as memmove. Call to the library memcpy need not
894 provide the same guarantee. Return false if no simplification can
895 be made. */
897 static bool
898 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
899 tree dest, tree src, enum built_in_function code)
901 gimple *stmt = gsi_stmt (*gsi);
902 tree lhs = gimple_call_lhs (stmt);
903 tree len = gimple_call_arg (stmt, 2);
904 location_t loc = gimple_location (stmt);
906 /* If the LEN parameter is a constant zero or in range where
907 the only valid value is zero, return DEST. */
908 if (size_must_be_zero_p (len))
910 gimple *repl;
911 if (gimple_call_lhs (stmt))
912 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
913 else
914 repl = gimple_build_nop ();
915 tree vdef = gimple_vdef (stmt);
916 if (vdef && TREE_CODE (vdef) == SSA_NAME)
918 unlink_stmt_vdef (stmt);
919 release_ssa_name (vdef);
921 gsi_replace (gsi, repl, false);
922 return true;
925 /* If SRC and DEST are the same (and not volatile), return
926 DEST{,+LEN,+LEN-1}. */
927 if (operand_equal_p (src, dest, 0))
929 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
930 It's safe and may even be emitted by GCC itself (see bug
931 32667). */
932 unlink_stmt_vdef (stmt);
933 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
934 release_ssa_name (gimple_vdef (stmt));
935 if (!lhs)
937 gsi_replace (gsi, gimple_build_nop (), false);
938 return true;
940 goto done;
942 else
944 /* We cannot (easily) change the type of the copy if it is a storage
945 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
946 modify the storage order of objects (see storage_order_barrier_p). */
947 tree srctype
948 = POINTER_TYPE_P (TREE_TYPE (src))
949 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
950 tree desttype
951 = POINTER_TYPE_P (TREE_TYPE (dest))
952 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
953 tree destvar, srcvar, srcoff;
954 unsigned int src_align, dest_align;
955 unsigned HOST_WIDE_INT tmp_len;
956 const char *tmp_str;
958 /* Build accesses at offset zero with a ref-all character type. */
959 tree off0
960 = build_int_cst (build_pointer_type_for_mode (char_type_node,
961 ptr_mode, true), 0);
963 /* If we can perform the copy efficiently with first doing all loads
964 and then all stores inline it that way. Currently efficiently
965 means that we can load all the memory into a single integer
966 register which is what MOVE_MAX gives us. */
967 src_align = get_pointer_alignment (src);
968 dest_align = get_pointer_alignment (dest);
969 if (tree_fits_uhwi_p (len)
970 && compare_tree_int (len, MOVE_MAX) <= 0
971 /* FIXME: Don't transform copies from strings with known length.
972 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
973 from being handled, and the case was XFAILed for that reason.
974 Now that it is handled and the XFAIL removed, as soon as other
975 strlenopt tests that rely on it for passing are adjusted, this
976 hack can be removed. */
977 && !c_strlen (src, 1)
978 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
979 && memchr (tmp_str, 0, tmp_len) == NULL)
980 && !(srctype
981 && AGGREGATE_TYPE_P (srctype)
982 && TYPE_REVERSE_STORAGE_ORDER (srctype))
983 && !(desttype
984 && AGGREGATE_TYPE_P (desttype)
985 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
987 unsigned ilen = tree_to_uhwi (len);
988 if (pow2p_hwi (ilen))
990 /* Detect out-of-bounds accesses without issuing warnings.
991 Avoid folding out-of-bounds copies but to avoid false
992 positives for unreachable code defer warning until after
993 DCE has worked its magic.
994 -Wrestrict is still diagnosed. */
995 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
996 dest, src, len, len,
997 false, false))
998 if (warning != OPT_Wrestrict)
999 return false;
1001 scalar_int_mode mode;
1002 if (int_mode_for_size (ilen * 8, 0).exists (&mode)
1003 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
1004 /* If the destination pointer is not aligned we must be able
1005 to emit an unaligned store. */
1006 && (dest_align >= GET_MODE_ALIGNMENT (mode)
1007 || !targetm.slow_unaligned_access (mode, dest_align)
1008 || (optab_handler (movmisalign_optab, mode)
1009 != CODE_FOR_nothing)))
1011 tree type = build_nonstandard_integer_type (ilen * 8, 1);
1012 tree srctype = type;
1013 tree desttype = type;
1014 if (src_align < GET_MODE_ALIGNMENT (mode))
1015 srctype = build_aligned_type (type, src_align);
1016 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
1017 tree tem = fold_const_aggregate_ref (srcmem);
1018 if (tem)
1019 srcmem = tem;
1020 else if (src_align < GET_MODE_ALIGNMENT (mode)
1021 && targetm.slow_unaligned_access (mode, src_align)
1022 && (optab_handler (movmisalign_optab, mode)
1023 == CODE_FOR_nothing))
1024 srcmem = NULL_TREE;
1025 if (srcmem)
1027 gimple *new_stmt;
1028 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
1030 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
1031 srcmem
1032 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
1033 new_stmt);
1034 gimple_assign_set_lhs (new_stmt, srcmem);
1035 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1036 gimple_set_location (new_stmt, loc);
1037 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1039 if (dest_align < GET_MODE_ALIGNMENT (mode))
1040 desttype = build_aligned_type (type, dest_align);
1041 new_stmt
1042 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
1043 dest, off0),
1044 srcmem);
1045 gimple_move_vops (new_stmt, stmt);
1046 if (!lhs)
1048 gsi_replace (gsi, new_stmt, false);
1049 return true;
1051 gimple_set_location (new_stmt, loc);
1052 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1053 goto done;
1059 if (code == BUILT_IN_MEMMOVE)
1061 /* Both DEST and SRC must be pointer types.
1062 ??? This is what old code did. Is the testing for pointer types
1063 really mandatory?
1065 If either SRC is readonly or length is 1, we can use memcpy. */
1066 if (!dest_align || !src_align)
1067 return false;
1068 if (readonly_data_expr (src)
1069 || (tree_fits_uhwi_p (len)
1070 && (MIN (src_align, dest_align) / BITS_PER_UNIT
1071 >= tree_to_uhwi (len))))
1073 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1074 if (!fn)
1075 return false;
1076 gimple_call_set_fndecl (stmt, fn);
1077 gimple_call_set_arg (stmt, 0, dest);
1078 gimple_call_set_arg (stmt, 1, src);
1079 fold_stmt (gsi);
1080 return true;
1083 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1084 if (TREE_CODE (src) == ADDR_EXPR
1085 && TREE_CODE (dest) == ADDR_EXPR)
1087 tree src_base, dest_base, fn;
1088 poly_int64 src_offset = 0, dest_offset = 0;
1089 poly_uint64 maxsize;
1091 srcvar = TREE_OPERAND (src, 0);
1092 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
1093 if (src_base == NULL)
1094 src_base = srcvar;
1095 destvar = TREE_OPERAND (dest, 0);
1096 dest_base = get_addr_base_and_unit_offset (destvar,
1097 &dest_offset);
1098 if (dest_base == NULL)
1099 dest_base = destvar;
1100 if (!poly_int_tree_p (len, &maxsize))
1101 maxsize = -1;
1102 if (SSA_VAR_P (src_base)
1103 && SSA_VAR_P (dest_base))
1105 if (operand_equal_p (src_base, dest_base, 0)
1106 && ranges_maybe_overlap_p (src_offset, maxsize,
1107 dest_offset, maxsize))
1108 return false;
1110 else if (TREE_CODE (src_base) == MEM_REF
1111 && TREE_CODE (dest_base) == MEM_REF)
1113 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
1114 TREE_OPERAND (dest_base, 0), 0))
1115 return false;
1116 poly_offset_int full_src_offset
1117 = mem_ref_offset (src_base) + src_offset;
1118 poly_offset_int full_dest_offset
1119 = mem_ref_offset (dest_base) + dest_offset;
1120 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
1121 full_dest_offset, maxsize))
1122 return false;
1124 else
1125 return false;
1127 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1128 if (!fn)
1129 return false;
1130 gimple_call_set_fndecl (stmt, fn);
1131 gimple_call_set_arg (stmt, 0, dest);
1132 gimple_call_set_arg (stmt, 1, src);
1133 fold_stmt (gsi);
1134 return true;
1137 /* If the destination and source do not alias optimize into
1138 memcpy as well. */
1139 if ((is_gimple_min_invariant (dest)
1140 || TREE_CODE (dest) == SSA_NAME)
1141 && (is_gimple_min_invariant (src)
1142 || TREE_CODE (src) == SSA_NAME))
1144 ao_ref destr, srcr;
1145 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1146 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1147 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1149 tree fn;
1150 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1151 if (!fn)
1152 return false;
1153 gimple_call_set_fndecl (stmt, fn);
1154 gimple_call_set_arg (stmt, 0, dest);
1155 gimple_call_set_arg (stmt, 1, src);
1156 fold_stmt (gsi);
1157 return true;
1161 return false;
1164 if (!tree_fits_shwi_p (len))
1165 return false;
1166 if (!srctype
1167 || (AGGREGATE_TYPE_P (srctype)
1168 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
1169 return false;
1170 if (!desttype
1171 || (AGGREGATE_TYPE_P (desttype)
1172 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
1173 return false;
1174 /* In the following try to find a type that is most natural to be
1175 used for the memcpy source and destination and that allows
1176 the most optimization when memcpy is turned into a plain assignment
1177 using that type. In theory we could always use a char[len] type
1178 but that only gains us that the destination and source possibly
1179 no longer will have their address taken. */
1180 if (TREE_CODE (srctype) == ARRAY_TYPE
1181 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1182 srctype = TREE_TYPE (srctype);
1183 if (TREE_CODE (desttype) == ARRAY_TYPE
1184 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1185 desttype = TREE_TYPE (desttype);
1186 if (TREE_ADDRESSABLE (srctype)
1187 || TREE_ADDRESSABLE (desttype))
1188 return false;
1190 /* Make sure we are not copying using a floating-point mode or
1191 a type whose size possibly does not match its precision. */
1192 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1193 || TREE_CODE (desttype) == BOOLEAN_TYPE
1194 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1195 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1196 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1197 || TREE_CODE (srctype) == BOOLEAN_TYPE
1198 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1199 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1200 if (!srctype)
1201 srctype = desttype;
1202 if (!desttype)
1203 desttype = srctype;
1204 if (!srctype)
1205 return false;
1207 src_align = get_pointer_alignment (src);
1208 dest_align = get_pointer_alignment (dest);
1210 /* Choose between src and destination type for the access based
1211 on alignment, whether the access constitutes a register access
1212 and whether it may actually expose a declaration for SSA rewrite
1213 or SRA decomposition. Also try to expose a string constant, we
1214 might be able to concatenate several of them later into a single
1215 string store. */
1216 destvar = NULL_TREE;
1217 srcvar = NULL_TREE;
1218 if (TREE_CODE (dest) == ADDR_EXPR
1219 && var_decl_component_p (TREE_OPERAND (dest, 0))
1220 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1221 && dest_align >= TYPE_ALIGN (desttype)
1222 && (is_gimple_reg_type (desttype)
1223 || src_align >= TYPE_ALIGN (desttype)))
1224 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1225 else if (TREE_CODE (src) == ADDR_EXPR
1226 && var_decl_component_p (TREE_OPERAND (src, 0))
1227 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1228 && src_align >= TYPE_ALIGN (srctype)
1229 && (is_gimple_reg_type (srctype)
1230 || dest_align >= TYPE_ALIGN (srctype)))
1231 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1232 /* FIXME: Don't transform copies from strings with known original length.
1233 As soon as strlenopt tests that rely on it for passing are adjusted,
1234 this hack can be removed. */
1235 else if (gimple_call_alloca_for_var_p (stmt)
1236 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1237 && integer_zerop (srcoff)
1238 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1239 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1240 srctype = TREE_TYPE (srcvar);
1241 else
1242 return false;
1244 /* Now that we chose an access type express the other side in
1245 terms of it if the target allows that with respect to alignment
1246 constraints. */
1247 if (srcvar == NULL_TREE)
1249 if (src_align >= TYPE_ALIGN (desttype))
1250 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1251 else
1253 enum machine_mode mode = TYPE_MODE (desttype);
1254 if ((mode == BLKmode && STRICT_ALIGNMENT)
1255 || (targetm.slow_unaligned_access (mode, src_align)
1256 && (optab_handler (movmisalign_optab, mode)
1257 == CODE_FOR_nothing)))
1258 return false;
1259 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1260 src_align);
1261 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1264 else if (destvar == NULL_TREE)
1266 if (dest_align >= TYPE_ALIGN (srctype))
1267 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1268 else
1270 enum machine_mode mode = TYPE_MODE (srctype);
1271 if ((mode == BLKmode && STRICT_ALIGNMENT)
1272 || (targetm.slow_unaligned_access (mode, dest_align)
1273 && (optab_handler (movmisalign_optab, mode)
1274 == CODE_FOR_nothing)))
1275 return false;
1276 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1277 dest_align);
1278 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1282 /* Same as above, detect out-of-bounds accesses without issuing
1283 warnings. Avoid folding out-of-bounds copies but to avoid
1284 false positives for unreachable code defer warning until
1285 after DCE has worked its magic.
1286 -Wrestrict is still diagnosed. */
1287 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1288 dest, src, len, len,
1289 false, false))
1290 if (warning != OPT_Wrestrict)
1291 return false;
1293 gimple *new_stmt;
1294 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1296 tree tem = fold_const_aggregate_ref (srcvar);
1297 if (tem)
1298 srcvar = tem;
1299 if (! is_gimple_min_invariant (srcvar))
1301 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1302 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1303 new_stmt);
1304 gimple_assign_set_lhs (new_stmt, srcvar);
1305 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1306 gimple_set_location (new_stmt, loc);
1307 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1309 new_stmt = gimple_build_assign (destvar, srcvar);
1310 goto set_vop_and_replace;
1313 /* We get an aggregate copy. If the source is a STRING_CST, then
1314 directly use its type to perform the copy. */
1315 if (TREE_CODE (srcvar) == STRING_CST)
1316 desttype = srctype;
1318 /* Or else, use an unsigned char[] type to perform the copy in order
1319 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1320 types or float modes behavior on copying. */
1321 else
1323 desttype = build_array_type_nelts (unsigned_char_type_node,
1324 tree_to_uhwi (len));
1325 srctype = desttype;
1326 if (src_align > TYPE_ALIGN (srctype))
1327 srctype = build_aligned_type (srctype, src_align);
1328 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1331 if (dest_align > TYPE_ALIGN (desttype))
1332 desttype = build_aligned_type (desttype, dest_align);
1333 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1334 new_stmt = gimple_build_assign (destvar, srcvar);
1336 set_vop_and_replace:
1337 gimple_move_vops (new_stmt, stmt);
1338 if (!lhs)
1340 gsi_replace (gsi, new_stmt, false);
1341 return true;
1343 gimple_set_location (new_stmt, loc);
1344 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1347 done:
1348 gimple_seq stmts = NULL;
1349 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1350 len = NULL_TREE;
1351 else if (code == BUILT_IN_MEMPCPY)
1353 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1354 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1355 TREE_TYPE (dest), dest, len);
1357 else
1358 gcc_unreachable ();
1360 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1361 gimple *repl = gimple_build_assign (lhs, dest);
1362 gsi_replace (gsi, repl, false);
1363 return true;
1366 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1367 to built-in memcmp (a, b, len). */
1369 static bool
1370 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1372 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1374 if (!fn)
1375 return false;
1377 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1379 gimple *stmt = gsi_stmt (*gsi);
1380 tree a = gimple_call_arg (stmt, 0);
1381 tree b = gimple_call_arg (stmt, 1);
1382 tree len = gimple_call_arg (stmt, 2);
1384 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1385 replace_call_with_call_and_fold (gsi, repl);
1387 return true;
1390 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1391 to built-in memmove (dest, src, len). */
1393 static bool
1394 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1396 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1398 if (!fn)
1399 return false;
1401 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1402 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1403 len) into memmove (dest, src, len). */
1405 gimple *stmt = gsi_stmt (*gsi);
1406 tree src = gimple_call_arg (stmt, 0);
1407 tree dest = gimple_call_arg (stmt, 1);
1408 tree len = gimple_call_arg (stmt, 2);
1410 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1411 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1412 replace_call_with_call_and_fold (gsi, repl);
1414 return true;
1417 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1418 to built-in memset (dest, 0, len). */
1420 static bool
1421 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1423 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1425 if (!fn)
1426 return false;
1428 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1430 gimple *stmt = gsi_stmt (*gsi);
1431 tree dest = gimple_call_arg (stmt, 0);
1432 tree len = gimple_call_arg (stmt, 1);
1434 gimple_seq seq = NULL;
1435 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1436 gimple_seq_add_stmt_without_update (&seq, repl);
1437 gsi_replace_with_seq_vops (gsi, seq);
1438 fold_stmt (gsi);
1440 return true;
1443 /* Fold function call to builtin memset or bzero at *GSI setting the
1444 memory of size LEN to VAL. Return whether a simplification was made. */
1446 static bool
1447 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1449 gimple *stmt = gsi_stmt (*gsi);
1450 tree etype;
1451 unsigned HOST_WIDE_INT length, cval;
1453 /* If the LEN parameter is zero, return DEST. */
1454 if (integer_zerop (len))
1456 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1457 return true;
1460 if (! tree_fits_uhwi_p (len))
1461 return false;
1463 if (TREE_CODE (c) != INTEGER_CST)
1464 return false;
1466 tree dest = gimple_call_arg (stmt, 0);
1467 tree var = dest;
1468 if (TREE_CODE (var) != ADDR_EXPR)
1469 return false;
1471 var = TREE_OPERAND (var, 0);
1472 if (TREE_THIS_VOLATILE (var))
1473 return false;
1475 etype = TREE_TYPE (var);
1476 if (TREE_CODE (etype) == ARRAY_TYPE)
1477 etype = TREE_TYPE (etype);
1479 if (!INTEGRAL_TYPE_P (etype)
1480 && !POINTER_TYPE_P (etype))
1481 return NULL_TREE;
1483 if (! var_decl_component_p (var))
1484 return NULL_TREE;
1486 length = tree_to_uhwi (len);
1487 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1488 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1489 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1490 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1491 return NULL_TREE;
1493 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1494 return NULL_TREE;
1496 if (!type_has_mode_precision_p (etype))
1497 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1498 TYPE_UNSIGNED (etype));
1500 if (integer_zerop (c))
1501 cval = 0;
1502 else
1504 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1505 return NULL_TREE;
1507 cval = TREE_INT_CST_LOW (c);
1508 cval &= 0xff;
1509 cval |= cval << 8;
1510 cval |= cval << 16;
1511 cval |= (cval << 31) << 1;
1514 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1515 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1516 gimple_move_vops (store, stmt);
1517 gimple_set_location (store, gimple_location (stmt));
1518 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1519 if (gimple_call_lhs (stmt))
1521 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1522 gsi_replace (gsi, asgn, false);
1524 else
1526 gimple_stmt_iterator gsi2 = *gsi;
1527 gsi_prev (gsi);
1528 gsi_remove (&gsi2, true);
1531 return true;
1534 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
/* Computes in VAL either a constant string length or, when the exact
   length cannot be determined, an upper bound derived from the size of
   the referenced array or enclosing object, and folds the result into
   *PDATA (MINLEN, MAXLEN, MAXBOUND and DECL).  VISITED, RKIND and
   ELTSIZE are as for get_range_strlen.  Returns false when no useful
   bound can be determined.  */
1536 static bool
1537 get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
1538 c_strlen_data *pdata, unsigned eltsize)
1540 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1542 /* The length computed by this invocation of the function. */
1543 tree val = NULL_TREE;
1545 /* True if VAL is an optimistic (tight) bound determined from
1546 the size of the character array in which the string may be
1547 stored. In that case, the computed VAL is used to set
1548 PDATA->MAXBOUND. */
1549 bool tight_bound = false;
1551 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1552 if (TREE_CODE (arg) == ADDR_EXPR
1553 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1555 tree op = TREE_OPERAND (arg, 0);
1556 if (integer_zerop (TREE_OPERAND (op, 1)))
1558 tree aop0 = TREE_OPERAND (op, 0);
1559 if (TREE_CODE (aop0) == INDIRECT_REF
1560 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1561 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1562 pdata, eltsize);
1564 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1565 && rkind == SRK_LENRANGE)
1567 /* Fail if an array is the last member of a struct object
1568 since it could be treated as a (fake) flexible array
1569 member. */
1570 tree idx = TREE_OPERAND (op, 1);
1572 arg = TREE_OPERAND (op, 0);
1573 tree optype = TREE_TYPE (arg);
1574 if (tree dom = TYPE_DOMAIN (optype))
1575 if (tree bound = TYPE_MAX_VALUE (dom))
1576 if (TREE_CODE (bound) == INTEGER_CST
1577 && TREE_CODE (idx) == INTEGER_CST
1578 && tree_int_cst_lt (bound, idx))
1579 return false;
1583 if (rkind == SRK_INT_VALUE)
1585 /* We are computing the maximum value (not string length). */
1586 val = arg;
1587 if (TREE_CODE (val) != INTEGER_CST
1588 || tree_int_cst_sgn (val) < 0)
1589 return false;
1591 else
1593 c_strlen_data lendata = { };
1594 val = c_strlen (arg, 1, &lendata, eltsize);
1596 if (!val && lendata.decl)
1598 /* ARG refers to an unterminated const character array.
1599 DATA.DECL with size DATA.LEN. */
1600 val = lendata.minlen;
1601 pdata->decl = lendata.decl;
1605 /* Set if VAL represents the maximum length based on array size (set
1606 when exact length cannot be determined). */
1607 bool maxbound = false;
1609 if (!val && rkind == SRK_LENRANGE)
1611 if (TREE_CODE (arg) == ADDR_EXPR)
1612 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1613 pdata, eltsize);
1615 if (TREE_CODE (arg) == ARRAY_REF)
1617 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1619 /* Determine the "innermost" array type. */
1620 while (TREE_CODE (optype) == ARRAY_TYPE
1621 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1622 optype = TREE_TYPE (optype);
1624 /* Avoid arrays of pointers. */
1625 tree eltype = TREE_TYPE (optype);
1626 if (TREE_CODE (optype) != ARRAY_TYPE
1627 || !INTEGRAL_TYPE_P (eltype))
1628 return false;
1630 /* Fail when the array bound is unknown or zero. */
1631 val = TYPE_SIZE_UNIT (optype);
1632 if (!val
1633 || TREE_CODE (val) != INTEGER_CST
1634 || integer_zerop (val))
1635 return false;
/* Reserve one byte for the terminating nul.  */
1637 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1638 integer_one_node);
1640 /* Set the minimum size to zero since the string in
1641 the array could have zero length. */
1642 pdata->minlen = ssize_int (0);
1644 tight_bound = true;
1646 else if (TREE_CODE (arg) == COMPONENT_REF
1647 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1648 == ARRAY_TYPE))
1650 /* Use the type of the member array to determine the upper
1651 bound on the length of the array. This may be overly
1652 optimistic if the array itself isn't NUL-terminated and
1653 the caller relies on the subsequent member to contain
1654 the NUL but that would only be considered valid if
1655 the array were the last member of a struct. */
1657 tree fld = TREE_OPERAND (arg, 1);
1659 tree optype = TREE_TYPE (fld);
1661 /* Determine the "innermost" array type. */
1662 while (TREE_CODE (optype) == ARRAY_TYPE
1663 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1664 optype = TREE_TYPE (optype);
1666 /* Fail when the array bound is unknown or zero. */
1667 val = TYPE_SIZE_UNIT (optype);
1668 if (!val
1669 || TREE_CODE (val) != INTEGER_CST
1670 || integer_zerop (val))
1671 return false;
1672 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1673 integer_one_node);
1675 /* Set the minimum size to zero since the string in
1676 the array could have zero length. */
1677 pdata->minlen = ssize_int (0);
1679 /* The array size determined above is an optimistic bound
1680 on the length. If the array isn't nul-terminated the
1681 length computed by the library function would be greater.
1682 Even though using strlen to cross the subobject boundary
1683 is undefined, avoid drawing conclusions from the member
1684 type about the length here. */
1685 tight_bound = true;
1687 else if (TREE_CODE (arg) == MEM_REF
1688 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1689 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1690 && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1692 /* Handle a MEM_REF into a DECL accessing an array of integers,
1693 being conservative about references to extern structures with
1694 flexible array members that can be initialized to arbitrary
1695 numbers of elements as an extension (static structs are okay).
1696 FIXME: Make this less conservative -- see
1697 component_ref_size in tree.cc. */
1698 tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1699 if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1700 && (decl_binds_to_current_def_p (ref)
1701 || !array_at_struct_end_p (arg)))
1703 /* Fail if the offset is out of bounds. Such accesses
1704 should be diagnosed at some point. */
1705 val = DECL_SIZE_UNIT (ref);
1706 if (!val
1707 || TREE_CODE (val) != INTEGER_CST
1708 || integer_zerop (val))
1709 return false;
1711 poly_offset_int psiz = wi::to_offset (val);
1712 poly_offset_int poff = mem_ref_offset (arg);
1713 if (known_le (psiz, poff))
1714 return false;
1716 pdata->minlen = ssize_int (0);
1718 /* Subtract the offset and one for the terminating nul. */
1719 psiz -= poff;
1720 psiz -= 1;
1721 val = wide_int_to_tree (TREE_TYPE (val), psiz);
1722 /* Since VAL reflects the size of a declared object
1723 rather than the type of the access it is not a tight bound. */
1726 else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1728 /* Avoid handling pointers to arrays. GCC might misuse
1729 a pointer to an array of one bound to point to an array
1730 object of a greater bound. */
1731 tree argtype = TREE_TYPE (arg);
1732 if (TREE_CODE (argtype) == ARRAY_TYPE)
1734 val = TYPE_SIZE_UNIT (argtype);
1735 if (!val
1736 || TREE_CODE (val) != INTEGER_CST
1737 || integer_zerop (val))
1738 return false;
1739 val = wide_int_to_tree (TREE_TYPE (val),
1740 wi::sub (wi::to_wide (val), 1));
1742 /* Set the minimum size to zero since the string in
1743 the array could have zero length. */
1744 pdata->minlen = ssize_int (0);
/* Every branch above derived VAL from an object/array size rather than
   an actual string length, so it only bounds the maximum.  */
1747 maxbound = true;
1750 if (!val)
1751 return false;
1753 /* Adjust the lower bound on the string length as necessary. */
1754 if (!pdata->minlen
1755 || (rkind != SRK_STRLEN
1756 && TREE_CODE (pdata->minlen) == INTEGER_CST
1757 && TREE_CODE (val) == INTEGER_CST
1758 && tree_int_cst_lt (val, pdata->minlen)))
1759 pdata->minlen = val;
1761 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1763 /* Adjust the tighter (more optimistic) string length bound
1764 if necessary and proceed to adjust the more conservative
1765 bound. */
1766 if (TREE_CODE (val) == INTEGER_CST)
1768 if (tree_int_cst_lt (pdata->maxbound, val))
1769 pdata->maxbound = val;
1771 else
1772 pdata->maxbound = val;
1774 else if (pdata->maxbound || maxbound)
1775 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1776 if VAL corresponds to the maximum length determined based
1777 on the type of the object. */
1778 pdata->maxbound = val;
1780 if (tight_bound)
1782 /* VAL computed above represents an optimistically tight bound
1783 on the length of the string based on the referenced object's
1784 or subobject's type. Determine the conservative upper bound
1785 based on the enclosing object's size if possible. */
1786 if (rkind == SRK_LENRANGE)
1788 poly_int64 offset;
1789 tree base = get_addr_base_and_unit_offset (arg, &offset);
1790 if (!base)
1792 /* When the call above fails due to a non-constant offset
1793 assume the offset is zero and use the size of the whole
1794 enclosing object instead. */
1795 base = get_base_address (arg);
1796 offset = 0;
1798 /* If the base object is a pointer no upper bound on the length
1799 can be determined. Otherwise the maximum length is equal to
1800 the size of the enclosing object minus the offset of
1801 the referenced subobject minus 1 (for the terminating nul). */
1802 tree type = TREE_TYPE (base);
1803 if (TREE_CODE (type) == POINTER_TYPE
1804 || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1805 || !(val = DECL_SIZE_UNIT (base)))
1806 val = build_all_ones_cst (size_type_node);
1807 else
1809 val = DECL_SIZE_UNIT (base);
1810 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1811 size_int (offset + 1));
1814 else
1815 return false;
1818 if (pdata->maxlen)
1820 /* Adjust the more conservative bound if possible/necessary
1821 and fail otherwise. */
1822 if (rkind != SRK_STRLEN)
1824 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1825 || TREE_CODE (val) != INTEGER_CST)
1826 return false;
1828 if (tree_int_cst_lt (pdata->maxlen, val))
1829 pdata->maxlen = val;
1830 return true;
1832 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1834 /* Fail if the length of this ARG is different from that
1835 previously determined from another ARG. */
1836 return false;
1840 pdata->maxlen = val;
1841 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1844 /* For an ARG referencing one or more strings, try to obtain the range
1845 of their lengths, or the size of the largest array ARG refers to if
1846 the range of lengths cannot be determined, and store all in *PDATA.
1847 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1848 the maximum constant value.
1849 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1850 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1851 length or if we are unable to determine the length, return false.
1852 VISITED is a bitmap of visited variables.
1853 RKIND determines the kind of value or range to obtain (see
1854 strlen_range_kind).
1855 Set PDATA->DECL if ARG refers to an unterminated constant array.
1856 On input, set ELTSIZE to 1 for normal single byte character strings,
1857 and either 2 or 4 for wide character strings (the size of wchar_t).
1858 Return true if *PDATA was successfully populated and false otherwise. */
1860 static bool
1861 get_range_strlen (tree arg, bitmap visited,
1862 strlen_range_kind rkind,
1863 c_strlen_data *pdata, unsigned eltsize)
/* Non-SSA trees are handled by the dedicated helper.  */
1866 if (TREE_CODE (arg) != SSA_NAME)
1867 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1869 /* If ARG is registered for SSA update we cannot look at its defining
1870 statement. */
1871 if (name_registered_for_update_p (arg))
1872 return false;
1874 /* If we were already here, break the infinite cycle. */
/* bitmap_set_bit returns false when the bit was already set; answering
   true here is deliberately optimistic -- the bound recorded on the
   first visit still stands.  */
1875 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1876 return true;
1878 tree var = arg;
1879 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1881 switch (gimple_code (def_stmt))
1883 case GIMPLE_ASSIGN:
1884 /* The RHS of the statement defining VAR must either have a
1885 constant length or come from another SSA_NAME with a constant
1886 length. */
1887 if (gimple_assign_single_p (def_stmt)
1888 || gimple_assign_unary_nop_p (def_stmt))
1890 tree rhs = gimple_assign_rhs1 (def_stmt);
1891 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1893 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1895 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1896 gimple_assign_rhs3 (def_stmt) };
1898 for (unsigned int i = 0; i < 2; i++)
1899 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1901 if (rkind != SRK_LENRANGE)
1902 return false;
1903 /* Set the upper bound to the maximum to prevent
1904 it from being adjusted in the next iteration but
1905 leave MINLEN and the more conservative MAXBOUND
1906 determined so far alone (or leave them null if
1907 they haven't been set yet). That the MINLEN is
1908 in fact zero can be determined from MAXLEN being
1909 unbounded but the discovered minimum is used for
1910 diagnostics. */
1911 pdata->maxlen = build_all_ones_cst (size_type_node);
1913 return true;
1915 return false;
1917 case GIMPLE_PHI:
1918 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1919 must have a constant length. */
1920 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1922 tree arg = gimple_phi_arg (def_stmt, i)->def;
1924 /* If this PHI has itself as an argument, we cannot
1925 determine the string length of this argument. However,
1926 if we can find a constant string length for the other
1927 PHI args then we can still be sure that this is a
1928 constant string length. So be optimistic and just
1929 continue with the next argument. */
1930 if (arg == gimple_phi_result (def_stmt))
1931 continue;
1933 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1935 if (rkind != SRK_LENRANGE)
1936 return false;
1937 /* Set the upper bound to the maximum to prevent
1938 it from being adjusted in the next iteration but
1939 leave MINLEN and the more conservative MAXBOUND
1940 determined so far alone (or leave them null if
1941 they haven't been set yet). That the MINLEN is
1942 in fact zero can be determined from MAXLEN being
1943 unbounded but the discovered minimum is used for
1944 diagnostics. */
1945 pdata->maxlen = build_all_ones_cst (size_type_node);
1948 return true;
1950 default:
1951 return false;
1955 /* Try to obtain the range of the lengths of the string(s) referenced
1956 by ARG, or the size of the largest array ARG refers to if the range
1957 of lengths cannot be determined, and store all in *PDATA which must
1958 be zero-initialized on input except PDATA->MAXBOUND may be set to
1959 a non-null tree node other than INTEGER_CST to request to have it
1960 set to the length of the longest string in a PHI. ELTSIZE is
1961 the expected size of the string element in bytes: 1 for char and
1962 some power of 2 for wide characters.
1963 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1964 for optimization. Returning false means that a nonzero PDATA->MINLEN
1965 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1966 is -1 (in that case, the actual range is indeterminate, i.e.,
1967 [0, PTRDIFF_MAX - 2]. */
1969 bool
1970 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1972 auto_bitmap visited;
1973 tree maxbound = pdata->maxbound;
1975 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1977 /* On failure extend the length range to an impossible maximum
1978 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1979 members can stay unchanged regardless. */
1980 pdata->minlen = ssize_int (0);
1981 pdata->maxlen = build_all_ones_cst (size_type_node);
1983 else if (!pdata->minlen)
1984 pdata->minlen = ssize_int (0);
1986 /* If it's unchanged from it initial non-null value, set the conservative
1987 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1988 if (maxbound && pdata->maxbound == maxbound)
1989 pdata->maxbound = build_all_ones_cst (size_type_node);
1991 return !integer_all_onesp (pdata->maxlen);
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.   For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  /* Guards get_range_strlen against cycles in the use-def chains.  */
  auto_bitmap visited;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded (all-ones means "no useful upper bound").  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
2035 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2036 true, strictly less than) the lower bound of SIZE at compile time and false
2037 otherwise. */
2039 static bool
2040 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2042 if (len == NULL_TREE)
2043 return false;
2045 wide_int size_range[2];
2046 wide_int len_range[2];
2047 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2049 if (strict)
2050 return wi::ltu_p (len_range[1], size_range[0]);
2051 else
2052 return wi::leu_p (len_range[1], size_range[0]);
2055 return false;
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return true if the call was folded (into DEST or into a memcpy of
   strlen (SRC) + 1 bytes) and false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen+memcpy form is larger; keep strcpy when optimizing
     for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  /* Lower to memcpy (DEST, SRC, strlen (SRC) + 1).  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return true if the call was folded (into DEST for a zero LEN, or into
   a memcpy when the whole source fits) and false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true if folded.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The transformations below only make sense when the result is used.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      /* Both operands are constant: evaluate at compile time.  */
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Replace the call with STR + constant offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transforms only apply when searching for the nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  Return true if folded.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* The transformations below only make sense when the result is used.  */
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  /* All the folds below require a constant needle.  */
  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      /* Both strings are constant: evaluate at compile time.  */
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Replace the call with HAYSTACK + constant offset of the match.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified: either into DST when SRC is
   the empty string, or into the sequence

     tmp = strlen (DST); memcpy (DST + tmp, SRC, strlen (SRC) + 1);

   when the source length is known and the block is optimized for speed.
   Return false if no simplification could be made.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion is larger code; only do it for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; materialize that for the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2445 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2446 are the arguments to the call. */
2448 static bool
2449 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2451 gimple *stmt = gsi_stmt (*gsi);
2452 tree dest = gimple_call_arg (stmt, 0);
2453 tree src = gimple_call_arg (stmt, 1);
2454 tree size = gimple_call_arg (stmt, 2);
2455 tree fn;
2456 const char *p;
2459 p = c_getstr (src);
2460 /* If the SRC parameter is "", return DEST. */
2461 if (p && *p == '\0')
2463 replace_call_with_value (gsi, dest);
2464 return true;
2467 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2468 return false;
2470 /* If __builtin_strcat_chk is used, assume strcat is available. */
2471 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2472 if (!fn)
2473 return false;
2475 gimple *repl = gimple_build_call (fn, 2, dest, src);
2476 replace_call_with_call_and_fold (gsi, repl);
2477 return true;
/* Simplify a call to the strncat builtin.  Return true if the call
   was folded into DST or into a plain strcat, false otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
	  && TREE_CODE (dstsize) == INTEGER_CST)
	{
	  int cmpdst = tree_int_cst_compare (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  /* The bound must be provably at least strlen (SRC) for the strncat
     to behave exactly like strcat.  */
  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2569 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2570 LEN, and SIZE. */
2572 static bool
2573 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2575 gimple *stmt = gsi_stmt (*gsi);
2576 tree dest = gimple_call_arg (stmt, 0);
2577 tree src = gimple_call_arg (stmt, 1);
2578 tree len = gimple_call_arg (stmt, 2);
2579 tree size = gimple_call_arg (stmt, 3);
2580 tree fn;
2581 const char *p;
2583 p = c_getstr (src);
2584 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2585 if ((p && *p == '\0')
2586 || integer_zerop (len))
2588 replace_call_with_value (gsi, dest);
2589 return true;
2592 if (! integer_all_onesp (size))
2594 tree src_len = c_strlen (src, 1);
2595 if (known_lower (stmt, src_len, len))
2597 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2598 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2599 if (!fn)
2600 return false;
2602 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2603 replace_call_with_call_and_fold (gsi, repl);
2604 return true;
2606 return false;
2609 /* If __builtin_strncat_chk is used, assume strncat is available. */
2610 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2611 if (!fn)
2612 return false;
2614 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2615 replace_call_with_call_and_fold (gsi, repl);
2616 return true;
2619 /* Build and append gimple statements to STMTS that would load a first
2620 character of a memory location identified by STR. LOC is location
2621 of the statement. */
2623 static tree
2624 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2626 tree var;
2628 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2629 tree cst_uchar_ptr_node
2630 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2631 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2633 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2634 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2635 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2637 gimple_assign_set_lhs (stmt, var);
2638 gimple_seq_add_stmt_without_update (stmts, stmt);
2640 return var;
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   Return true if the call was folded.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND stays all-ones when there is no bound or it isn't constant.  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.   LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Both arrays must be nul-terminated for strcmp.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Punt if the comparison would read past a nul-less array.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* A case-sensitive match implies a case-insensitive one;
	       only that direction can be concluded.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is known to be compared.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  Return true
   if the call was folded.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the searched-for character and the length must be constant.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only a search confined to the underlying array can be
	     folded to null; a longer one would read past it.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Character found: replace with ARG1 + constant offset.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true if the call was folded (into nothing,
   fputc, or fwrite) and false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true if the
   call was folded into a value or an unchecked mem* call.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* mempcpy returns DEST + LEN.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* The check can be dropped when SIZE is the all-ones "unknown" sentinel
     or LEN (or its maximum value) is provably within SIZE.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk () */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3093 /* Print a message in the dump file recording transformation of FROM to TO. */
3095 static void
3096 dump_transformation (gcall *from, gcall *to)
3098 if (dump_enabled_p ())
3099 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3100 gimple_call_fn (from), gimple_call_fn (to));
3103 /* Fold a call to the __st[rp]cpy_chk builtin.
3104 DEST, SRC, and SIZE are the arguments to the call.
3105 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3106 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3107 strings passed as second argument. */
/* NOTE(review): the comment above is stale — IGNORE and MAXLEN are not
   parameters of this function; IGNORE is derived below from the absence of
   an lhs, and MAXLEN is computed locally via get_maxval_strlen.  */
3109 static bool
3110 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3111 tree dest,
3112 tree src, tree size,
3113 enum built_in_function fcode)
3115 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3116 location_t loc = gimple_location (stmt);
/* Return value is unused iff the call has no lhs.  */
3117 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3118 tree len, fn;
3120 /* If SRC and DEST are the same (and not volatile), return DEST. */
3121 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3123 /* Issue -Wrestrict unless the pointers are null (those do
3124 not point to objects and so do not indicate an overlap;
3125 such calls could be the result of sanitization and jump
3126 threading). */
3127 if (!integer_zerop (dest)
3128 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3130 tree func = gimple_call_fndecl (stmt);
3132 warning_at (loc, OPT_Wrestrict,
3133 "%qD source argument is the same as destination",
3134 func);
3137 replace_call_with_value (gsi, dest);
3138 return true;
/* Upper bound on strlen (SRC), used to prove the copy fits in SIZE.  */
3141 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
/* SIZE of all-ones means "unknown object size": skip the range checks.  */
3142 if (! integer_all_onesp (size))
3144 len = c_strlen (src, 1);
3145 if (!known_lower (stmt, len, size, true)
3146 && !known_lower (stmt, maxlen, size, true))
3148 if (fcode == BUILT_IN_STPCPY_CHK)
3150 if (! ignore)
3151 return false;
3153 /* If return value of __stpcpy_chk is ignored,
3154 optimize into __strcpy_chk. */
3155 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3156 if (!fn)
3157 return false;
3159 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3160 replace_call_with_call_and_fold (gsi, repl);
3161 return true;
/* A length with side effects cannot be safely re-emitted below.  */
3164 if (! len || TREE_SIDE_EFFECTS (len))
3165 return false;
3167 /* If c_strlen returned something, but not provably less than size,
3168 transform __strcpy_chk into __memcpy_chk. */
3169 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3170 if (!fn)
3171 return false;
3173 gimple_seq stmts = NULL;
3174 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3175 len = gimple_convert (&stmts, loc, size_type_node, len);
/* Copy strlen (SRC) + 1 bytes so the NUL terminator is included.  */
3176 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3177 build_int_cst (size_type_node, 1));
3178 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3179 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3180 replace_call_with_call_and_fold (gsi, repl);
3181 return true;
3185 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3186 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3187 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3188 if (!fn)
3189 return false;
3191 gcall *repl = gimple_build_call (fn, 2, dest, src);
3192 dump_transformation (stmt, repl);
3193 replace_call_with_call_and_fold (gsi, repl);
3194 return true;
3197 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3198 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3199 length passed as third argument. IGNORE is true if return value can be
3200 ignored. FCODE is the BUILT_IN_* code of the builtin. */
/* NOTE(review): MAXLEN and IGNORE mentioned above are not parameters;
   both are computed locally below.  */
3202 static bool
3203 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3204 tree dest, tree src,
3205 tree len, tree size,
3206 enum built_in_function fcode)
3208 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
/* Return value is unused iff the call has no lhs.  */
3209 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3210 tree fn;
/* Upper bound on the LEN argument, used to prove LEN <= SIZE.  */
3212 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
/* SIZE of all-ones means "unknown object size"; otherwise require that
   LEN (or its upper bound) is known to fit in SIZE.  */
3213 if (! integer_all_onesp (size)
3214 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3216 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3218 /* If return value of __stpncpy_chk is ignored,
3219 optimize into __strncpy_chk. */
3220 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK)
3221 if (fn)
3223 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3224 replace_call_with_call_and_fold (gsi, repl);
3225 return true;
3228 return false;
3231 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3232 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3233 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3234 if (!fn)
3235 return false;
3237 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3238 dump_transformation (stmt, repl);
3239 replace_call_with_call_and_fold (gsi, repl);
3240 return true;
3243 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3244 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): despite the comment, this returns bool — true iff the
   call was simplified in place.  */
3246 static bool
3247 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3249 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3250 location_t loc = gimple_location (stmt);
3251 tree dest = gimple_call_arg (stmt, 0);
3252 tree src = gimple_call_arg (stmt, 1);
3253 tree fn, lenp1;
3255 /* If the result is unused, replace stpcpy with strcpy. */
3256 if (gimple_call_lhs (stmt) == NULL_TREE)
3258 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3259 if (!fn)
3260 return false;
3261 gimple_call_set_fndecl (stmt, fn);
3262 fold_stmt (gsi);
3263 return true;
3266 /* Set to non-null if ARG refers to an unterminated array. */
3267 c_strlen_data data = { };
3268 /* The size of the unterminated array if SRC referes to one. */
3269 tree size;
3270 /* True if the size is exact/constant, false if it's the lower bound
3271 of a range. */
3272 bool exact;
3273 tree len = c_strlen (src, 1, &data, 1);
3274 if (!len
3275 || TREE_CODE (len) != INTEGER_CST)
3277 data.decl = unterminated_array (src, &size, &exact);
3278 if (!data.decl)
3279 return false;
3282 if (data.decl)
3284 /* Avoid folding calls with unterminated arrays. */
3285 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3286 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3287 exact);
/* Suppress the warning so it is only issued once for this stmt.  */
3288 suppress_warning (stmt, OPT_Wstringop_overread);
3289 return false;
/* When optimizing for size, expanding into memcpy + pointer arithmetic
   is only worthwhile for a zero-length copy.  */
3292 if (optimize_function_for_size_p (cfun)
3293 /* If length is zero it's small enough. */
3294 && !integer_zerop (len))
3295 return false;
3297 /* If the source has a known length replace stpcpy with memcpy. */
3298 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3299 if (!fn)
3300 return false;
3302 gimple_seq stmts = NULL;
3303 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
/* Copy LEN + 1 bytes to include the NUL terminator.  */
3304 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3305 tem, build_int_cst (size_type_node, 1));
3306 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3307 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* Transfer the virtual operands from the original call.  */
3308 gimple_move_vops (repl, stmt);
3309 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3310 /* Replace the result with dest + len. */
3311 stmts = NULL;
3312 tem = gimple_convert (&stmts, loc, sizetype, len);
3313 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3314 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3315 POINTER_PLUS_EXPR, dest, tem);
3316 gsi_replace (gsi, ret, false);
3317 /* Finally fold the memcpy call. */
3318 gimple_stmt_iterator gsi2 = *gsi;
3319 gsi_prev (&gsi2);
3320 fold_stmt (&gsi2);
3321 return true;
3324 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3325 NULL_TREE if a normal call should be emitted rather than expanding
3326 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3327 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3328 passed as second argument. */
/* NOTE(review): returns bool (true iff folded); MAXLEN is computed
   locally, not a parameter.  */
3330 static bool
3331 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3332 enum built_in_function fcode)
3334 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3335 tree dest, size, len, fn, fmt, flag;
3336 const char *fmt_str;
3338 /* Verify the required arguments in the original call. */
3339 if (gimple_call_num_args (stmt) < 5)
3340 return false;
/* __snprintf_chk (dest, len, flag, size, fmt, ...).  */
3342 dest = gimple_call_arg (stmt, 0);
3343 len = gimple_call_arg (stmt, 1);
3344 flag = gimple_call_arg (stmt, 2);
3345 size = gimple_call_arg (stmt, 3);
3346 fmt = gimple_call_arg (stmt, 4);
/* Require LEN (or its upper bound) provably within SIZE, unless SIZE
   is all-ones (unknown object size).  */
3348 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3349 if (! integer_all_onesp (size)
3350 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3351 return false;
3353 if (!init_target_chars ())
3354 return false;
3356 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3357 or if format doesn't contain % chars or is "%s". */
3358 if (! integer_zerop (flag))
3360 fmt_str = c_getstr (fmt);
3361 if (fmt_str == NULL)
3362 return false;
3363 if (strchr (fmt_str, target_percent) != NULL
3364 && strcmp (fmt_str, target_percent_s))
3365 return false;
3368 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3369 available. */
3370 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3371 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3372 if (!fn)
3373 return false;
3375 /* Replace the called function and the first 5 argument by 3 retaining
3376 trailing varargs. */
3377 gimple_call_set_fndecl (stmt, fn);
3378 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3379 gimple_call_set_arg (stmt, 0, dest);
3380 gimple_call_set_arg (stmt, 1, len);
3381 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the trailing varargs down over the dropped FLAG/SIZE slots.  */
3382 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3383 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3384 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3385 fold_stmt (gsi);
3386 return true;
3389 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3390 Return NULL_TREE if a normal call should be emitted rather than
3391 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3392 or BUILT_IN_VSPRINTF_CHK. */
/* NOTE(review): returns bool — true iff the call was folded.  */
3394 static bool
3395 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3396 enum built_in_function fcode)
3398 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3399 tree dest, size, len, fn, fmt, flag;
3400 const char *fmt_str;
3401 unsigned nargs = gimple_call_num_args (stmt);
3403 /* Verify the required arguments in the original call. */
3404 if (nargs < 4)
3405 return false;
/* __sprintf_chk (dest, flag, size, fmt, ...).  */
3406 dest = gimple_call_arg (stmt, 0);
3407 flag = gimple_call_arg (stmt, 1);
3408 size = gimple_call_arg (stmt, 2);
3409 fmt = gimple_call_arg (stmt, 3);
/* LEN stays NULL unless the output length can be computed below.  */
3411 len = NULL_TREE;
3413 if (!init_target_chars ())
3414 return false;
3416 /* Check whether the format is a literal string constant. */
3417 fmt_str = c_getstr (fmt);
3418 if (fmt_str != NULL)
3420 /* If the format doesn't contain % args or %%, we know the size. */
3421 if (strchr (fmt_str, target_percent) == 0)
3423 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3424 len = build_int_cstu (size_type_node, strlen (fmt_str));
3426 /* If the format is "%s" and first ... argument is a string literal,
3427 we know the size too. */
3428 else if (fcode == BUILT_IN_SPRINTF_CHK
3429 && strcmp (fmt_str, target_percent_s) == 0)
3431 tree arg;
3433 if (nargs == 5)
3435 arg = gimple_call_arg (stmt, 4);
3436 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3437 len = c_strlen (arg, 1);
/* Unless SIZE is all-ones (unknown), the output must provably fit.  */
3442 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3443 return false;
3445 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3446 or if format doesn't contain % chars or is "%s". */
3447 if (! integer_zerop (flag))
3449 if (fmt_str == NULL)
3450 return false;
3451 if (strchr (fmt_str, target_percent) != NULL
3452 && strcmp (fmt_str, target_percent_s))
3453 return false;
3456 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3457 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3458 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3459 if (!fn)
3460 return false;
3462 /* Replace the called function and the first 4 argument by 2 retaining
3463 trailing varargs. */
3464 gimple_call_set_fndecl (stmt, fn);
3465 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3466 gimple_call_set_arg (stmt, 0, dest);
3467 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the trailing varargs down over the dropped FLAG/SIZE slots.  */
3468 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3469 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3470 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3471 fold_stmt (gsi);
3472 return true;
3475 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3476 ORIG may be null if this is a 2-argument call. We don't attempt to
3477 simplify calls with more than 3 arguments.
3479 Return true if simplification was possible, otherwise false. */
3481 bool
3482 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3484 gimple *stmt = gsi_stmt (*gsi);
3486 /* Verify the required arguments in the original call. We deal with two
3487 types of sprintf() calls: 'sprintf (str, fmt)' and
3488 'sprintf (dest, "%s", orig)'. */
3489 if (gimple_call_num_args (stmt) > 3)
3490 return false;
3492 tree orig = NULL_TREE;
3493 if (gimple_call_num_args (stmt) == 3)
3494 orig = gimple_call_arg (stmt, 2);
3496 /* Check whether the format is a literal string constant. */
3497 tree fmt = gimple_call_arg (stmt, 1);
3498 const char *fmt_str = c_getstr (fmt);
3499 if (fmt_str == NULL)
3500 return false;
3502 tree dest = gimple_call_arg (stmt, 0);
3504 if (!init_target_chars ())
3505 return false;
/* Both simplifications below replace the call with strcpy.  */
3507 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3508 if (!fn)
3509 return false;
3511 /* If the format doesn't contain % args or %%, use strcpy. */
3512 if (strchr (fmt_str, target_percent) == NULL)
3514 /* Don't optimize sprintf (buf, "abc", ptr++). */
3515 if (orig)
3516 return false;
3518 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3519 'format' is known to contain no % formats. */
3520 gimple_seq stmts = NULL;
3521 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3523 /* Propagate the NO_WARNING bit to avoid issuing the same
3524 warning more than once. */
3525 copy_warning (repl, stmt);
3527 gimple_seq_add_stmt_without_update (&stmts, repl);
/* sprintf returns the number of characters written; materialize it
   as the constant strlen (FMT) when the result is used.  */
3528 if (tree lhs = gimple_call_lhs (stmt))
3530 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3531 strlen (fmt_str)));
3532 gimple_seq_add_stmt_without_update (&stmts, repl);
3533 gsi_replace_with_seq_vops (gsi, stmts);
3534 /* gsi now points at the assignment to the lhs, get a
3535 stmt iterator to the memcpy call.
3536 ??? We can't use gsi_for_stmt as that doesn't work when the
3537 CFG isn't built yet. */
3538 gimple_stmt_iterator gsi2 = *gsi;
3539 gsi_prev (&gsi2);
3540 fold_stmt (&gsi2);
3542 else
3544 gsi_replace_with_seq_vops (gsi, stmts);
3545 fold_stmt (gsi);
3547 return true;
3550 /* If the format is "%s", use strcpy if the result isn't used. */
3551 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3553 /* Don't crash on sprintf (str1, "%s"). */
3554 if (!orig)
3555 return false;
3557 /* Don't fold calls with source arguments of invalid (nonpointer)
3558 types. */
3559 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3560 return false;
/* When the result is used, the return value is strlen (ORIG); only
   fold if that length can be determined.  */
3562 tree orig_len = NULL_TREE;
3563 if (gimple_call_lhs (stmt))
3565 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3566 if (!orig_len)
3567 return false;
3570 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3571 gimple_seq stmts = NULL;
3572 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3574 /* Propagate the NO_WARNING bit to avoid issuing the same
3575 warning more than once. */
3576 copy_warning (repl, stmt);
3578 gimple_seq_add_stmt_without_update (&stmts, repl);
3579 if (tree lhs = gimple_call_lhs (stmt))
3581 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3582 TREE_TYPE (orig_len)))
3583 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3584 repl = gimple_build_assign (lhs, orig_len);
3585 gimple_seq_add_stmt_without_update (&stmts, repl);
3586 gsi_replace_with_seq_vops (gsi, stmts);
3587 /* gsi now points at the assignment to the lhs, get a
3588 stmt iterator to the memcpy call.
3589 ??? We can't use gsi_for_stmt as that doesn't work when the
3590 CFG isn't built yet. */
3591 gimple_stmt_iterator gsi2 = *gsi;
3592 gsi_prev (&gsi2);
3593 fold_stmt (&gsi2);
3595 else
3597 gsi_replace_with_seq_vops (gsi, stmts);
3598 fold_stmt (gsi);
3600 return true;
3602 return false;
3605 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3606 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3607 attempt to simplify calls with more than 4 arguments.
3609 Return true if simplification was possible, otherwise false. */
3611 bool
3612 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3614 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3615 tree dest = gimple_call_arg (stmt, 0);
3616 tree destsize = gimple_call_arg (stmt, 1);
3617 tree fmt = gimple_call_arg (stmt, 2);
3618 tree orig = NULL_TREE;
3619 const char *fmt_str = NULL;
3621 if (gimple_call_num_args (stmt) > 4)
3622 return false;
3624 if (gimple_call_num_args (stmt) == 4)
3625 orig = gimple_call_arg (stmt, 3);
3627 /* Check whether the format is a literal string constant. */
3628 fmt_str = c_getstr (fmt);
3629 if (fmt_str == NULL)
3630 return false;
3632 if (!init_target_chars ())
3633 return false;
3635 /* If the format doesn't contain % args or %%, use strcpy. */
3636 if (strchr (fmt_str, target_percent) == NULL)
3638 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3639 if (!fn)
3640 return false;
3642 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3643 if (orig)
3644 return false;
/* Number of characters snprintf would write (excluding the NUL).  */
3646 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3648 /* We could expand this as
3649 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3650 or to
3651 memcpy (str, fmt_with_nul_at_cstm1, cst);
3652 but in the former case that might increase code size
3653 and in the latter case grow .rodata section too much.
3654 So punt for now. */
3655 if (!known_lower (stmt, len, destsize, true))
3656 return false;
3658 gimple_seq stmts = NULL;
3659 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3660 gimple_seq_add_stmt_without_update (&stmts, repl);
/* snprintf returns the would-be output length; materialize it as a
   constant when the result is used.  */
3661 if (tree lhs = gimple_call_lhs (stmt))
3663 repl = gimple_build_assign (lhs,
3664 fold_convert (TREE_TYPE (lhs), len));
3665 gimple_seq_add_stmt_without_update (&stmts, repl);
3666 gsi_replace_with_seq_vops (gsi, stmts);
3667 /* gsi now points at the assignment to the lhs, get a
3668 stmt iterator to the memcpy call.
3669 ??? We can't use gsi_for_stmt as that doesn't work when the
3670 CFG isn't built yet. */
3671 gimple_stmt_iterator gsi2 = *gsi;
3672 gsi_prev (&gsi2);
3673 fold_stmt (&gsi2);
3675 else
3677 gsi_replace_with_seq_vops (gsi, stmts);
3678 fold_stmt (gsi);
3680 return true;
3683 /* If the format is "%s", use strcpy if the result isn't used. */
3684 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3686 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3687 if (!fn)
3688 return false;
3690 /* Don't crash on snprintf (str1, cst, "%s"). */
3691 if (!orig)
3692 return false;
/* Exact strlen of ORIG, needed both for the fit check and for the
   return value.  */
3694 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3696 /* We could expand this as
3697 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3698 or to
3699 memcpy (str1, str2_with_nul_at_cstm1, cst);
3700 but in the former case that might increase code size
3701 and in the latter case grow .rodata section too much.
3702 So punt for now. */
3703 if (!known_lower (stmt, orig_len, destsize, true))
3704 return false;
3706 /* Convert snprintf (str1, cst, "%s", str2) into
3707 strcpy (str1, str2) if strlen (str2) < cst. */
3708 gimple_seq stmts = NULL;
3709 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3710 gimple_seq_add_stmt_without_update (&stmts, repl);
3711 if (tree lhs = gimple_call_lhs (stmt))
3713 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3714 TREE_TYPE (orig_len)))
3715 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3716 repl = gimple_build_assign (lhs, orig_len);
3717 gimple_seq_add_stmt_without_update (&stmts, repl);
3718 gsi_replace_with_seq_vops (gsi, stmts);
3719 /* gsi now points at the assignment to the lhs, get a
3720 stmt iterator to the memcpy call.
3721 ??? We can't use gsi_for_stmt as that doesn't work when the
3722 CFG isn't built yet. */
3723 gimple_stmt_iterator gsi2 = *gsi;
3724 gsi_prev (&gsi2);
3725 fold_stmt (&gsi2);
3727 else
3729 gsi_replace_with_seq_vops (gsi, stmts);
3730 fold_stmt (gsi);
3732 return true;
3734 return false;
3737 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3738 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3739 more than 3 arguments, and ARG may be null in the 2-argument case.
3741 Return NULL_TREE if no simplification was possible, otherwise return the
3742 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3743 code of the function to be simplified. */
/* NOTE(review): returns bool — true iff the call was simplified.  */
3745 static bool
3746 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3747 tree fp, tree fmt, tree arg,
3748 enum built_in_function fcode)
3750 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3751 tree fn_fputc, fn_fputs;
3752 const char *fmt_str = NULL;
3754 /* If the return value is used, don't do the transformation. */
3755 if (gimple_call_lhs (stmt) != NULL_TREE)
3756 return false;
3758 /* Check whether the format is a literal string constant. */
3759 fmt_str = c_getstr (fmt);
3760 if (fmt_str == NULL)
3761 return false;
3763 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3765 /* If we're using an unlocked function, assume the other
3766 unlocked functions exist explicitly. */
3767 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3768 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3770 else
3772 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3773 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3776 if (!init_target_chars ())
3777 return false;
3779 /* If the format doesn't contain % args or %%, use strcpy. */
3780 if (strchr (fmt_str, target_percent) == NULL)
/* A trailing ARG with no conversions would be unused output — don't
   fold it away (except for the va_list variants, where ARG is the
   va_list itself).  */
3782 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3783 && arg)
3784 return false;
3786 /* If the format specifier was "", fprintf does nothing. */
3787 if (fmt_str[0] == '\0')
3789 replace_call_with_value (gsi, NULL_TREE);
3790 return true;
3793 /* When "string" doesn't contain %, replace all cases of
3794 fprintf (fp, string) with fputs (string, fp). The fputs
3795 builtin will take care of special cases like length == 1. */
3796 if (fn_fputs)
3798 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3799 replace_call_with_call_and_fold (gsi, repl);
3800 return true;
3804 /* The other optimizations can be done only on the non-va_list variants. */
3805 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3806 return false;
3808 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3809 else if (strcmp (fmt_str, target_percent_s) == 0)
3811 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3812 return false;
3813 if (fn_fputs)
3815 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3816 replace_call_with_call_and_fold (gsi, repl);
3817 return true;
3821 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3822 else if (strcmp (fmt_str, target_percent_c) == 0)
3824 if (!arg
3825 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3826 return false;
3827 if (fn_fputc)
3829 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3830 replace_call_with_call_and_fold (gsi, repl);
3831 return true;
3835 return false;
3838 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3839 FMT and ARG are the arguments to the call; we don't fold cases with
3840 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3842 Return NULL_TREE if no simplification was possible, otherwise return the
3843 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3844 code of the function to be simplified. */
/* NOTE(review): returns bool — true iff the call was simplified.  */
3846 static bool
3847 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3848 tree arg, enum built_in_function fcode)
3850 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3851 tree fn_putchar, fn_puts, newarg;
3852 const char *fmt_str = NULL;
3854 /* If the return value is used, don't do the transformation. */
3855 if (gimple_call_lhs (stmt) != NULL_TREE)
3856 return false;
3858 /* Check whether the format is a literal string constant. */
3859 fmt_str = c_getstr (fmt);
3860 if (fmt_str == NULL)
3861 return false;
3863 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3865 /* If we're using an unlocked function, assume the other
3866 unlocked functions exist explicitly. */
3867 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3868 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3870 else
3872 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3873 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3876 if (!init_target_chars ())
3877 return false;
/* Handle printf ("%s", str) and printf of a literal with no '%'
   uniformly: in both cases STR below is the string actually output.  */
3879 if (strcmp (fmt_str, target_percent_s) == 0
3880 || strchr (fmt_str, target_percent) == NULL)
3882 const char *str;
3884 if (strcmp (fmt_str, target_percent_s) == 0)
3886 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3887 return false;
3889 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3890 return false;
3892 str = c_getstr (arg);
3893 if (str == NULL)
3894 return false;
3896 else
3898 /* The format specifier doesn't contain any '%' characters. */
3899 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3900 && arg)
3901 return false;
3902 str = fmt_str;
3905 /* If the string was "", printf does nothing. */
3906 if (str[0] == '\0')
3908 replace_call_with_value (gsi, NULL_TREE);
3909 return true;
3912 /* If the string has length of 1, call putchar. */
3913 if (str[1] == '\0')
3915 /* Given printf("c"), (where c is any one character,)
3916 convert "c"[0] to an int and pass that to the replacement
3917 function. */
3918 newarg = build_int_cst (integer_type_node, str[0]);
3919 if (fn_putchar)
3921 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3922 replace_call_with_call_and_fold (gsi, repl);
3923 return true;
3926 else
3928 /* If the string was "string\n", call puts("string"). */
3929 size_t len = strlen (str);
3930 if ((unsigned char)str[len - 1] == target_newline
3931 && (size_t) (int) len == len
3932 && (int) len > 0)
3934 char *newstr;
3936 /* Create a NUL-terminated string that's one char shorter
3937 than the original, stripping off the trailing '\n'. */
3938 newstr = xstrdup (str);
3939 newstr[len - 1] = '\0';
/* build_string_literal adds the terminating NUL, so pass the
   original LEN (stripped length + 1).  */
3940 newarg = build_string_literal (len, newstr);
3941 free (newstr);
3942 if (fn_puts)
3944 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3945 replace_call_with_call_and_fold (gsi, repl);
3946 return true;
3949 else
3950 /* We'd like to arrange to call fputs(string,stdout) here,
3951 but we need stdout and don't have a way to get it yet. */
3952 return false;
3956 /* The other optimizations can be done only on the non-va_list variants. */
3957 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3958 return false;
3960 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3961 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3963 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3964 return false;
3965 if (fn_puts)
3967 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3968 replace_call_with_call_and_fold (gsi, repl);
3969 return true;
3973 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3974 else if (strcmp (fmt_str, target_percent_c) == 0)
3976 if (!arg || ! useless_type_conversion_p (integer_type_node,
3977 TREE_TYPE (arg)))
3978 return false;
3979 if (fn_putchar)
3981 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3982 replace_call_with_call_and_fold (gsi, repl);
3983 return true;
3987 return false;
3992 /* Fold a call to __builtin_strlen with known length LEN. */
/* NOTE(review): LEN is not a parameter — the length range is computed
   below with get_range_strlen.  Returns true iff the call was replaced
   by a constant.  */
3994 static bool
3995 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3997 gimple *stmt = gsi_stmt (*gsi);
3998 tree arg = gimple_call_arg (stmt, 0);
4000 wide_int minlen;
4001 wide_int maxlen;
4003 c_strlen_data lendata = { };
/* Only use the computed range when it does not involve an
   unterminated array (lendata.decl) and both bounds are constants.  */
4004 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4005 && !lendata.decl
4006 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4007 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4009 /* The range of lengths refers to either a single constant
4010 string or to the longest and shortest constant string
4011 referenced by the argument of the strlen() call, or to
4012 the strings that can possibly be stored in the arrays
4013 the argument refers to. */
4014 minlen = wi::to_wide (lendata.minlen);
4015 maxlen = wi::to_wide (lendata.maxlen);
4017 else
/* Fall back to the widest possible range for a string length:
   [0, max_object_size () - 2].  */
4019 unsigned prec = TYPE_PRECISION (sizetype);
4021 minlen = wi::shwi (0, prec);
4022 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4025 if (minlen == maxlen)
4027 /* Fold the strlen call to a constant. */
4028 tree type = TREE_TYPE (lendata.minlen);
4029 tree len = force_gimple_operand_gsi (gsi,
4030 wide_int_to_tree (type, minlen),
4031 true, NULL, true, GSI_SAME_STMT);
4032 replace_call_with_value (gsi, len);
4033 return true;
4036 /* Set the strlen() range to [0, MAXLEN]. */
4037 if (tree lhs = gimple_call_lhs (stmt))
4038 set_strlen_range (lhs, minlen, maxlen);
4040 return false;
4043 /* Fold a call to __builtin_acc_on_device. */
/* Replaces the call with (ARG0 == VAL_HOST) | (ARG0 == VAL_DEV), where
   the two constants depend on whether this is the host or the
   accelerator (offload) compiler.  */
4045 static bool
4046 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4048 /* Defer folding until we know which compiler we're in. */
4049 if (symtab->state != EXPANSION)
4050 return false;
4052 unsigned val_host = GOMP_DEVICE_HOST;
4053 unsigned val_dev = GOMP_DEVICE_NONE;
4055 #ifdef ACCEL_COMPILER
4056 val_host = GOMP_DEVICE_NOT_HOST;
4057 val_dev = ACCEL_COMPILER_acc_device;
4058 #endif
4060 location_t loc = gimple_location (gsi_stmt (*gsi));
/* host_eq = (arg0 == val_host)  */
4062 tree host_eq = make_ssa_name (boolean_type_node);
4063 gimple *host_ass = gimple_build_assign
4064 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4065 gimple_set_location (host_ass, loc);
4066 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
/* dev_eq = (arg0 == val_dev)  */
4068 tree dev_eq = make_ssa_name (boolean_type_node);
4069 gimple *dev_ass = gimple_build_assign
4070 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4071 gimple_set_location (dev_ass, loc);
4072 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
/* result = host_eq | dev_eq  */
4074 tree result = make_ssa_name (boolean_type_node);
4075 gimple *result_ass = gimple_build_assign
4076 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4077 gimple_set_location (result_ass, loc);
4078 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4080 replace_call_with_value (gsi, result);
4082 return true;
4085 /* Fold realloc (0, n) -> malloc (n). */
4087 static bool
4088 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4090 gimple *stmt = gsi_stmt (*gsi);
4091 tree arg = gimple_call_arg (stmt, 0);
4092 tree size = gimple_call_arg (stmt, 1);
4094 if (operand_equal_p (arg, null_pointer_node, 0))
4096 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4097 if (fn_malloc)
4099 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4100 replace_call_with_call_and_fold (gsi, repl);
4101 return true;
4104 return false;
4107 /* Number of bytes into which any type but aggregate or vector types
4108 should fit. */
4109 static constexpr size_t clear_padding_unit
4110 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4111 /* Buffer size on which __builtin_clear_padding folding code works. */
4112 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4114 /* Data passed through __builtin_clear_padding folding. */
4115 struct clear_padding_struct {
4116 location_t loc;
4117 /* 0 during __builtin_clear_padding folding, nonzero during
4118 clear_type_padding_in_mask. In that case, instead of clearing the
4119 non-padding bits in union_ptr array clear the padding bits in there. */
4120 bool clear_in_mask;
/* Base address of the object whose padding is being cleared.  */
4121 tree base;
4122 tree alias_type;
4123 gimple_stmt_iterator *gsi;
4124 /* Alignment of buf->base + 0. */
4125 unsigned align;
4126 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4127 HOST_WIDE_INT off;
4128 /* Number of padding bytes before buf->off that don't have padding clear
4129 code emitted yet. */
4130 HOST_WIDE_INT padding_bytes;
4131 /* The size of the whole object. Never emit code to touch
4132 buf->base + buf->sz or following bytes. */
4133 HOST_WIDE_INT sz;
4134 /* Number of bytes recorded in buf->buf. */
4135 size_t size;
4136 /* When inside union, instead of emitting code we and bits inside of
4137 the union_ptr array. */
4138 unsigned char *union_ptr;
4140 /* Set bits mean padding bits that need to be cleared by the builtin. */
4141 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  Three modes: clear_in_mask (clear padding bits in the
   union_ptr mask), union mode (and buf bits into union_ptr), and the
   normal mode that emits GIMPLE stores clearing the padding.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    /* Keep a clear_padding_unit-aligned tail for later processing.  */
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      /* Shift the unprocessed tail to the front of the buffer.  */
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Normal mode: walk the buffer a word at a time and emit stores.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* Never touch bytes at or after buf->base + buf->sz;
	     retry with a smaller word.  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      /* Characterize the current word: extents of nonzero (padding) and
	 zero bytes, and whether only whole bytes are involved.  */
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  /* Flush the accumulated run of padding bytes with one store:
	     a char store for a single byte, a char-array store otherwise.  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bitfields are involved: emit a read-modify-write of the smallest
	 power-of-two-sized element covering all padding bits.  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		/* The whole element is padding, store plain zero.  */
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  tree tmp_dst = unshare_expr (dst);
		  /* The folding introduces a read from the tmp_dst, we should
		     prevent uninitialized warning analysis from issuing warning
		     for such fake read.  In order to suppress warning only for
		     this expr, we should set the location of tmp_dst to
		     UNKNOWN_LOCATION first, then suppress_warning will call
		     set_no_warning_bit to set the no_warning flag only for
		     tmp_dst.  */
		  SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
		  suppress_warning (tmp_dst, OPT_Wuninitialized);
		  g = gimple_build_assign (src, tmp_dst);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  /* Flush any trailing run of padding bytes.  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
/* Append PADDING_BYTES padding bytes (all-ones bytes) to BUF,
   flushing as needed when the buffer would overflow.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Even after flushing there isn't room; fill the buffer, flush,
	 and account for the remainder via buf->padding_bytes.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}

static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);
/* Clear padding bits of union type TYPE.  Each member is processed into
   a scratch buffer and the results are intersected, since a bit is
   padding in a union only if it is padding in every member.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Already inside a union: reuse BUF itself as the scratch buffer.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      /* Allocate a fresh scratch buffer whose union_ptr either aliases
	 the tail of buf->buf or is heap-allocated for large unions.  */
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      /* Copy the heap-allocated intersection result back into BUF
	 in clear_padding_buf_size-bounded chunks.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4587 /* The only known floating point formats with padding bits are the
4588 IEEE extended ones. */
4590 static bool
4591 clear_padding_real_needs_padding_p (tree type)
4593 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4594 return (fmt->b == 2
4595 && fmt->signbit_ro == fmt->signbit_rw
4596 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4599 /* Return true if TYPE might contain any padding bits. */
4601 bool
4602 clear_padding_type_may_have_padding_p (tree type)
4604 switch (TREE_CODE (type))
4606 case RECORD_TYPE:
4607 case UNION_TYPE:
4608 return true;
4609 case ARRAY_TYPE:
4610 case COMPLEX_TYPE:
4611 case VECTOR_TYPE:
4612 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4613 case REAL_TYPE:
4614 return clear_padding_real_needs_padding_p (type);
4615 default:
4616 return false;
4620 /* Emit a runtime loop:
4621 for (; buf.base != end; buf.base += sz)
4622 __builtin_clear_padding (buf.base); */
4624 static void
4625 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4626 tree end, bool for_auto_init)
4628 tree l1 = create_artificial_label (buf->loc);
4629 tree l2 = create_artificial_label (buf->loc);
4630 tree l3 = create_artificial_label (buf->loc);
4631 gimple *g = gimple_build_goto (l2);
4632 gimple_set_location (g, buf->loc);
4633 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4634 g = gimple_build_label (l1);
4635 gimple_set_location (g, buf->loc);
4636 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4637 clear_padding_type (buf, type, buf->sz, for_auto_init);
4638 clear_padding_flush (buf, true);
4639 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4640 size_int (buf->sz));
4641 gimple_set_location (g, buf->loc);
4642 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4643 g = gimple_build_label (l2);
4644 gimple_set_location (g, buf->loc);
4645 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4646 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4647 gimple_set_location (g, buf->loc);
4648 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4649 g = gimple_build_label (l3);
4650 gimple_set_location (g, buf->loc);
4651 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* Clear the bits the bit-field actually occupies out of the
		   padding bytes just recorded in buf->buf.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* Flexible array member: padding after it is not well
		   defined; diagnose unless folding a compiler-inserted
		   call or computing a mask.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (ftype))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		if (tree asbase = lang_hooks.types.classtype_as_base (field))
		  ftype = asbase;
		clear_padding_type (buf, ftype, fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  /* Restore the caller's buffer state.  */
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_real + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      /* nullptr_t is all padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalar types carry no padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4869 /* Clear padding bits of TYPE in MASK. */
4871 void
4872 clear_type_padding_in_mask (tree type, unsigned char *mask)
4874 clear_padding_struct buf;
4875 buf.loc = UNKNOWN_LOCATION;
4876 buf.clear_in_mask = true;
4877 buf.base = NULL_TREE;
4878 buf.alias_type = NULL_TREE;
4879 buf.gsi = NULL;
4880 buf.align = 0;
4881 buf.off = 0;
4882 buf.padding_bytes = 0;
4883 buf.sz = int_size_in_bytes (type);
4884 buf.size = 0;
4885 buf.union_ptr = mask;
4886 clear_padding_type (&buf, type, buf.sz, false);
4887 clear_padding_flush (&buf, true);
/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 2nd argument of __builtin_clear_padding's value is used to
     distinguish whether this call is made by the user or by the compiler
     for automatic variable initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
  /* The object's type is carried as the pointed-to type of the 2nd
     argument's type.  */
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  /* Remember the statement before the call so we can later tell whether
     any statements were inserted.  */
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop over the VLA elements.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  /* If nothing was inserted before the call, replace the call with a nop
     to keep the iterator valid; otherwise remove the call and leave *gsi
     pointing at the last inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders above and falls back to
   the generic tree-level builtin folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5167 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5168 function calls to constants, where possible. */
5170 static tree
5171 fold_internal_goacc_dim (const gimple *call)
5173 int axis = oacc_get_ifn_dim_arg (call);
5174 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5175 tree result = NULL_TREE;
5176 tree type = TREE_TYPE (gimple_call_lhs (call));
5178 switch (gimple_call_internal_fn (call))
5180 case IFN_GOACC_DIM_POS:
5181 /* If the size is 1, we know the answer. */
5182 if (size == 1)
5183 result = build_int_cst (type, 0);
5184 break;
5185 case IFN_GOACC_DIM_SIZE:
5186 /* If the size is not dynamic, we know the answer. */
5187 if (size)
5188 result = build_int_cst (type, size);
5189 break;
5190 default:
5191 break;
5194 return result;
5197 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5198 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5199 &var where var is only addressable because of such calls. */
5201 bool
5202 optimize_atomic_compare_exchange_p (gimple *stmt)
5204 if (gimple_call_num_args (stmt) != 6
5205 || !flag_inline_atomics
5206 || !optimize
5207 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5208 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5209 || !gimple_vdef (stmt)
5210 || !gimple_vuse (stmt))
5211 return false;
5213 tree fndecl = gimple_call_fndecl (stmt);
5214 switch (DECL_FUNCTION_CODE (fndecl))
5216 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5217 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5218 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5219 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5220 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5221 break;
5222 default:
5223 return false;
5226 tree expected = gimple_call_arg (stmt, 1);
5227 if (TREE_CODE (expected) != ADDR_EXPR
5228 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5229 return false;
5231 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5232 if (!is_gimple_reg_type (etype)
5233 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5234 || TREE_THIS_VOLATILE (etype)
5235 || VECTOR_TYPE_P (etype)
5236 || TREE_CODE (etype) == COMPLEX_TYPE
5237 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5238 might not preserve all the bits. See PR71716. */
5239 || SCALAR_FLOAT_TYPE_P (etype)
5240 || maybe_ne (TYPE_PRECISION (etype),
5241 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5242 return false;
5244 tree weak = gimple_call_arg (stmt, 3);
5245 if (!integer_zerop (weak) && !integer_onep (weak))
5246 return false;
5248 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5249 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5250 machine_mode mode = TYPE_MODE (itype);
5252 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5253 == CODE_FOR_nothing
5254 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5255 return false;
5257 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5258 return false;
5260 return true;
5263 /* Fold
5264 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5265 into
5266 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5267 i = IMAGPART_EXPR <t>;
5268 r = (_Bool) i;
5269 e = REALPART_EXPR <t>; */
5271 void
5272 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5274 gimple *stmt = gsi_stmt (*gsi);
5275 tree fndecl = gimple_call_fndecl (stmt);
5276 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5277 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5278 tree ctype = build_complex_type (itype);
5279 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5280 bool throws = false;
5281 edge e = NULL;
5282 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5283 expected);
5284 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5285 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5286 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5288 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5289 build1 (VIEW_CONVERT_EXPR, itype,
5290 gimple_assign_lhs (g)));
5291 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5293 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5294 + int_size_in_bytes (itype);
5295 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5296 gimple_call_arg (stmt, 0),
5297 gimple_assign_lhs (g),
5298 gimple_call_arg (stmt, 2),
5299 build_int_cst (integer_type_node, flag),
5300 gimple_call_arg (stmt, 4),
5301 gimple_call_arg (stmt, 5));
5302 tree lhs = make_ssa_name (ctype);
5303 gimple_call_set_lhs (g, lhs);
5304 gimple_move_vops (g, stmt);
5305 tree oldlhs = gimple_call_lhs (stmt);
5306 if (stmt_can_throw_internal (cfun, stmt))
5308 throws = true;
5309 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5311 gimple_call_set_nothrow (as_a <gcall *> (g),
5312 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5313 gimple_call_set_lhs (stmt, NULL_TREE);
5314 gsi_replace (gsi, g, true);
5315 if (oldlhs)
5317 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5318 build1 (IMAGPART_EXPR, itype, lhs));
5319 if (throws)
5321 gsi_insert_on_edge_immediate (e, g);
5322 *gsi = gsi_for_stmt (g);
5324 else
5325 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5326 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5327 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5329 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5330 build1 (REALPART_EXPR, itype, lhs));
5331 if (throws && oldlhs == NULL_TREE)
5333 gsi_insert_on_edge_immediate (e, g);
5334 *gsi = gsi_for_stmt (g);
5336 else
5337 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5338 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5340 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5341 VIEW_CONVERT_EXPR,
5342 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5343 gimple_assign_lhs (g)));
5344 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5346 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5347 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5348 *gsi = gsiret;
5351 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5352 doesn't fit into TYPE. The test for overflow should be regardless of
5353 -fwrapv, and even for unsigned types. */
5355 bool
5356 arith_overflowed_p (enum tree_code code, const_tree type,
5357 const_tree arg0, const_tree arg1)
5359 widest2_int warg0 = widest2_int_cst (arg0);
5360 widest2_int warg1 = widest2_int_cst (arg1);
5361 widest2_int wres;
5362 switch (code)
5364 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5365 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5366 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5367 default: gcc_unreachable ();
5369 signop sign = TYPE_SIGN (type);
5370 if (sign == UNSIGNED && wi::neg_p (wres))
5371 return true;
5372 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5375 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5376 for the memory it references, otherwise return null. VECTYPE is the
5377 type of the memory vector. */
5379 static tree
5380 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5382 tree ptr = gimple_call_arg (call, 0);
5383 tree alias_align = gimple_call_arg (call, 1);
5384 tree mask = gimple_call_arg (call, 2);
5385 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5386 return NULL_TREE;
5388 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5389 if (TYPE_ALIGN (vectype) != align)
5390 vectype = build_aligned_type (vectype, align);
5391 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5392 return fold_build2 (MEM_REF, vectype, ptr, offset);
5395 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5397 static bool
5398 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5400 tree lhs = gimple_call_lhs (call);
5401 if (!lhs)
5402 return false;
5404 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5406 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5407 gimple_set_location (new_stmt, gimple_location (call));
5408 gimple_move_vops (new_stmt, call);
5409 gsi_replace (gsi, new_stmt, false);
5410 return true;
5412 return false;
5415 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5417 static bool
5418 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5420 tree rhs = gimple_call_arg (call, 3);
5421 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5423 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5424 gimple_set_location (new_stmt, gimple_location (call));
5425 gimple_move_vops (new_stmt, call);
5426 gsi_replace (gsi, new_stmt, false);
5427 return true;
5429 return false;
5432 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5433 The statement may be replaced by another statement, e.g., if the call
5434 simplifies to a constant value. Return true if any changes were made.
5435 It is assumed that the operands have been previously folded. */
5437 static bool
5438 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5440 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5441 tree callee;
5442 bool changed = false;
5444 /* Check for virtual calls that became direct calls. */
5445 callee = gimple_call_fn (stmt);
5446 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5448 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5450 if (dump_file && virtual_method_call_p (callee)
5451 && !possible_polymorphic_call_target_p
5452 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5453 (OBJ_TYPE_REF_EXPR (callee)))))
5455 fprintf (dump_file,
5456 "Type inheritance inconsistent devirtualization of ");
5457 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5458 fprintf (dump_file, " to ");
5459 print_generic_expr (dump_file, callee, TDF_SLIM);
5460 fprintf (dump_file, "\n");
5463 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5464 changed = true;
5466 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5468 bool final;
5469 vec <cgraph_node *>targets
5470 = possible_polymorphic_call_targets (callee, stmt, &final);
5471 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5473 tree lhs = gimple_call_lhs (stmt);
5474 if (dump_enabled_p ())
5476 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5477 "folding virtual function call to %s\n",
5478 targets.length () == 1
5479 ? targets[0]->name ()
5480 : "__builtin_unreachable");
5482 if (targets.length () == 1)
5484 tree fndecl = targets[0]->decl;
5485 gimple_call_set_fndecl (stmt, fndecl);
5486 changed = true;
5487 /* If changing the call to __cxa_pure_virtual
5488 or similar noreturn function, adjust gimple_call_fntype
5489 too. */
5490 if (gimple_call_noreturn_p (stmt)
5491 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5492 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5493 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5494 == void_type_node))
5495 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5496 /* If the call becomes noreturn, remove the lhs. */
5497 if (lhs
5498 && gimple_call_noreturn_p (stmt)
5499 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5500 || should_remove_lhs_p (lhs)))
5502 if (TREE_CODE (lhs) == SSA_NAME)
5504 tree var = create_tmp_var (TREE_TYPE (lhs));
5505 tree def = get_or_create_ssa_default_def (cfun, var);
5506 gimple *new_stmt = gimple_build_assign (lhs, def);
5507 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5509 gimple_call_set_lhs (stmt, NULL_TREE);
5511 maybe_remove_unused_call_args (cfun, stmt);
5513 else
5515 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5516 gimple *new_stmt = gimple_build_call (fndecl, 0);
5517 gimple_set_location (new_stmt, gimple_location (stmt));
5518 /* If the call had a SSA name as lhs morph that into
5519 an uninitialized value. */
5520 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5522 tree var = create_tmp_var (TREE_TYPE (lhs));
5523 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5524 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5525 set_ssa_default_def (cfun, var, lhs);
5527 gimple_move_vops (new_stmt, stmt);
5528 gsi_replace (gsi, new_stmt, false);
5529 return true;
5535 /* Check for indirect calls that became direct calls, and then
5536 no longer require a static chain. */
5537 if (gimple_call_chain (stmt))
5539 tree fn = gimple_call_fndecl (stmt);
5540 if (fn && !DECL_STATIC_CHAIN (fn))
5542 gimple_call_set_chain (stmt, NULL);
5543 changed = true;
5547 if (inplace)
5548 return changed;
5550 /* Check for builtins that CCP can handle using information not
5551 available in the generic fold routines. */
5552 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5554 if (gimple_fold_builtin (gsi))
5555 changed = true;
5557 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5559 changed |= targetm.gimple_fold_builtin (gsi);
5561 else if (gimple_call_internal_p (stmt))
5563 enum tree_code subcode = ERROR_MARK;
5564 tree result = NULL_TREE;
5565 bool cplx_result = false;
5566 tree overflow = NULL_TREE;
5567 switch (gimple_call_internal_fn (stmt))
5569 case IFN_BUILTIN_EXPECT:
5570 result = fold_builtin_expect (gimple_location (stmt),
5571 gimple_call_arg (stmt, 0),
5572 gimple_call_arg (stmt, 1),
5573 gimple_call_arg (stmt, 2),
5574 NULL_TREE);
5575 break;
5576 case IFN_UBSAN_OBJECT_SIZE:
5578 tree offset = gimple_call_arg (stmt, 1);
5579 tree objsize = gimple_call_arg (stmt, 2);
5580 if (integer_all_onesp (objsize)
5581 || (TREE_CODE (offset) == INTEGER_CST
5582 && TREE_CODE (objsize) == INTEGER_CST
5583 && tree_int_cst_le (offset, objsize)))
5585 replace_call_with_value (gsi, NULL_TREE);
5586 return true;
5589 break;
5590 case IFN_UBSAN_PTR:
5591 if (integer_zerop (gimple_call_arg (stmt, 1)))
5593 replace_call_with_value (gsi, NULL_TREE);
5594 return true;
5596 break;
5597 case IFN_UBSAN_BOUNDS:
5599 tree index = gimple_call_arg (stmt, 1);
5600 tree bound = gimple_call_arg (stmt, 2);
5601 if (TREE_CODE (index) == INTEGER_CST
5602 && TREE_CODE (bound) == INTEGER_CST)
5604 index = fold_convert (TREE_TYPE (bound), index);
5605 if (TREE_CODE (index) == INTEGER_CST
5606 && tree_int_cst_le (index, bound))
5608 replace_call_with_value (gsi, NULL_TREE);
5609 return true;
5613 break;
5614 case IFN_GOACC_DIM_SIZE:
5615 case IFN_GOACC_DIM_POS:
5616 result = fold_internal_goacc_dim (stmt);
5617 break;
5618 case IFN_UBSAN_CHECK_ADD:
5619 subcode = PLUS_EXPR;
5620 break;
5621 case IFN_UBSAN_CHECK_SUB:
5622 subcode = MINUS_EXPR;
5623 break;
5624 case IFN_UBSAN_CHECK_MUL:
5625 subcode = MULT_EXPR;
5626 break;
5627 case IFN_ADD_OVERFLOW:
5628 subcode = PLUS_EXPR;
5629 cplx_result = true;
5630 break;
5631 case IFN_SUB_OVERFLOW:
5632 subcode = MINUS_EXPR;
5633 cplx_result = true;
5634 break;
5635 case IFN_MUL_OVERFLOW:
5636 subcode = MULT_EXPR;
5637 cplx_result = true;
5638 break;
5639 case IFN_MASK_LOAD:
5640 changed |= gimple_fold_mask_load (gsi, stmt);
5641 break;
5642 case IFN_MASK_STORE:
5643 changed |= gimple_fold_mask_store (gsi, stmt);
5644 break;
5645 default:
5646 break;
5648 if (subcode != ERROR_MARK)
5650 tree arg0 = gimple_call_arg (stmt, 0);
5651 tree arg1 = gimple_call_arg (stmt, 1);
5652 tree type = TREE_TYPE (arg0);
5653 if (cplx_result)
5655 tree lhs = gimple_call_lhs (stmt);
5656 if (lhs == NULL_TREE)
5657 type = NULL_TREE;
5658 else
5659 type = TREE_TYPE (TREE_TYPE (lhs));
5661 if (type == NULL_TREE)
5663 /* x = y + 0; x = y - 0; x = y * 0; */
5664 else if (integer_zerop (arg1))
5665 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5666 /* x = 0 + y; x = 0 * y; */
5667 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5668 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5669 /* x = y - y; */
5670 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5671 result = integer_zero_node;
5672 /* x = y * 1; x = 1 * y; */
5673 else if (subcode == MULT_EXPR && integer_onep (arg1))
5674 result = arg0;
5675 else if (subcode == MULT_EXPR && integer_onep (arg0))
5676 result = arg1;
5677 else if (TREE_CODE (arg0) == INTEGER_CST
5678 && TREE_CODE (arg1) == INTEGER_CST)
5680 if (cplx_result)
5681 result = int_const_binop (subcode, fold_convert (type, arg0),
5682 fold_convert (type, arg1));
5683 else
5684 result = int_const_binop (subcode, arg0, arg1);
5685 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5687 if (cplx_result)
5688 overflow = build_one_cst (type);
5689 else
5690 result = NULL_TREE;
5693 if (result)
5695 if (result == integer_zero_node)
5696 result = build_zero_cst (type);
5697 else if (cplx_result && TREE_TYPE (result) != type)
5699 if (TREE_CODE (result) == INTEGER_CST)
5701 if (arith_overflowed_p (PLUS_EXPR, type, result,
5702 integer_zero_node))
5703 overflow = build_one_cst (type);
5705 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5706 && TYPE_UNSIGNED (type))
5707 || (TYPE_PRECISION (type)
5708 < (TYPE_PRECISION (TREE_TYPE (result))
5709 + (TYPE_UNSIGNED (TREE_TYPE (result))
5710 && !TYPE_UNSIGNED (type)))))
5711 result = NULL_TREE;
5712 if (result)
5713 result = fold_convert (type, result);
5718 if (result)
5720 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5721 result = drop_tree_overflow (result);
5722 if (cplx_result)
5724 if (overflow == NULL_TREE)
5725 overflow = build_zero_cst (TREE_TYPE (result));
5726 tree ctype = build_complex_type (TREE_TYPE (result));
5727 if (TREE_CODE (result) == INTEGER_CST
5728 && TREE_CODE (overflow) == INTEGER_CST)
5729 result = build_complex (ctype, result, overflow);
5730 else
5731 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5732 ctype, result, overflow);
5734 gimplify_and_update_call_from_tree (gsi, result);
5735 changed = true;
5739 return changed;
5743 /* Return true whether NAME has a use on STMT. */
5745 static bool
5746 has_use_on_stmt (tree name, gimple *stmt)
5748 imm_use_iterator iter;
5749 use_operand_p use_p;
5750 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5751 if (USE_STMT (use_p) == stmt)
5752 return true;
5753 return false;
5756 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5757 gimple_simplify.
5759 Replaces *GSI with the simplification result in RCODE and OPS
5760 and the associated statements in *SEQ. Does the replacement
5761 according to INPLACE and returns true if the operation succeeded. */
5763 static bool
5764 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5765 gimple_match_op *res_op,
5766 gimple_seq *seq, bool inplace)
5768 gimple *stmt = gsi_stmt (*gsi);
5769 tree *ops = res_op->ops;
5770 unsigned int num_ops = res_op->num_ops;
5772 /* Play safe and do not allow abnormals to be mentioned in
5773 newly created statements. See also maybe_push_res_to_seq.
5774 As an exception allow such uses if there was a use of the
5775 same SSA name on the old stmt. */
5776 for (unsigned int i = 0; i < num_ops; ++i)
5777 if (TREE_CODE (ops[i]) == SSA_NAME
5778 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5779 && !has_use_on_stmt (ops[i], stmt))
5780 return false;
5782 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5783 for (unsigned int i = 0; i < 2; ++i)
5784 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5785 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5786 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5787 return false;
5789 /* Don't insert new statements when INPLACE is true, even if we could
5790 reuse STMT for the final statement. */
5791 if (inplace && !gimple_seq_empty_p (*seq))
5792 return false;
5794 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5796 gcc_assert (res_op->code.is_tree_code ());
5797 auto code = tree_code (res_op->code);
5798 if (TREE_CODE_CLASS (code) == tcc_comparison
5799 /* GIMPLE_CONDs condition may not throw. */
5800 && (!flag_exceptions
5801 || !cfun->can_throw_non_call_exceptions
5802 || !operation_could_trap_p (code,
5803 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5804 false, NULL_TREE)))
5805 gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5806 else if (code == SSA_NAME)
5807 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5808 build_zero_cst (TREE_TYPE (ops[0])));
5809 else if (code == INTEGER_CST)
5811 if (integer_zerop (ops[0]))
5812 gimple_cond_make_false (cond_stmt);
5813 else
5814 gimple_cond_make_true (cond_stmt);
5816 else if (!inplace)
5818 tree res = maybe_push_res_to_seq (res_op, seq);
5819 if (!res)
5820 return false;
5821 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5822 build_zero_cst (TREE_TYPE (res)));
5824 else
5825 return false;
5826 if (dump_file && (dump_flags & TDF_DETAILS))
5828 fprintf (dump_file, "gimple_simplified to ");
5829 if (!gimple_seq_empty_p (*seq))
5830 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5831 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5832 0, TDF_SLIM);
5834 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5835 return true;
5837 else if (is_gimple_assign (stmt)
5838 && res_op->code.is_tree_code ())
5840 auto code = tree_code (res_op->code);
5841 if (!inplace
5842 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5844 maybe_build_generic_op (res_op);
5845 gimple_assign_set_rhs_with_ops (gsi, code,
5846 res_op->op_or_null (0),
5847 res_op->op_or_null (1),
5848 res_op->op_or_null (2));
5849 if (dump_file && (dump_flags & TDF_DETAILS))
5851 fprintf (dump_file, "gimple_simplified to ");
5852 if (!gimple_seq_empty_p (*seq))
5853 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5854 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5855 0, TDF_SLIM);
5857 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5858 return true;
5861 else if (res_op->code.is_fn_code ()
5862 && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
5864 gcc_assert (num_ops == gimple_call_num_args (stmt));
5865 for (unsigned int i = 0; i < num_ops; ++i)
5866 gimple_call_set_arg (stmt, i, ops[i]);
5867 if (dump_file && (dump_flags & TDF_DETAILS))
5869 fprintf (dump_file, "gimple_simplified to ");
5870 if (!gimple_seq_empty_p (*seq))
5871 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5872 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5874 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5875 return true;
5877 else if (!inplace)
5879 if (gimple_has_lhs (stmt))
5881 tree lhs = gimple_get_lhs (stmt);
5882 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5883 return false;
5884 if (dump_file && (dump_flags & TDF_DETAILS))
5886 fprintf (dump_file, "gimple_simplified to ");
5887 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5889 gsi_replace_with_seq_vops (gsi, *seq);
5890 return true;
5892 else
5893 gcc_unreachable ();
5896 return false;
5899 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5901 static bool
5902 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5904 bool res = false;
5905 tree *orig_t = t;
5907 if (TREE_CODE (*t) == ADDR_EXPR)
5908 t = &TREE_OPERAND (*t, 0);
5910 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5911 generic vector extension. The actual vector referenced is
5912 view-converted to an array type for this purpose. If the index
5913 is constant the canonical representation in the middle-end is a
5914 BIT_FIELD_REF so re-write the former to the latter here. */
5915 if (TREE_CODE (*t) == ARRAY_REF
5916 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5917 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5918 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5920 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5921 if (VECTOR_TYPE_P (vtype))
5923 tree low = array_ref_low_bound (*t);
5924 if (TREE_CODE (low) == INTEGER_CST)
5926 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5928 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5929 wi::to_widest (low));
5930 idx = wi::mul (idx, wi::to_widest
5931 (TYPE_SIZE (TREE_TYPE (*t))));
5932 widest_int ext
5933 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5934 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5936 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5937 TREE_TYPE (*t),
5938 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5939 TYPE_SIZE (TREE_TYPE (*t)),
5940 wide_int_to_tree (bitsizetype, idx));
5941 res = true;
5948 while (handled_component_p (*t))
5949 t = &TREE_OPERAND (*t, 0);
5951 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5952 of invariant addresses into a SSA name MEM_REF address. */
5953 if (TREE_CODE (*t) == MEM_REF
5954 || TREE_CODE (*t) == TARGET_MEM_REF)
5956 tree addr = TREE_OPERAND (*t, 0);
5957 if (TREE_CODE (addr) == ADDR_EXPR
5958 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5959 || handled_component_p (TREE_OPERAND (addr, 0))))
5961 tree base;
5962 poly_int64 coffset;
5963 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5964 &coffset);
5965 if (!base)
5967 if (is_debug)
5968 return false;
5969 gcc_unreachable ();
5972 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5973 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5974 TREE_OPERAND (*t, 1),
5975 size_int (coffset));
5976 res = true;
5978 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5979 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5982 /* Canonicalize back MEM_REFs to plain reference trees if the object
5983 accessed is a decl that has the same access semantics as the MEM_REF. */
5984 if (TREE_CODE (*t) == MEM_REF
5985 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5986 && integer_zerop (TREE_OPERAND (*t, 1))
5987 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5989 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5990 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5991 if (/* Same volatile qualification. */
5992 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5993 /* Same TBAA behavior with -fstrict-aliasing. */
5994 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
5995 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
5996 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
5997 /* Same alignment. */
5998 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
5999 /* We have to look out here to not drop a required conversion
6000 from the rhs to the lhs if *t appears on the lhs or vice-versa
6001 if it appears on the rhs. Thus require strict type
6002 compatibility. */
6003 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6005 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6006 res = true;
6010 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6011 && TREE_CODE (*t) == MEM_REF
6012 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6014 tree base;
6015 poly_int64 coffset;
6016 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6017 &coffset);
6018 if (base)
6020 gcc_assert (TREE_CODE (base) == MEM_REF);
6021 poly_int64 moffset;
6022 if (mem_ref_offset (base).to_shwi (&moffset))
6024 coffset += moffset;
6025 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6027 coffset += moffset;
6028 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6029 return true;
6035 /* Canonicalize TARGET_MEM_REF in particular with respect to
6036 the indexes becoming constant. */
6037 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6039 tree tem = maybe_fold_tmr (*t);
6040 if (tem)
6042 *t = tem;
6043 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6044 recompute_tree_invariant_for_addr_expr (*orig_t);
6045 res = true;
6049 return res;
6052 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6053 distinguishes both cases. */
6055 static bool
6056 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6058 bool changed = false;
6059 gimple *stmt = gsi_stmt (*gsi);
6060 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6061 unsigned i;
6062 fold_defer_overflow_warnings ();
6064 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6065 after propagation.
6066 ??? This shouldn't be done in generic folding but in the
6067 propagation helpers which also know whether an address was
6068 propagated.
6069 Also canonicalize operand order. */
6070 switch (gimple_code (stmt))
6072 case GIMPLE_ASSIGN:
6073 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6075 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6076 if ((REFERENCE_CLASS_P (*rhs)
6077 || TREE_CODE (*rhs) == ADDR_EXPR)
6078 && maybe_canonicalize_mem_ref_addr (rhs))
6079 changed = true;
6080 tree *lhs = gimple_assign_lhs_ptr (stmt);
6081 if (REFERENCE_CLASS_P (*lhs)
6082 && maybe_canonicalize_mem_ref_addr (lhs))
6083 changed = true;
6084 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6085 This cannot be done in maybe_canonicalize_mem_ref_addr
6086 as the gimple now has two operands rather than one.
6087 The same reason why this can't be done in
6088 maybe_canonicalize_mem_ref_addr is the same reason why
6089 this can't be done inplace. */
6090 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6092 tree inner = TREE_OPERAND (*rhs, 0);
6093 if (TREE_CODE (inner) == MEM_REF
6094 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6095 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6097 tree ptr = TREE_OPERAND (inner, 0);
6098 tree addon = TREE_OPERAND (inner, 1);
6099 addon = fold_convert (sizetype, addon);
6100 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6101 ptr, addon);
6102 changed = true;
6103 stmt = gsi_stmt (*gsi);
6107 else
6109 /* Canonicalize operand order. */
6110 enum tree_code code = gimple_assign_rhs_code (stmt);
6111 if (TREE_CODE_CLASS (code) == tcc_comparison
6112 || commutative_tree_code (code)
6113 || commutative_ternary_tree_code (code))
6115 tree rhs1 = gimple_assign_rhs1 (stmt);
6116 tree rhs2 = gimple_assign_rhs2 (stmt);
6117 if (tree_swap_operands_p (rhs1, rhs2))
6119 gimple_assign_set_rhs1 (stmt, rhs2);
6120 gimple_assign_set_rhs2 (stmt, rhs1);
6121 if (TREE_CODE_CLASS (code) == tcc_comparison)
6122 gimple_assign_set_rhs_code (stmt,
6123 swap_tree_comparison (code));
6124 changed = true;
6128 break;
6129 case GIMPLE_CALL:
6131 gcall *call = as_a<gcall *> (stmt);
6132 for (i = 0; i < gimple_call_num_args (call); ++i)
6134 tree *arg = gimple_call_arg_ptr (call, i);
6135 if (REFERENCE_CLASS_P (*arg)
6136 && maybe_canonicalize_mem_ref_addr (arg))
6137 changed = true;
6139 tree *lhs = gimple_call_lhs_ptr (call);
6140 if (*lhs
6141 && REFERENCE_CLASS_P (*lhs)
6142 && maybe_canonicalize_mem_ref_addr (lhs))
6143 changed = true;
6144 if (*lhs)
6146 combined_fn cfn = gimple_call_combined_fn (call);
6147 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6148 int opno = first_commutative_argument (ifn);
6149 if (opno >= 0)
6151 tree arg1 = gimple_call_arg (call, opno);
6152 tree arg2 = gimple_call_arg (call, opno + 1);
6153 if (tree_swap_operands_p (arg1, arg2))
6155 gimple_call_set_arg (call, opno, arg2);
6156 gimple_call_set_arg (call, opno + 1, arg1);
6157 changed = true;
6161 break;
6163 case GIMPLE_ASM:
6165 gasm *asm_stmt = as_a <gasm *> (stmt);
6166 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6168 tree link = gimple_asm_output_op (asm_stmt, i);
6169 tree op = TREE_VALUE (link);
6170 if (REFERENCE_CLASS_P (op)
6171 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6172 changed = true;
6174 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6176 tree link = gimple_asm_input_op (asm_stmt, i);
6177 tree op = TREE_VALUE (link);
6178 if ((REFERENCE_CLASS_P (op)
6179 || TREE_CODE (op) == ADDR_EXPR)
6180 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6181 changed = true;
6184 break;
6185 case GIMPLE_DEBUG:
6186 if (gimple_debug_bind_p (stmt))
6188 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6189 if (*val
6190 && (REFERENCE_CLASS_P (*val)
6191 || TREE_CODE (*val) == ADDR_EXPR)
6192 && maybe_canonicalize_mem_ref_addr (val, true))
6193 changed = true;
6195 break;
6196 case GIMPLE_COND:
6198 /* Canonicalize operand order. */
6199 tree lhs = gimple_cond_lhs (stmt);
6200 tree rhs = gimple_cond_rhs (stmt);
6201 if (tree_swap_operands_p (lhs, rhs))
6203 gcond *gc = as_a <gcond *> (stmt);
6204 gimple_cond_set_lhs (gc, rhs);
6205 gimple_cond_set_rhs (gc, lhs);
6206 gimple_cond_set_code (gc,
6207 swap_tree_comparison (gimple_cond_code (gc)));
6208 changed = true;
6211 default:;
6214 /* Dispatch to pattern-based folding. */
6215 if (!inplace
6216 || is_gimple_assign (stmt)
6217 || gimple_code (stmt) == GIMPLE_COND)
6219 gimple_seq seq = NULL;
6220 gimple_match_op res_op;
6221 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6222 valueize, valueize))
6224 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6225 changed = true;
6226 else
6227 gimple_seq_discard (seq);
6231 stmt = gsi_stmt (*gsi);
6233 /* Fold the main computation performed by the statement. */
6234 switch (gimple_code (stmt))
6236 case GIMPLE_ASSIGN:
6238 /* Try to canonicalize for boolean-typed X the comparisons
6239 X == 0, X == 1, X != 0, and X != 1. */
6240 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6241 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6243 tree lhs = gimple_assign_lhs (stmt);
6244 tree op1 = gimple_assign_rhs1 (stmt);
6245 tree op2 = gimple_assign_rhs2 (stmt);
6246 tree type = TREE_TYPE (op1);
6248 /* Check whether the comparison operands are of the same boolean
6249 type as the result type is.
6250 Check that second operand is an integer-constant with value
6251 one or zero. */
6252 if (TREE_CODE (op2) == INTEGER_CST
6253 && (integer_zerop (op2) || integer_onep (op2))
6254 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6256 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6257 bool is_logical_not = false;
6259 /* X == 0 and X != 1 is a logical-not.of X
6260 X == 1 and X != 0 is X */
6261 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6262 || (cmp_code == NE_EXPR && integer_onep (op2)))
6263 is_logical_not = true;
6265 if (is_logical_not == false)
6266 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6267 /* Only for one-bit precision typed X the transformation
6268 !X -> ~X is valid. */
6269 else if (TYPE_PRECISION (type) == 1)
6270 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6271 /* Otherwise we use !X -> X ^ 1. */
6272 else
6273 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6274 build_int_cst (type, 1));
6275 changed = true;
6276 break;
6280 unsigned old_num_ops = gimple_num_ops (stmt);
6281 tree lhs = gimple_assign_lhs (stmt);
6282 tree new_rhs = fold_gimple_assign (gsi);
6283 if (new_rhs
6284 && !useless_type_conversion_p (TREE_TYPE (lhs),
6285 TREE_TYPE (new_rhs)))
6286 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6287 if (new_rhs
6288 && (!inplace
6289 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6291 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6292 changed = true;
6294 break;
6297 case GIMPLE_CALL:
6298 changed |= gimple_fold_call (gsi, inplace);
6299 break;
6301 case GIMPLE_DEBUG:
6302 if (gimple_debug_bind_p (stmt))
6304 tree val = gimple_debug_bind_get_value (stmt);
6305 if (val && REFERENCE_CLASS_P (val))
6307 tree tem = maybe_fold_reference (val);
6308 if (tem)
6310 gimple_debug_bind_set_value (stmt, tem);
6311 changed = true;
6315 break;
6317 case GIMPLE_RETURN:
6319 greturn *ret_stmt = as_a<greturn *> (stmt);
6320 tree ret = gimple_return_retval(ret_stmt);
6322 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6324 tree val = valueize (ret);
6325 if (val && val != ret
6326 && may_propagate_copy (ret, val))
6328 gimple_return_set_retval (ret_stmt, val);
6329 changed = true;
6333 break;
6335 default:;
6338 stmt = gsi_stmt (*gsi);
6340 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6341 return changed;
6344 /* Valueziation callback that ends up not following SSA edges. */
6346 tree
6347 no_follow_ssa_edges (tree)
6349 return NULL_TREE;
6352 /* Valueization callback that ends up following single-use SSA edges only. */
6354 tree
6355 follow_single_use_edges (tree val)
6357 if (TREE_CODE (val) == SSA_NAME
6358 && !has_single_use (val))
6359 return NULL_TREE;
6360 return val;
6363 /* Valueization callback that follows all SSA edges. */
6365 tree
6366 follow_all_ssa_edges (tree val)
6368 return val;
6371 /* Fold the statement pointed to by GSI. In some cases, this function may
6372 replace the whole statement with a new one. Returns true iff folding
6373 makes any changes.
6374 The statement pointed to by GSI should be in valid gimple form but may
6375 be in unfolded state as resulting from for example constant propagation
6376 which can produce *&x = 0. */
6378 bool
6379 fold_stmt (gimple_stmt_iterator *gsi)
6381 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6384 bool
6385 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6387 return fold_stmt_1 (gsi, false, valueize);
6390 /* Perform the minimal folding on statement *GSI. Only operations like
6391 *&x created by constant propagation are handled. The statement cannot
6392 be replaced with a new one. Return true if the statement was
6393 changed, false otherwise.
6394 The statement *GSI should be in valid gimple form but may
6395 be in unfolded state as resulting from for example constant propagation
6396 which can produce *&x = 0. */
6398 bool
6399 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6401 gimple *stmt = gsi_stmt (*gsi);
6402 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6403 gcc_assert (gsi_stmt (*gsi) == stmt);
6404 return changed;
6407 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6408 if EXPR is null or we don't know how.
6409 If non-null, the result always has boolean type. */
6411 static tree
6412 canonicalize_bool (tree expr, bool invert)
6414 if (!expr)
6415 return NULL_TREE;
6416 else if (invert)
6418 if (integer_nonzerop (expr))
6419 return boolean_false_node;
6420 else if (integer_zerop (expr))
6421 return boolean_true_node;
6422 else if (TREE_CODE (expr) == SSA_NAME)
6423 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6424 build_int_cst (TREE_TYPE (expr), 0));
6425 else if (COMPARISON_CLASS_P (expr))
6426 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6427 boolean_type_node,
6428 TREE_OPERAND (expr, 0),
6429 TREE_OPERAND (expr, 1));
6430 else
6431 return NULL_TREE;
6433 else
6435 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6436 return expr;
6437 if (integer_nonzerop (expr))
6438 return boolean_true_node;
6439 else if (integer_zerop (expr))
6440 return boolean_false_node;
6441 else if (TREE_CODE (expr) == SSA_NAME)
6442 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6443 build_int_cst (TREE_TYPE (expr), 0));
6444 else if (COMPARISON_CLASS_P (expr))
6445 return fold_build2 (TREE_CODE (expr),
6446 boolean_type_node,
6447 TREE_OPERAND (expr, 0),
6448 TREE_OPERAND (expr, 1));
6449 else
6450 return NULL_TREE;
6454 /* Check to see if a boolean expression EXPR is logically equivalent to the
6455 comparison (OP1 CODE OP2). Check for various identities involving
6456 SSA_NAMEs. */
6458 static bool
6459 same_bool_comparison_p (const_tree expr, enum tree_code code,
6460 const_tree op1, const_tree op2)
6462 gimple *s;
6464 /* The obvious case. */
6465 if (TREE_CODE (expr) == code
6466 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6467 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6468 return true;
6470 /* Check for comparing (name, name != 0) and the case where expr
6471 is an SSA_NAME with a definition matching the comparison. */
6472 if (TREE_CODE (expr) == SSA_NAME
6473 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6475 if (operand_equal_p (expr, op1, 0))
6476 return ((code == NE_EXPR && integer_zerop (op2))
6477 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise see whether EXPR's defining statement computes exactly
   the comparison (OP1 CODE OP2). */
6478 s = SSA_NAME_DEF_STMT (expr);
6479 if (is_gimple_assign (s)
6480 && gimple_assign_rhs_code (s) == code
6481 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6482 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6483 return true;
6486 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6487 of name is a comparison, recurse. */
6488 if (TREE_CODE (op1) == SSA_NAME
6489 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6491 s = SSA_NAME_DEF_STMT (op1);
6492 if (is_gimple_assign (s)
6493 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6495 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) or (name == 1): same truth value as the comparison. */
6496 if ((c == NE_EXPR && integer_zerop (op2))
6497 || (c == EQ_EXPR && integer_nonzerop (op2)))
6498 return same_bool_comparison_p (expr, c,
6499 gimple_assign_rhs1 (s),
6500 gimple_assign_rhs2 (s));
/* (name == 0) or (name != 1): inverted truth value. */
6501 if ((c == EQ_EXPR && integer_zerop (op2))
6502 || (c == NE_EXPR && integer_nonzerop (op2)))
6503 return same_bool_comparison_p (expr,
6504 invert_tree_comparison (c, false),
6505 gimple_assign_rhs1 (s),
6506 gimple_assign_rhs2 (s));
6509 return false;
6512 /* Check to see if two boolean expressions OP1 and OP2 are logically
6513 equivalent. */
6515 static bool
6516 same_bool_result_p (const_tree op1, const_tree op2)
6518 /* Simple cases first. */
6519 if (operand_equal_p (op1, op2, 0))
6520 return true;
6522 /* Check the cases where at least one of the operands is a comparison.
6523 These are a bit smarter than operand_equal_p in that they apply some
6524 identities on SSA_NAMEs. */
6525 if (COMPARISON_CLASS_P (op2)
6526 && same_bool_comparison_p (op1, TREE_CODE (op2),
6527 TREE_OPERAND (op2, 0),
6528 TREE_OPERAND (op2, 1)))
6529 return true;
6530 if (COMPARISON_CLASS_P (op1)
6531 && same_bool_comparison_p (op2, TREE_CODE (op1),
6532 TREE_OPERAND (op1, 0),
6533 TREE_OPERAND (op1, 1)))
6534 return true;
6536 /* Default case. */
6537 return false;
6540 /* Forward declarations for the mutually recursive AND/OR comparison
   simplification helpers defined below. */
6542 static tree
6543 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6544 enum tree_code code2, tree op2a, tree op2b, basic_block);
6545 static tree
6546 and_var_with_comparison (tree type, tree var, bool invert,
6547 enum tree_code code2, tree op2a, tree op2b,
6548 basic_block);
6549 static tree
6550 and_var_with_comparison_1 (tree type, gimple *stmt,
6551 enum tree_code code2, tree op2a, tree op2b,
6552 basic_block);
6553 static tree
6554 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6555 enum tree_code code2, tree op2a, tree op2b,
6556 basic_block);
6557 static tree
6558 or_var_with_comparison (tree, tree var, bool invert,
6559 enum tree_code code2, tree op2a, tree op2b,
6560 basic_block);
6561 static tree
6562 or_var_with_comparison_1 (tree, gimple *stmt,
6563 enum tree_code code2, tree op2a, tree op2b,
6564 basic_block);
6566 /* Helper function for and_comparisons_1: try to simplify the AND of the
6567 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6568 If INVERT is true, invert the value of the VAR before doing the AND.
6569 Return NULL_EXPR if we can't simplify this to a single expression. */
6571 static tree
6572 and_var_with_comparison (tree type, tree var, bool invert,
6573 enum tree_code code2, tree op2a, tree op2b,
6574 basic_block outer_cond_bb)
6576 tree t;
6577 gimple *stmt = SSA_NAME_DEF_STMT (var);
6579 /* We can only deal with variables whose definitions are assignments. */
6580 if (!is_gimple_assign (stmt))
6581 return NULL_TREE;
6583 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6584 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6585 Then we only have to consider the simpler non-inverted cases. */
6586 if (invert)
6587 t = or_var_with_comparison_1 (type, stmt,
6588 invert_tree_comparison (code2, false),
6589 op2a, op2b, outer_cond_bb);
6590 else
6591 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6592 outer_cond_bb);
/* canonicalize_bool performs the final inversion requested via INVERT
   and gives any non-null result boolean type. */
6593 return canonicalize_bool (t, invert);
6596 /* Try to simplify the AND of the ssa variable defined by the assignment
6597 STMT with the comparison specified by (OP2A CODE2 OP2B).
6598 Return NULL_EXPR if we can't simplify this to a single expression. */
6600 static tree
6601 and_var_with_comparison_1 (tree type, gimple *stmt,
6602 enum tree_code code2, tree op2a, tree op2b,
6603 basic_block outer_cond_bb)
6605 tree var = gimple_assign_lhs (stmt);
/* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name whose truth
   (resp. falsity) is asserted by the second comparison, when it has
   the form (name != 0)/(name == 1) resp. (name == 0)/(name != 1). */
6606 tree true_test_var = NULL_TREE;
6607 tree false_test_var = NULL_TREE;
6608 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6610 /* Check for identities like (var AND (var == 0)) => false. */
6611 if (TREE_CODE (op2a) == SSA_NAME
6612 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6614 if ((code2 == NE_EXPR && integer_zerop (op2b))
6615 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6617 true_test_var = op2a;
6618 if (var == true_test_var)
6619 return var;
6621 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6622 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6624 false_test_var = op2a;
6625 if (var == false_test_var)
6626 return boolean_false_node;
6630 /* If the definition is a comparison, recurse on it. */
6631 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6633 tree t = and_comparisons_1 (type, innercode,
6634 gimple_assign_rhs1 (stmt),
6635 gimple_assign_rhs2 (stmt),
6636 code2,
6637 op2a,
6638 op2b, outer_cond_bb);
6639 if (t)
6640 return t;
6643 /* If the definition is an AND or OR expression, we may be able to
6644 simplify by reassociating. */
6645 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6646 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6648 tree inner1 = gimple_assign_rhs1 (stmt);
6649 tree inner2 = gimple_assign_rhs2 (stmt);
6650 gimple *s;
6651 tree t;
6652 tree partial = NULL_TREE;
6653 bool is_and = (innercode == BIT_AND_EXPR);
6655 /* Check for boolean identities that don't require recursive examination
6656 of inner1/inner2:
6657 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6658 inner1 AND (inner1 OR inner2) => inner1
6659 !inner1 AND (inner1 AND inner2) => false
6660 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6661 Likewise for similar cases involving inner2. */
6662 if (inner1 == true_test_var)
6663 return (is_and ? var : inner1);
6664 else if (inner2 == true_test_var)
6665 return (is_and ? var : inner2);
6666 else if (inner1 == false_test_var)
6667 return (is_and
6668 ? boolean_false_node
6669 : and_var_with_comparison (type, inner2, false, code2, op2a,
6670 op2b, outer_cond_bb));
6671 else if (inner2 == false_test_var)
6672 return (is_and
6673 ? boolean_false_node
6674 : and_var_with_comparison (type, inner1, false, code2, op2a,
6675 op2b, outer_cond_bb));
6677 /* Next, redistribute/reassociate the AND across the inner tests.
6678 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6679 if (TREE_CODE (inner1) == SSA_NAME
6680 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6681 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6682 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6683 gimple_assign_rhs1 (s),
6684 gimple_assign_rhs2 (s),
6685 code2, op2a, op2b,
6686 outer_cond_bb)))
6688 /* Handle the AND case, where we are reassociating:
6689 (inner1 AND inner2) AND (op2a code2 op2b)
6690 => (t AND inner2)
6691 If the partial result t is a constant, we win. Otherwise
6692 continue on to try reassociating with the other inner test. */
6693 if (is_and)
6695 if (integer_onep (t))
6696 return inner2;
6697 else if (integer_zerop (t))
6698 return boolean_false_node;
6701 /* Handle the OR case, where we are redistributing:
6702 (inner1 OR inner2) AND (op2a code2 op2b)
6703 => (t OR (inner2 AND (op2a code2 op2b))) */
6704 else if (integer_onep (t))
6705 return boolean_true_node;
6707 /* Save partial result for later. */
6708 partial = t;
6711 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6712 if (TREE_CODE (inner2) == SSA_NAME
6713 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6714 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6715 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6716 gimple_assign_rhs1 (s),
6717 gimple_assign_rhs2 (s),
6718 code2, op2a, op2b,
6719 outer_cond_bb)))
6721 /* Handle the AND case, where we are reassociating:
6722 (inner1 AND inner2) AND (op2a code2 op2b)
6723 => (inner1 AND t) */
6724 if (is_and)
6726 if (integer_onep (t))
6727 return inner1;
6728 else if (integer_zerop (t))
6729 return boolean_false_node;
6730 /* If both are the same, we can apply the identity
6731 (x AND x) == x. */
6732 else if (partial && same_bool_result_p (t, partial))
6733 return t;
6736 /* Handle the OR case, where we are redistributing:
6737 (inner1 OR inner2) AND (op2a code2 op2b)
6738 => (t OR (inner1 AND (op2a code2 op2b)))
6739 => (t OR partial) */
6740 else
6742 if (integer_onep (t))
6743 return boolean_true_node;
6744 else if (partial)
6746 /* We already got a simplification for the other
6747 operand to the redistributed OR expression. The
6748 interesting case is when at least one is false.
6749 Or, if both are the same, we can apply the identity
6750 (x OR x) == x. */
6751 if (integer_zerop (partial))
6752 return t;
6753 else if (integer_zerop (t))
6754 return partial;
6755 else if (same_bool_result_p (t, partial))
6756 return t;
6761 return NULL_TREE;
6764 /* Try to simplify the AND of two comparisons defined by
6765 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6766 If this can be done without constructing an intermediate value,
6767 return the resulting tree; otherwise NULL_TREE is returned.
6768 This function is deliberately asymmetric as it recurses on SSA_DEFs
6769 in the first comparison but not the second. */
6771 static tree
6772 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6773 enum tree_code code2, tree op2a, tree op2b,
6774 basic_block outer_cond_bb)
6776 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6778 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6779 if (operand_equal_p (op1a, op2a, 0)
6780 && operand_equal_p (op1b, op2b, 0))
6782 /* Result will be either NULL_TREE, or a combined comparison. */
6783 tree t = combine_comparisons (UNKNOWN_LOCATION,
6784 TRUTH_ANDIF_EXPR, code1, code2,
6785 truth_type, op1a, op1b)
6786 if (t)
6787 return t;
6790 /* Likewise the swapped case of the above. */
6791 if (operand_equal_p (op1a, op2b, 0)
6792 && operand_equal_p (op1b, op2a, 0))
6794 /* Result will be either NULL_TREE, or a combined comparison. */
6795 tree t = combine_comparisons (UNKNOWN_LOCATION,
6796 TRUTH_ANDIF_EXPR, code1,
6797 swap_tree_comparison (code2),
6798 truth_type, op1a, op1b);
6799 if (t)
6800 return t;
6803 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6804 NAME's definition is a truth value. See if there are any simplifications
6805 that can be done against the NAME's definition. */
6806 if (TREE_CODE (op1a) == SSA_NAME
6807 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6808 && (integer_zerop (op1b) || integer_onep (op1b)))
6810 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6811 || (code1 == NE_EXPR && integer_onep (op1b)));
6812 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6813 switch (gimple_code (stmt))
6815 case GIMPLE_ASSIGN:
6816 /* Try to simplify by copy-propagating the definition. */
6817 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6818 op2b, outer_cond_bb);
6820 case GIMPLE_PHI:
6821 /* If every argument to the PHI produces the same result when
6822 ANDed with the second comparison, we win.
6823 Do not do this unless the type is bool since we need a bool
6824 result here anyway. */
6825 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6827 tree result = NULL_TREE;
6828 unsigned i;
6829 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6831 tree arg = gimple_phi_arg_def (stmt, i);
6833 /* If this PHI has itself as an argument, ignore it.
6834 If all the other args produce the same result,
6835 we're still OK. */
6836 if (arg == gimple_phi_result (stmt))
6837 continue;
6838 else if (TREE_CODE (arg) == INTEGER_CST)
6840 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6842 if (!result)
6843 result = boolean_false_node;
6844 else if (!integer_zerop (result))
6845 return NULL_TREE;
6847 else if (!result)
6848 result = fold_build2 (code2, boolean_type_node,
6849 op2a, op2b);
6850 else if (!same_bool_comparison_p (result,
6851 code2, op2a, op2b))
6852 return NULL_TREE;
6854 else if (TREE_CODE (arg) == SSA_NAME
6855 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6857 tree temp;
6858 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6859 /* In simple cases we can look through PHI nodes,
6860 but we have to be careful with loops.
6861 See PR49073. */
6862 if (! dom_info_available_p (CDI_DOMINATORS)
6863 || gimple_bb (def_stmt) == gimple_bb (stmt)
6864 || dominated_by_p (CDI_DOMINATORS,
6865 gimple_bb (def_stmt),
6866 gimple_bb (stmt)))
6867 return NULL_TREE;
6868 temp = and_var_with_comparison (type, arg, invert, code2,
6869 op2a, op2b,
6870 outer_cond_bb);
6871 if (!temp)
6872 return NULL_TREE;
6873 else if (!result)
6874 result = temp;
6875 else if (!same_bool_result_p (result, temp))
6876 return NULL_TREE;
6878 else
6879 return NULL_TREE;
6881 return result;
6884 default:
6885 break;
6888 return NULL_TREE;
6891 static basic_block fosa_bb;
6892 static tree
6893 follow_outer_ssa_edges (tree val)
6895 if (TREE_CODE (val) == SSA_NAME
6896 && !SSA_NAME_IS_DEFAULT_DEF (val))
6898 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
6899 if (!def_bb
6900 || def_bb == fosa_bb
6901 || (dom_info_available_p (CDI_DOMINATORS)
6902 && (def_bb == fosa_bb
6903 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
6904 return val;
6905 return NULL_TREE;
6907 return val;
6910 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6911 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6912 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6913 simplify this to a single expression. As we are going to lower the cost
6914 of building SSA names / gimple stmts significantly, we need to allocate
6915 them on the stack. This will cause the code to be a bit ugly. */
6917 static tree
6918 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6919 enum tree_code code1,
6920 tree op1a, tree op1b,
6921 enum tree_code code2, tree op2a,
6922 tree op2b,
6923 basic_block outer_cond_bb)
6925 /* Allocate gimple stmt1 on the stack. */
6926 gassign *stmt1
6927 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6928 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6929 gimple_assign_set_rhs_code (stmt1, code1);
6930 gimple_assign_set_rhs1 (stmt1, op1a);
6931 gimple_assign_set_rhs2 (stmt1, op1b);
6932 gimple_set_bb (stmt1, NULL);
6934 /* Allocate gimple stmt2 on the stack. */
6935 gassign *stmt2
6936 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6937 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6938 gimple_assign_set_rhs_code (stmt2, code2);
6939 gimple_assign_set_rhs1 (stmt2, op2a);
6940 gimple_assign_set_rhs2 (stmt2, op2b);
6941 gimple_set_bb (stmt2, NULL);
6943 /* Allocate SSA names(lhs1) on the stack. */
6944 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6945 memset (lhs1, 0, sizeof (tree_ssa_name));
6946 TREE_SET_CODE (lhs1, SSA_NAME);
6947 TREE_TYPE (lhs1) = type;
6948 init_ssa_name_imm_use (lhs1);
6950 /* Allocate SSA names(lhs2) on the stack. */
6951 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6952 memset (lhs2, 0, sizeof (tree_ssa_name));
6953 TREE_SET_CODE (lhs2, SSA_NAME);
6954 TREE_TYPE (lhs2) = type;
6955 init_ssa_name_imm_use (lhs2);
6957 gimple_assign_set_lhs (stmt1, lhs1);
6958 gimple_assign_set_lhs (stmt2, lhs2);
/* Ask the match.pd machinery to simplify CODE applied to the two fake
   comparison results.  With no OUTER_COND_BB every SSA edge may be
   followed; otherwise only defs visible from OUTER_COND_BB are. */
6960 gimple_match_op op (gimple_match_cond::UNCOND, code,
6961 type, gimple_assign_lhs (stmt1),
6962 gimple_assign_lhs (stmt2));
6963 fosa_bb = outer_cond_bb;
6964 if (op.resimplify (NULL, (!outer_cond_bb
6965 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
6967 if (gimple_simplified_result_is_gimple_val (&op))
6969 tree res = op.ops[0];
/* The stack-allocated LHS names must not escape; map them back to
   the corresponding original comparison. */
6970 if (res == lhs1)
6971 return build2 (code1, type, op1a, op1b);
6972 else if (res == lhs2)
6973 return build2 (code2, type, op2a, op2b);
6974 else
6975 return res;
6977 else if (op.code.is_tree_code ()
6978 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6980 tree op0 = op.ops[0];
6981 tree op1 = op.ops[1];
6982 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6983 return NULL_TREE; /* not simple */
6985 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6989 return NULL_TREE;
6992 /* Try to simplify the AND of two comparisons, specified by
6993 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6994 If this can be simplified to a single expression (without requiring
6995 introducing more SSA variables to hold intermediate values),
6996 return the resulting tree. Otherwise return NULL_TREE.
6997 If the result expression is non-null, it has boolean type. */
6999 tree
7000 maybe_fold_and_comparisons (tree type,
7001 enum tree_code code1, tree op1a, tree op1b,
7002 enum tree_code code2, tree op2a, tree op2b,
7003 basic_block outer_cond_bb)
7005 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7006 outer_cond_bb))
7007 return t;
7009 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7010 outer_cond_bb))
7011 return t;
7013 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7014 op1a, op1b, code2, op2a,
7015 op2b, outer_cond_bb))
7016 return t;
7018 return NULL_TREE;
7021 /* Helper function for or_comparisons_1: try to simplify the OR of the
7022 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7023 If INVERT is true, invert the value of VAR before doing the OR.
7024 Return NULL_EXPR if we can't simplify this to a single expression. */
7026 static tree
7027 or_var_with_comparison (tree type, tree var, bool invert,
7028 enum tree_code code2, tree op2a, tree op2b,
7029 basic_block outer_cond_bb)
7031 tree t;
7032 gimple *stmt = SSA_NAME_DEF_STMT (var);
7034 /* We can only deal with variables whose definitions are assignments. */
7035 if (!is_gimple_assign (stmt))
7036 return NULL_TREE;
7038 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7039 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7040 Then we only have to consider the simpler non-inverted cases. */
7041 if (invert)
7042 t = and_var_with_comparison_1 (type, stmt,
7043 invert_tree_comparison (code2, false),
7044 op2a, op2b, outer_cond_bb);
7045 else
7046 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7047 outer_cond_bb);
/* canonicalize_bool performs the final inversion requested via INVERT
   and gives any non-null result boolean type. */
7048 return canonicalize_bool (t, invert);
7051 /* Try to simplify the OR of the ssa variable defined by the assignment
7052 STMT with the comparison specified by (OP2A CODE2 OP2B).
7053 Return NULL_EXPR if we can't simplify this to a single expression.
   Mirror image of and_var_with_comparison_1, with AND and OR swapped. */
7055 static tree
7056 or_var_with_comparison_1 (tree type, gimple *stmt,
7057 enum tree_code code2, tree op2a, tree op2b,
7058 basic_block outer_cond_bb)
7060 tree var = gimple_assign_lhs (stmt);
/* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name whose truth
   (resp. falsity) is asserted by the second comparison. */
7061 tree true_test_var = NULL_TREE;
7062 tree false_test_var = NULL_TREE;
7063 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7065 /* Check for identities like (var OR (var != 0)) => true . */
7066 if (TREE_CODE (op2a) == SSA_NAME
7067 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7069 if ((code2 == NE_EXPR && integer_zerop (op2b))
7070 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7072 true_test_var = op2a;
7073 if (var == true_test_var)
7074 return var;
7076 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7077 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7079 false_test_var = op2a;
7080 if (var == false_test_var)
7081 return boolean_true_node;
7085 /* If the definition is a comparison, recurse on it. */
7086 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7088 tree t = or_comparisons_1 (type, innercode,
7089 gimple_assign_rhs1 (stmt),
7090 gimple_assign_rhs2 (stmt),
7091 code2, op2a, op2b, outer_cond_bb);
7092 if (t)
7093 return t;
7096 /* If the definition is an AND or OR expression, we may be able to
7097 simplify by reassociating. */
7098 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7099 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7101 tree inner1 = gimple_assign_rhs1 (stmt);
7102 tree inner2 = gimple_assign_rhs2 (stmt);
7103 gimple *s;
7104 tree t;
7105 tree partial = NULL_TREE;
7106 bool is_or = (innercode == BIT_IOR_EXPR);
7108 /* Check for boolean identities that don't require recursive examination
7109 of inner1/inner2:
7110 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7111 inner1 OR (inner1 AND inner2) => inner1
7112 !inner1 OR (inner1 OR inner2) => true
7113 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7115 if (inner1 == true_test_var)
7116 return (is_or ? var : inner1);
7117 else if (inner2 == true_test_var)
7118 return (is_or ? var : inner2);
7119 else if (inner1 == false_test_var)
7120 return (is_or
7121 ? boolean_true_node
7122 : or_var_with_comparison (type, inner2, false, code2, op2a,
7123 op2b, outer_cond_bb));
7124 else if (inner2 == false_test_var)
7125 return (is_or
7126 ? boolean_true_node
7127 : or_var_with_comparison (type, inner1, false, code2, op2a,
7128 op2b, outer_cond_bb));
7130 /* Next, redistribute/reassociate the OR across the inner tests.
7131 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7132 if (TREE_CODE (inner1) == SSA_NAME
7133 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7134 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7135 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7136 gimple_assign_rhs1 (s),
7137 gimple_assign_rhs2 (s),
7138 code2, op2a, op2b,
7139 outer_cond_bb)))
7141 /* Handle the OR case, where we are reassociating:
7142 (inner1 OR inner2) OR (op2a code2 op2b)
7143 => (t OR inner2)
7144 If the partial result t is a constant, we win. Otherwise
7145 continue on to try reassociating with the other inner test. */
7146 if (is_or)
7148 if (integer_onep (t))
7149 return boolean_true_node;
7150 else if (integer_zerop (t))
7151 return inner2;
7154 /* Handle the AND case, where we are redistributing:
7155 (inner1 AND inner2) OR (op2a code2 op2b)
7156 => (t AND (inner2 OR (op2a code op2b))) */
7157 else if (integer_zerop (t))
7158 return boolean_false_node;
7160 /* Save partial result for later. */
7161 partial = t;
7164 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7165 if (TREE_CODE (inner2) == SSA_NAME
7166 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7167 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7168 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7169 gimple_assign_rhs1 (s),
7170 gimple_assign_rhs2 (s),
7171 code2, op2a, op2b,
7172 outer_cond_bb)))
7174 /* Handle the OR case, where we are reassociating:
7175 (inner1 OR inner2) OR (op2a code2 op2b)
7176 => (inner1 OR t)
7177 => (t OR partial) */
7178 if (is_or)
7180 if (integer_zerop (t))
7181 return inner1;
7182 else if (integer_onep (t))
7183 return boolean_true_node;
7184 /* If both are the same, we can apply the identity
7185 (x OR x) == x. */
7186 else if (partial && same_bool_result_p (t, partial))
7187 return t;
7190 /* Handle the AND case, where we are redistributing:
7191 (inner1 AND inner2) OR (op2a code2 op2b)
7192 => (t AND (inner1 OR (op2a code2 op2b)))
7193 => (t AND partial) */
7194 else
7196 if (integer_zerop (t))
7197 return boolean_false_node;
7198 else if (partial)
7200 /* We already got a simplification for the other
7201 operand to the redistributed AND expression. The
7202 interesting case is when at least one is true.
7203 Or, if both are the same, we can apply the identity
7204 (x AND x) == x. */
7205 if (integer_onep (partial))
7206 return t;
7207 else if (integer_onep (t))
7208 return partial;
7209 else if (same_bool_result_p (t, partial))
7210 return t;
7215 return NULL_TREE;
7218 /* Try to simplify the OR of two comparisons defined by
7219 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7220 If this can be done without constructing an intermediate value,
7221 return the resulting tree; otherwise NULL_TREE is returned.
7222 This function is deliberately asymmetric as it recurses on SSA_DEFs
7223 in the first comparison but not the second. */
7225 static tree
7226 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7227 enum tree_code code2, tree op2a, tree op2b,
7228 basic_block outer_cond_bb)
7230 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7232 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7233 if (operand_equal_p (op1a, op2a, 0)
7234 && operand_equal_p (op1b, op2b, 0))
7236 /* Result will be either NULL_TREE, or a combined comparison. */
7237 tree t = combine_comparisons (UNKNOWN_LOCATION,
7238 TRUTH_ORIF_EXPR, code1, code2,
7239 truth_type, op1a, op1b)
7240 if (t)
7241 return t;
7244 /* Likewise the swapped case of the above. */
7245 if (operand_equal_p (op1a, op2b, 0)
7246 && operand_equal_p (op1b, op2a, 0))
7248 /* Result will be either NULL_TREE, or a combined comparison. */
7249 tree t = combine_comparisons (UNKNOWN_LOCATION,
7250 TRUTH_ORIF_EXPR, code1,
7251 swap_tree_comparison (code2),
7252 truth_type, op1a, op1b);
7253 if (t)
7254 return t;
7257 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7258 NAME's definition is a truth value. See if there are any simplifications
7259 that can be done against the NAME's definition. */
7260 if (TREE_CODE (op1a) == SSA_NAME
7261 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7262 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison really tests that NAME is
   false, i.e. (NAME == 0) or (NAME != 1).  */
7264 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7265 || (code1 == NE_EXPR && integer_onep (op1b)));
7266 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7267 switch (gimple_code (stmt))
7269 case GIMPLE_ASSIGN:
7270 /* Try to simplify by copy-propagating the definition. */
7271 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7272 op2b, outer_cond_bb);
7274 case GIMPLE_PHI:
7275 /* If every argument to the PHI produces the same result when
7276 ORed with the second comparison, we win.
7277 Do not do this unless the type is bool since we need a bool
7278 result here anyway. */
7279 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7281 tree result = NULL_TREE;
7282 unsigned i;
7283 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7285 tree arg = gimple_phi_arg_def (stmt, i);
7287 /* If this PHI has itself as an argument, ignore it.
7288 If all the other args produce the same result,
7289 we're still OK. */
7290 if (arg == gimple_phi_result (stmt))
7291 continue;
7292 else if (TREE_CODE (arg) == INTEGER_CST)
7294 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7296 if (!result)
7297 result = boolean_true_node;
7298 else if (!integer_onep (result))
7299 return NULL_TREE;
7301 else if (!result)
7302 result = fold_build2 (code2, boolean_type_node,
7303 op2a, op2b);
7304 else if (!same_bool_comparison_p (result,
7305 code2, op2a, op2b))
7306 return NULL_TREE;
7308 else if (TREE_CODE (arg) == SSA_NAME
7309 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7311 tree temp;
7312 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7313 /* In simple cases we can look through PHI nodes,
7314 but we have to be careful with loops.
7315 See PR49073. */
7316 if (! dom_info_available_p (CDI_DOMINATORS)
7317 || gimple_bb (def_stmt) == gimple_bb (stmt)
7318 || dominated_by_p (CDI_DOMINATORS,
7319 gimple_bb (def_stmt),
7320 gimple_bb (stmt)))
7321 return NULL_TREE;
7322 temp = or_var_with_comparison (type, arg, invert, code2,
7323 op2a, op2b, outer_cond_bb);
7324 if (!temp)
7325 return NULL_TREE;
7326 else if (!result)
7327 result = temp;
7328 else if (!same_bool_result_p (result, temp))
7329 return NULL_TREE;
7331 else
7332 return NULL_TREE;
7334 return result;
7337 default:
7338 break;
7341 return NULL_TREE;
7344 /* Try to simplify the OR of two comparisons, specified by
7345 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7346 If this can be simplified to a single expression (without requiring
7347 introducing more SSA variables to hold intermediate values),
7348 return the resulting tree. Otherwise return NULL_TREE.
7349 If the result expression is non-null, it has boolean type. */
7351 tree
7352 maybe_fold_or_comparisons (tree type,
7353 enum tree_code code1, tree op1a, tree op1b,
7354 enum tree_code code2, tree op2a, tree op2b,
7355 basic_block outer_cond_bb)
7357 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7358 outer_cond_bb))
7359 return t;
7361 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7362 outer_cond_bb))
7363 return t;
7365 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7366 op1a, op1b, code2, op2a,
7367 op2b, outer_cond_bb))
7368 return t;
7370 return NULL_TREE;
7373 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7375 Either NULL_TREE, a simplified but non-constant or a constant
7376 is returned.
7378 ??? This should go into a gimple-fold-inline.h file to be eventually
7379 privatized with the single valueize function used in the various TUs
7380 to avoid the indirect function call overhead. */
7382 tree
7383 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7384 tree (*gvalueize) (tree))
7386 gimple_match_op res_op;
7387 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7388 edges if there are intermediate VARYING defs. For this reason
7389 do not follow SSA edges here even though SCCVN can technically
7390 just deal fine with that. */
7391 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7393 tree res = NULL_TREE;
7394 if (gimple_simplified_result_is_gimple_val (&res_op))
7395 res = res_op.ops[0];
7396 else if (mprts_hook)
7397 res = mprts_hook (&res_op);
7398 if (res)
7400 if (dump_file && dump_flags & TDF_DETAILS)
7402 fprintf (dump_file, "Match-and-simplified ");
7403 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7404 fprintf (dump_file, " to ");
7405 print_generic_expr (dump_file, res);
7406 fprintf (dump_file, "\n");
7408 return res;
7412 location_t loc = gimple_location (stmt);
7413 switch (gimple_code (stmt))
7415 case GIMPLE_ASSIGN:
7417 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7419 switch (get_gimple_rhs_class (subcode))
7421 case GIMPLE_SINGLE_RHS:
7423 tree rhs = gimple_assign_rhs1 (stmt);
7424 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7426 if (TREE_CODE (rhs) == SSA_NAME)
7428 /* If the RHS is an SSA_NAME, return its known constant value,
7429 if any. */
7430 return (*valueize) (rhs);
7432 /* Handle propagating invariant addresses into address
7433 operations. */
7434 else if (TREE_CODE (rhs) == ADDR_EXPR
7435 && !is_gimple_min_invariant (rhs))
7437 poly_int64 offset = 0;
7438 tree base;
7439 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7440 &offset,
7441 valueize);
7442 if (base
7443 && (CONSTANT_CLASS_P (base)
7444 || decl_address_invariant_p (base)))
7445 return build_invariant_address (TREE_TYPE (rhs),
7446 base, offset);
7448 else if (TREE_CODE (rhs) == CONSTRUCTOR
7449 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7450 && known_eq (CONSTRUCTOR_NELTS (rhs),
7451 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7453 unsigned i, nelts;
7454 tree val;
7456 nelts = CONSTRUCTOR_NELTS (rhs);
7457 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7460 val = (*valueize) (val);
7461 if (TREE_CODE (val) == INTEGER_CST
7462 || TREE_CODE (val) == REAL_CST
7463 || TREE_CODE (val) == FIXED_CST)
7464 vec.quick_push (val);
7465 else
7466 return NULL_TREE;
7469 return vec.build ();
7471 if (subcode == OBJ_TYPE_REF)
7473 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7474 /* If callee is constant, we can fold away the wrapper. */
7475 if (is_gimple_min_invariant (val))
7476 return val;
7479 if (kind == tcc_reference)
7481 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7482 || TREE_CODE (rhs) == REALPART_EXPR
7483 || TREE_CODE (rhs) == IMAGPART_EXPR)
7484 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7486 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7487 return fold_unary_loc (EXPR_LOCATION (rhs),
7488 TREE_CODE (rhs),
7489 TREE_TYPE (rhs), val);
7491 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7492 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7494 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7495 return fold_ternary_loc (EXPR_LOCATION (rhs),
7496 TREE_CODE (rhs),
7497 TREE_TYPE (rhs), val,
7498 TREE_OPERAND (rhs, 1),
7499 TREE_OPERAND (rhs, 2));
7501 else if (TREE_CODE (rhs) == MEM_REF
7502 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7504 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7505 if (TREE_CODE (val) == ADDR_EXPR
7506 && is_gimple_min_invariant (val))
7508 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7509 unshare_expr (val),
7510 TREE_OPERAND (rhs, 1));
7511 if (tem)
7512 rhs = tem;
7515 return fold_const_aggregate_ref_1 (rhs, valueize);
7517 else if (kind == tcc_declaration)
7518 return get_symbol_constant_value (rhs);
7519 return rhs;
7522 case GIMPLE_UNARY_RHS:
7523 return NULL_TREE;
7525 case GIMPLE_BINARY_RHS:
7526 /* Translate &x + CST into an invariant form suitable for
7527 further propagation. */
7528 if (subcode == POINTER_PLUS_EXPR)
7530 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7531 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7532 if (TREE_CODE (op0) == ADDR_EXPR
7533 && TREE_CODE (op1) == INTEGER_CST)
7535 tree off = fold_convert (ptr_type_node, op1);
7536 return build1_loc
7537 (loc, ADDR_EXPR, TREE_TYPE (op0),
7538 fold_build2 (MEM_REF,
7539 TREE_TYPE (TREE_TYPE (op0)),
7540 unshare_expr (op0), off));
7543 /* Canonicalize bool != 0 and bool == 0 appearing after
7544 valueization. While gimple_simplify handles this
7545 it can get confused by the ~X == 1 -> X == 0 transform
7546 which we can't reduce to a SSA name or a constant
7547 (and we have no way to tell gimple_simplify to not
7548 consider those transforms in the first place). */
7549 else if (subcode == EQ_EXPR
7550 || subcode == NE_EXPR)
7552 tree lhs = gimple_assign_lhs (stmt);
7553 tree op0 = gimple_assign_rhs1 (stmt);
7554 if (useless_type_conversion_p (TREE_TYPE (lhs),
7555 TREE_TYPE (op0)))
7557 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7558 op0 = (*valueize) (op0);
7559 if (TREE_CODE (op0) == INTEGER_CST)
7560 std::swap (op0, op1);
7561 if (TREE_CODE (op1) == INTEGER_CST
7562 && ((subcode == NE_EXPR && integer_zerop (op1))
7563 || (subcode == EQ_EXPR && integer_onep (op1))))
7564 return op0;
7567 return NULL_TREE;
7569 case GIMPLE_TERNARY_RHS:
7571 /* Handle ternary operators that can appear in GIMPLE form. */
7572 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7573 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7574 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7575 return fold_ternary_loc (loc, subcode,
7576 TREE_TYPE (gimple_assign_lhs (stmt)),
7577 op0, op1, op2);
7580 default:
7581 gcc_unreachable ();
7585 case GIMPLE_CALL:
7587 tree fn;
7588 gcall *call_stmt = as_a <gcall *> (stmt);
/* Internal-function calls: map IFN_UBSAN_CHECK_{ADD,SUB,MUL} onto the
   corresponding arithmetic tree code (folded below only when the result
   provably does not overflow), and fold IFN_BUILTIN_EXPECT to its first
   argument when that is a constant.  */
7590 if (gimple_call_internal_p (stmt))
7592 enum tree_code subcode = ERROR_MARK;
7593 switch (gimple_call_internal_fn (stmt))
7595 case IFN_UBSAN_CHECK_ADD:
7596 subcode = PLUS_EXPR;
7597 break;
7598 case IFN_UBSAN_CHECK_SUB:
7599 subcode = MINUS_EXPR;
7600 break;
7601 case IFN_UBSAN_CHECK_MUL:
7602 subcode = MULT_EXPR;
7603 break;
7604 case IFN_BUILTIN_EXPECT:
7606 tree arg0 = gimple_call_arg (stmt, 0);
7607 tree op0 = (*valueize) (arg0);
7608 if (TREE_CODE (op0) == INTEGER_CST)
7609 return op0;
7610 return NULL_TREE;
7612 default:
7613 return NULL_TREE;
7615 tree arg0 = gimple_call_arg (stmt, 0);
7616 tree arg1 = gimple_call_arg (stmt, 1);
7617 tree op0 = (*valueize) (arg0);
7618 tree op1 = (*valueize) (arg1);
7620 if (TREE_CODE (op0) != INTEGER_CST
7621 || TREE_CODE (op1) != INTEGER_CST)
7623 switch (subcode)
7625 case MULT_EXPR:
7626 /* x * 0 = 0 * x = 0 without overflow. */
7627 if (integer_zerop (op0) || integer_zerop (op1))
7628 return build_zero_cst (TREE_TYPE (arg0));
7629 break;
7630 case MINUS_EXPR:
7631 /* y - y = 0 without overflow. */
7632 if (operand_equal_p (op0, op1, 0))
7633 return build_zero_cst (TREE_TYPE (arg0));
7634 break;
7635 default:
7636 break;
7639 tree res
7640 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7641 if (res
7642 && TREE_CODE (res) == INTEGER_CST
7643 && !TREE_OVERFLOW (res))
7644 return res;
7645 return NULL_TREE;
/* Ordinary calls: fold calls to recognized built-in functions with
   their arguments valueized.  */
7648 fn = (*valueize) (gimple_call_fn (stmt));
7649 if (TREE_CODE (fn) == ADDR_EXPR
7650 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7651 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7652 && gimple_builtin_call_types_compatible_p (stmt,
7653 TREE_OPERAND (fn, 0)))
7655 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7656 tree retval;
7657 unsigned i;
7658 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7659 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7660 retval = fold_builtin_call_array (loc,
7661 gimple_call_return_type (call_stmt),
7662 fn, gimple_call_num_args (stmt), args);
7663 if (retval)
7665 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7666 STRIP_NOPS (retval);
7667 retval = fold_convert (gimple_call_return_type (call_stmt),
7668 retval);
7670 return retval;
7672 return NULL_TREE;
7675 default:
7676 return NULL_TREE;
7680 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7681 Returns NULL_TREE if folding to a constant is not possible, otherwise
7682 returns a constant according to is_gimple_min_invariant. */
7684 tree
7685 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7687 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7688 if (res && is_gimple_min_invariant (res))
7689 return res;
7690 return NULL_TREE;
7694 /* The following set of functions are supposed to fold references using
7695 their constant initializers. */
7697 /* See if we can find constructor defining value of BASE.
7698 When we know the constructor with constant offset (such as
7699 base is array[40] and we do know constructor of array), then
7700 BIT_OFFSET is adjusted accordingly.
7702 As a special case, return error_mark_node when constructor
7703 is not explicitly available, but it is known to be zero
7704 such as 'static const int a;'. */
7705 static tree
7706 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7707 tree (*valueize)(tree))
7709 poly_int64 bit_offset2, size, max_size;
7710 bool reverse;
7712 if (TREE_CODE (base) == MEM_REF)
/* Fold the MEM_REF's constant offset into *BIT_OFFSET; give up if the
   combined offset does not fit a signed HOST_WIDE_INT.  */
7714 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7715 if (!boff.to_shwi (bit_offset))
7716 return NULL_TREE;
7718 if (valueize
7719 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7720 base = valueize (TREE_OPERAND (base, 0));
7721 if (!base || TREE_CODE (base) != ADDR_EXPR)
7722 return NULL_TREE;
7723 base = TREE_OPERAND (base, 0);
7725 else if (valueize
7726 && TREE_CODE (base) == SSA_NAME)
7727 base = valueize (base);
7729 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7730 DECL_INITIAL. If BASE is a nested reference into another
7731 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7732 the inner reference. */
7733 switch (TREE_CODE (base))
7735 case VAR_DECL:
7736 case CONST_DECL:
7738 tree init = ctor_for_folding (base);
7740 /* Our semantic is exact opposite of ctor_for_folding;
7741 NULL means unknown, while error_mark_node is 0. */
7742 if (init == error_mark_node)
7743 return NULL_TREE;
7744 if (!init)
7745 return error_mark_node;
7746 return init;
7749 case VIEW_CONVERT_EXPR:
7750 return get_base_constructor (TREE_OPERAND (base, 0),
7751 bit_offset, valueize);
7753 case ARRAY_REF:
7754 case COMPONENT_REF:
7755 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7756 &reverse);
7757 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7758 return NULL_TREE;
7759 *bit_offset += bit_offset2;
7760 return get_base_constructor (base, bit_offset, valueize);
7762 case CONSTRUCTOR:
7763 return base;
7765 default:
7766 if (CONSTANT_CLASS_P (base))
7767 return base;
7769 return NULL_TREE;
7773 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7774 to the memory at bit OFFSET. When non-null, TYPE is the expected
7775 type of the reference; otherwise the type of the referenced element
7776 is used instead. When SIZE is zero, attempt to fold a reference to
7777 the entire element which OFFSET refers to. Increment *SUBOFF by
7778 the bit offset of the accessed element. */
7780 static tree
7781 fold_array_ctor_reference (tree type, tree ctor,
7782 unsigned HOST_WIDE_INT offset,
7783 unsigned HOST_WIDE_INT size,
7784 tree from_decl,
7785 unsigned HOST_WIDE_INT *suboff)
7787 offset_int low_bound;
7788 offset_int elt_size;
7789 offset_int access_index;
7790 tree domain_type = NULL_TREE;
7791 HOST_WIDE_INT inner_offset;
7793 /* Compute low bound and elt size. */
7794 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7795 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7796 if (domain_type && TYPE_MIN_VALUE (domain_type))
7798 /* Static constructors for variably sized objects make no sense. */
7799 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7800 return NULL_TREE;
7801 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7803 else
7804 low_bound = 0;
7805 /* Static constructors for variably sized objects make no sense. */
7806 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7807 return NULL_TREE;
7808 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7810 /* When TYPE is non-null, verify that it specifies a constant-sized
7811 access of a multiple of the array element size. Avoid division
7812 by zero below when ELT_SIZE is zero, such as with the result of
7813 an initializer for a zero-length array or an empty struct. */
7814 if (elt_size == 0
7815 || (type
7816 && (!TYPE_SIZE_UNIT (type)
7817 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7818 return NULL_TREE;
7820 /* Compute the array index we look for. */
7821 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7822 elt_size);
7823 access_index += low_bound;
7825 /* And offset within the access. */
7826 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7828 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7829 if (size > elt_sz * BITS_PER_UNIT)
7831 /* native_encode_expr constraints. */
7832 if (size > MAX_BITSIZE_MODE_ANY_MODE
7833 || size % BITS_PER_UNIT != 0
7834 || inner_offset % BITS_PER_UNIT != 0
7835 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7836 return NULL_TREE;
7838 unsigned ctor_idx;
7839 tree val = get_array_ctor_element_at_index (ctor, access_index,
7840 &ctor_idx);
7841 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7842 return build_zero_cst (type);
7844 /* native-encode adjacent ctor elements. */
7845 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7846 unsigned bufoff = 0;
7847 offset_int index = 0;
7848 offset_int max_index = access_index;
7849 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7850 if (!val)
7851 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7852 else if (!CONSTANT_CLASS_P (val))
7853 return NULL_TREE;
7854 if (!elt->index)
7856 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7858 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7859 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7861 else
7862 index = max_index = wi::to_offset (elt->index);
7863 index = wi::umax (index, access_index);
/* Walk consecutive constructor elements, native-encoding each into BUF,
   until SIZE bits worth of bytes have been gathered; missing elements
   are treated as zero.  */
7866 if (bufoff + elt_sz > sizeof (buf))
7867 elt_sz = sizeof (buf) - bufoff;
7868 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7869 inner_offset / BITS_PER_UNIT);
7870 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7871 return NULL_TREE;
7872 inner_offset = 0;
7873 bufoff += len;
7875 access_index += 1;
7876 if (wi::cmpu (access_index, index) == 0)
7877 val = elt->value;
7878 else if (wi::cmpu (access_index, max_index) > 0)
7880 ctor_idx++;
7881 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7883 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7884 ++max_index;
7886 else
7888 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7889 index = 0;
7890 max_index = access_index;
7891 if (!elt->index)
7893 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7895 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7896 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7898 else
7899 index = max_index = wi::to_offset (elt->index);
7900 index = wi::umax (index, access_index);
7901 if (wi::cmpu (access_index, index) == 0)
7902 val = elt->value;
7903 else
7904 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7908 while (bufoff < size / BITS_PER_UNIT);
7909 *suboff += size;
7910 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7913 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7915 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7917 /* For the final reference to the entire accessed element
7918 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7919 may be null) in favor of the type of the element, and set
7920 SIZE to the size of the accessed element. */
7921 inner_offset = 0;
7922 type = TREE_TYPE (val);
7923 size = elt_sz * BITS_PER_UNIT;
7925 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7926 && TREE_CODE (val) == CONSTRUCTOR
7927 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7928 /* If this isn't the last element in the CTOR and a CTOR itself
7929 and it does not cover the whole object we are requesting give up
7930 since we're not set up for combining from multiple CTORs. */
7931 return NULL_TREE;
7933 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7934 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7935 suboff);
7938 /* Memory not explicitly mentioned in constructor is 0 (or
7939 the reference is out of range). */
7940 return type ? build_zero_cst (type) : NULL_TREE;
7943 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7944 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7945 is the expected type of the reference; otherwise the type of
7946 the referenced member is used instead. When SIZE is zero,
7947 attempt to fold a reference to the entire member which OFFSET
7948 refers to. Increment *SUBOFF by the bit offset
7949 of the accessed member. */
7951 static tree
7952 fold_nonarray_ctor_reference (tree type, tree ctor,
7953 unsigned HOST_WIDE_INT offset,
7954 unsigned HOST_WIDE_INT size,
7955 tree from_decl,
7956 unsigned HOST_WIDE_INT *suboff)
7958 unsigned HOST_WIDE_INT cnt;
7959 tree cfield, cval;
7961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7962 cval)
7964 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7965 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7966 tree field_size = DECL_SIZE (cfield);
7968 if (!field_size)
7970 /* Determine the size of the flexible array member from
7971 the size of the initializer provided for it. */
7972 field_size = TYPE_SIZE (TREE_TYPE (cval));
7975 /* Variable sized objects in static constructors makes no sense,
7976 but field_size can be NULL for flexible array members. */
7977 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7978 && TREE_CODE (byte_offset) == INTEGER_CST
7979 && (field_size != NULL_TREE
7980 ? TREE_CODE (field_size) == INTEGER_CST
7981 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7983 /* Compute bit offset of the field. */
7984 offset_int bitoffset
7985 = (wi::to_offset (field_offset)
7986 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7987 /* Compute bit offset where the field ends. */
7988 offset_int bitoffset_end;
7989 if (field_size != NULL_TREE)
7990 bitoffset_end = bitoffset + wi::to_offset (field_size);
7991 else
7992 bitoffset_end = 0;
7994 /* Compute the bit offset of the end of the desired access.
7995 As a special case, if the size of the desired access is
7996 zero, assume the access is to the entire field (and let
7997 the caller make any necessary adjustments by storing
7998 the actual bounds of the field in FIELDBOUNDS). */
7999 offset_int access_end = offset_int (offset);
8000 if (size)
8001 access_end += size;
8002 else
8003 access_end = bitoffset_end;
8005 /* Is there any overlap between the desired access at
8006 [OFFSET, OFFSET+SIZE) and the offset of the field within
8007 the object at [BITOFFSET, BITOFFSET_END)? */
8008 if (wi::cmps (access_end, bitoffset) > 0
8009 && (field_size == NULL_TREE
8010 || wi::lts_p (offset, bitoffset_end)))
8012 *suboff += bitoffset.to_uhwi ();
8014 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8016 /* For the final reference to the entire accessed member
8017 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8018 be null) in favor of the type of the member, and set
8019 SIZE to the size of the accessed member. */
8020 offset = bitoffset.to_uhwi ();
8021 type = TREE_TYPE (cval);
8022 size = (bitoffset_end - bitoffset).to_uhwi ();
8025 /* We do have overlap. Now see if the field is large enough
8026 to cover the access. Give up for accesses that extend
8027 beyond the end of the object or that span multiple fields. */
8028 if (wi::cmps (access_end, bitoffset_end) > 0)
8029 return NULL_TREE;
8030 if (offset < bitoffset)
8031 return NULL_TREE;
8033 offset_int inner_offset = offset_int (offset) - bitoffset;
8034 return fold_ctor_reference (type, cval,
8035 inner_offset.to_uhwi (), size,
8036 from_decl, suboff);
/* No field of the constructor overlapped the access: the memory is
   implicitly zero-initialized (when TYPE is known).  */
8040 if (!type)
8041 return NULL_TREE;
8043 return build_zero_cst (type);
8046 /* CTOR is value initializing memory. Fold a reference of TYPE and
8047 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8048 is zero, attempt to fold a reference to the entire subobject
8049 which OFFSET refers to. This is used when folding accesses to
8050 string members of aggregates. When non-null, set *SUBOFF to
8051 the bit offset of the accessed subobject. */
8053 tree
8054 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8055 const poly_uint64 &poly_size, tree from_decl,
8056 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8058 tree ret;
8060 /* We found the field with exact match. */
8061 if (type
8062 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8063 && known_eq (poly_offset, 0U))
8064 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8066 /* The remaining optimizations need a constant size and offset. */
8067 unsigned HOST_WIDE_INT size, offset;
8068 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8069 return NULL_TREE;
8071 /* We are at the end of walk, see if we can view convert the
8072 result. */
8073 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8074 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8075 && !compare_tree_int (TYPE_SIZE (type), size)
8076 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
8078 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8079 if (ret)
8081 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8082 if (ret)
8083 STRIP_USELESS_TYPE_CONVERSION (ret);
8085 return ret;
8087 /* For constants and byte-aligned/sized reads try to go through
8088 native_encode/interpret. */
8089 if (CONSTANT_CLASS_P (ctor)
8090 && BITS_PER_UNIT == 8
8091 && offset % BITS_PER_UNIT == 0
8092 && offset / BITS_PER_UNIT <= INT_MAX
8093 && size % BITS_PER_UNIT == 0
8094 && size <= MAX_BITSIZE_MODE_ANY_MODE
8095 && can_native_interpret_type_p (type))
8097 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8098 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8099 offset / BITS_PER_UNIT);
8100 if (len > 0)
8101 return native_interpret_expr (type, buf, len);
8103 if (TREE_CODE (ctor) == CONSTRUCTOR)
/* DUMMY also serves to detect the outermost call below: SUBOFF == &DUMMY
   iff the caller passed no SUBOFF.  */
8105 unsigned HOST_WIDE_INT dummy = 0;
8106 if (!suboff)
8107 suboff = &dummy;
8109 tree ret;
8110 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8111 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8112 ret = fold_array_ctor_reference (type, ctor, offset, size,
8113 from_decl, suboff);
8114 else
8115 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8116 from_decl, suboff);
8118 /* Fall back to native_encode_initializer. Needs to be done
8119 only in the outermost fold_ctor_reference call (because it itself
8120 recurses into CONSTRUCTORs) and doesn't update suboff. */
8121 if (ret == NULL_TREE
8122 && suboff == &dummy
8123 && BITS_PER_UNIT == 8
8124 && offset % BITS_PER_UNIT == 0
8125 && offset / BITS_PER_UNIT <= INT_MAX
8126 && size % BITS_PER_UNIT == 0
8127 && size <= MAX_BITSIZE_MODE_ANY_MODE
8128 && can_native_interpret_type_p (type))
8130 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8131 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8132 offset / BITS_PER_UNIT);
8133 if (len > 0)
8134 return native_interpret_expr (type, buf, len);
8137 return ret;
8140 return NULL_TREE;
8143 /* Return the tree representing the element referenced by T if T is an
8144 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
8145 names using VALUEIZE. Return NULL_TREE otherwise. */
8147 tree
8148 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8150 tree ctor, idx, base;
8151 poly_int64 offset, size, max_size;
8152 tree tem;
8153 bool reverse;
8155 if (TREE_THIS_VOLATILE (t))
8156 return NULL_TREE;
8158 if (DECL_P (t))
8159 return get_symbol_constant_value (t);
8161 tem = fold_read_from_constant_string (t);
8162 if (tem)
8163 return tem;
8165 switch (TREE_CODE (t))
8167 case ARRAY_REF:
8168 case ARRAY_RANGE_REF:
8169 /* Constant indexes are handled well by get_base_constructor.
8170 Only special case variable offsets.
8171 FIXME: This code can't handle nested references with variable indexes
8172 (they will be handled only by iteration of ccp). Perhaps we can bring
8173 get_ref_base_and_extent here and make it use a valueize callback. */
8174 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8175 && valueize
8176 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8177 && poly_int_tree_p (idx))
8179 tree low_bound, unit_size;
8181 /* If the resulting bit-offset is constant, track it. */
8182 if ((low_bound = array_ref_low_bound (t),
8183 poly_int_tree_p (low_bound))
8184 && (unit_size = array_ref_element_size (t),
8185 tree_fits_uhwi_p (unit_size)))
8187 poly_offset_int woffset
8188 = wi::sext (wi::to_poly_offset (idx)
8189 - wi::to_poly_offset (low_bound),
8190 TYPE_PRECISION (sizetype));
8191 woffset *= tree_to_uhwi (unit_size);
8192 woffset *= BITS_PER_UNIT;
8193 if (woffset.to_shwi (&offset))
8195 base = TREE_OPERAND (t, 0);
8196 ctor = get_base_constructor (base, &offset, valueize);
8197 /* Empty constructor. Always fold to 0. */
8198 if (ctor == error_mark_node)
8199 return build_zero_cst (TREE_TYPE (t));
8200 /* Out of bound array access. Value is undefined,
8201 but don't fold. */
8202 if (maybe_lt (offset, 0))
8203 return NULL_TREE;
8204 /* We cannot determine ctor. */
8205 if (!ctor)
8206 return NULL_TREE;
8207 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8208 tree_to_uhwi (unit_size)
8209 * BITS_PER_UNIT,
8210 base);
8214 /* Fallthru. */
8216 case COMPONENT_REF:
8217 case BIT_FIELD_REF:
8218 case TARGET_MEM_REF:
8219 case MEM_REF:
8220 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8221 ctor = get_base_constructor (base, &offset, valueize);
8223 /* Empty constructor. Always fold to 0. */
8224 if (ctor == error_mark_node)
8225 return build_zero_cst (TREE_TYPE (t));
8226 /* We do not know precise address. */
8227 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8228 return NULL_TREE;
8229 /* We cannot determine ctor. */
8230 if (!ctor)
8231 return NULL_TREE;
8233 /* Out of bound array access. Value is undefined, but don't fold. */
8234 if (maybe_lt (offset, 0))
8235 return NULL_TREE;
8237 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8238 if (tem)
8239 return tem;
8241 /* For bit field reads try to read the representative and
8242 adjust. */
8243 if (TREE_CODE (t) == COMPONENT_REF
8244 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8245 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8247 HOST_WIDE_INT csize, coffset;
8248 tree field = TREE_OPERAND (t, 1);
8249 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8250 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8251 && size.is_constant (&csize)
8252 && offset.is_constant (&coffset)
8253 && (coffset % BITS_PER_UNIT != 0
8254 || csize % BITS_PER_UNIT != 0)
8255 && !reverse
8256 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8258 poly_int64 bitoffset;
8259 poly_uint64 field_offset, repr_offset;
8260 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8261 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8262 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8263 else
8264 bitoffset = 0;
8265 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8266 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8267 HOST_WIDE_INT bitoff;
8268 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8269 - TYPE_PRECISION (TREE_TYPE (field)));
8270 if (bitoffset.is_constant (&bitoff)
8271 && bitoff >= 0
8272 && bitoff <= diff)
8274 offset -= bitoff;
8275 size = tree_to_uhwi (DECL_SIZE (repr));
8277 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8278 size, base);
8279 if (tem && TREE_CODE (tem) == INTEGER_CST)
8281 if (!BYTES_BIG_ENDIAN)
8282 tem = wide_int_to_tree (TREE_TYPE (field),
8283 wi::lrshift (wi::to_wide (tem),
8284 bitoff));
8285 else
8286 tem = wide_int_to_tree (TREE_TYPE (field),
8287 wi::lrshift (wi::to_wide (tem),
8288 diff - bitoff));
8289 return tem;
8294 break;
8296 case REALPART_EXPR:
8297 case IMAGPART_EXPR:
/* Fold the real/imaginary part of a complex constant by recursing
   on the operand.  */
8299 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8300 if (c && TREE_CODE (c) == COMPLEX_CST)
8301 return fold_build1_loc (EXPR_LOCATION (t),
8302 TREE_CODE (t), TREE_TYPE (t), c);
8303 break;
8306 default:
8307 break;
8310 return NULL_TREE;
/* Public entry point: fold reference T into a constant if the object
   it refers to has a known constant initializer.  Same as
   fold_const_aggregate_ref_1 but with no SSA valueization hook.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
8319 /* Lookup virtual method with index TOKEN in a virtual table V
8320 at OFFSET.
8321 Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Default to "referable"; failure paths below clear it.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* OFFSET comes in as bytes; convert to bits and add the slot
     selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */
  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      /* Return FN anyway so the caller knows the target even
		 though it cannot be referenced from here.  */
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
8429 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8430 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8431 KNOWN_BINFO carries the binfo describing the true type of
8432 OBJ_TYPE_REF_OBJECT(REF).
8433 Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a byte offset into it; bail out if that fails.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
8458 /* Given a pointer value T, return a simplified version of an
8459 indirection through T, or NULL_TREE if no simplification is
8460 possible. Note that the resulting type may be different from
8461 the type pointed to in the sense that it is still compatible
8462 from the langhooks point of view. */
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Refuse non-pointers and ref-all pointers: folding the latter could
     lose their "may alias anything" property.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only fold when the lower bound is a constant index.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the offset selects an element inside the
	     vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must name exactly the imaginary part.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse to simplify the inner dereference first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
8579 /* Return true if CODE is an operation that when operating on signed
8580 integer types involves undefined behavior on overflow and the
8581 operation can be expressed with unsigned arithmetic. */
8583 bool
8584 arith_code_with_undefined_signed_overflow (tree_code code)
8586 switch (code)
8588 case ABS_EXPR:
8589 case PLUS_EXPR:
8590 case MINUS_EXPR:
8591 case MULT_EXPR:
8592 case NEGATE_EXPR:
8593 case POINTER_PLUS_EXPR:
8594 return true;
8595 default:
8596 return false;
8600 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8601 operation that can be transformed to unsigned arithmetic by converting
8602 its operand, carrying out the operation in the corresponding unsigned
8603 type and converting the result back to the original type.
8605 If IN_PLACE is true, adjust the stmt in place and return NULL.
8606 Otherwise returns a sequence of statements that replace STMT and also
8607 contain a modified form of STMT itself. */
gimple_seq
rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS becomes ABSU directly; other codes get all their operands
     converted to the unsigned type first.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a fresh unsigned LHS; the original LHS is assigned from
     it by the conversion statement built below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  if (in_place)
    {
      /* Insert the operand conversions directly before STMT.  */
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (stmts)
	gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
      stmts = NULL;
    }
  else
    gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original signed type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  if (in_place)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
      update_stmt (stmt);
    }
  else
    gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
8658 /* The valueization hook we use for the gimple_build API simplification.
8659 This makes us match fold_buildN behavior by only combining with
8660 statements in the sequence(s) we are currently building. */
8662 static tree
8663 gimple_build_valueize (tree op)
8665 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8666 return op;
8667 return NULL_TREE;
8670 /* Build the expression CODE OP0 of type TYPE with location LOC,
8671 simplifying it first if possible. Returns the built
8672 expression value and appends statements possibly defining it
8673 to SEQ. */
tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  /* Try to simplify first; only emit a statement when that fails.  */
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* These codes are references in GIMPLE and must appear wrapped
	 on the RHS rather than as a bare unary operation.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8696 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8697 simplifying it first if possible. Returns the built
8698 expression value and appends statements possibly defining it
8699 to SEQ. */
8701 tree
8702 gimple_build (gimple_seq *seq, location_t loc,
8703 enum tree_code code, tree type, tree op0, tree op1)
8705 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8706 if (!res)
8708 res = create_tmp_reg_or_ssa_name (type);
8709 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8710 gimple_set_location (stmt, loc);
8711 gimple_seq_add_stmt_without_update (seq, stmt);
8713 return res;
8716 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8717 simplifying it first if possible. Returns the built
8718 expression value and appends statements possibly defining it
8719 to SEQ. */
tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  /* Try to simplify first; only emit a statement when that fails.  */
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* BIT_FIELD_REF is a reference in GIMPLE and must appear wrapped
	 on the RHS rather than as a bare ternary operation.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8742 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8743 void) with a location LOC. Returns the built expression value (or NULL_TREE
8744 if TYPE is void) and appends statements possibly defining it to SEQ. */
tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gcall *stmt;
  /* FN is either an internal function or a builtin; build the matching
     zero-argument call.  */
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  /* Only give the call an LHS when the caller wants a value.  */
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
8768 /* Build the call FN (ARG0) with a result of type TYPE
8769 (or no result if TYPE is void) with location LOC,
8770 simplifying it first if possible. Returns the built
8771 expression value (or NULL_TREE if TYPE is void) and appends
8772 statements possibly defining it to SEQ. */
tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  /* Try to simplify first; only emit a call when that fails.  */
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      /* Only give the call an LHS when the caller wants a value.  */
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8800 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8801 (or no result if TYPE is void) with location LOC,
8802 simplifying it first if possible. Returns the built
8803 expression value (or NULL_TREE if TYPE is void) and appends
8804 statements possibly defining it to SEQ. */
tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  /* Try to simplify first; only emit a call when that fails.  */
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      /* Only give the call an LHS when the caller wants a value.  */
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8832 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8833 (or no result if TYPE is void) with location LOC,
8834 simplifying it first if possible. Returns the built
8835 expression value (or NULL_TREE if TYPE is void) and appends
8836 statements possibly defining it to SEQ. */
tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  /* Try to simplify first; only emit a call when that fails.  */
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      /* Only give the call an LHS when the caller wants a value.  */
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8866 /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
8867 void) with location LOC, simplifying it first if possible. Returns the
8868 built expression value (or NULL_TREE if TYPE is void) and appends
8869 statements possibly defining it to SEQ. */
8871 tree
8872 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8873 tree type, tree op0)
8875 if (code.is_tree_code ())
8876 return gimple_build (seq, loc, tree_code (code), type, op0);
8877 return gimple_build (seq, loc, combined_fn (code), type, op0);
8880 /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
8881 void) with location LOC, simplifying it first if possible. Returns the
8882 built expression value (or NULL_TREE if TYPE is void) and appends
8883 statements possibly defining it to SEQ. */
8885 tree
8886 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8887 tree type, tree op0, tree op1)
8889 if (code.is_tree_code ())
8890 return gimple_build (seq, loc, tree_code (code), type, op0, op1);
8891 return gimple_build (seq, loc, combined_fn (code), type, op0, op1);
8894 /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
8895 is void) with location LOC, simplifying it first if possible. Returns the
8896 built expression value (or NULL_TREE if TYPE is void) and appends statements
8897 possibly defining it to SEQ. */
8899 tree
8900 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8901 tree type, tree op0, tree op1, tree op2)
8903 if (code.is_tree_code ())
8904 return gimple_build (seq, loc, tree_code (code), type, op0, op1, op2);
8905 return gimple_build (seq, loc, combined_fn (code), type, op0, op1, op2);
8908 /* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
8910 simplifying it first.
8911 Returns the built expression value and appends
8912 statements possibly defining it to SEQ. */
8914 tree
8915 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8917 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8918 return op;
8919 return gimple_build (seq, loc, NOP_EXPR, type, op);
8922 /* Build the conversion (ptrofftype) OP with a result of a type
8923 compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
8925 Returns the built expression value and appends
8926 statements possibly defining it to SEQ. */
8928 tree
8929 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8931 if (ptrofftype_p (TREE_TYPE (op)))
8932 return op;
8933 return gimple_convert (seq, loc, sizetype, op);
8936 /* Build a vector of type TYPE in which each element has the value OP.
8937 Return a gimple value for the result, appending any new statements
8938 to SEQ. */
tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  /* For variable-length vectors with a non-constant element we cannot
     build a VECTOR_CST; use VEC_DUPLICATE_EXPR instead.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  /* The duplicate is not a valid GIMPLE value; assign it to a
     temporary.  */
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
8961 /* Build a vector from BUILDER, handling the case in which some elements
8962 are non-constant. Return a gimple value for the result, appending any
8963 new instructions to SEQ.
8965 BUILDER must not have a stepped encoding on entry. This is because
8966 the function is not geared up to handle the arithmetic that would
8967 be needed in the variable case, and any code building a vector that
8968 is known to be constant should use BUILDER->build () directly. */
tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  /* Stepped encodings (3 elements per pattern) are not supported; see
     the function comment.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* If any encoded element is non-constant, fall back to building a
     CONSTRUCTOR assigned to a temporary.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant: build the VECTOR_CST directly.  */
  return builder->build ();
}
8999 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
9000 and generate a value guaranteed to be rounded upwards to ALIGN.
9002 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
tree
gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  /* NOTE(review): the mask arithmetic below is only correct when ALIGN
     is a power of two — confirm callers guarantee this.  */
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
				tree_mask);

  /* -align == ~tg_mask in two's complement.  */
  tree mask = build_int_cst (type, -align);
  return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
}
9019 /* Return true if the result of assignment STMT is known to be non-negative.
9020 If the return value is based on the assumption that signed overflow is
9021 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9022 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  /* Dispatch on the RHS class to the matching fold-const helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* Conservatively assume ternary RHSes may be negative.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
9054 /* Return true if return value of call STMT is known to be non-negative.
9055 If the return value is based on the assumption that signed overflow is
9056 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9057 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9059 static bool
9060 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9061 int depth)
9063 tree arg0 = gimple_call_num_args (stmt) > 0 ?
9064 gimple_call_arg (stmt, 0) : NULL_TREE;
9065 tree arg1 = gimple_call_num_args (stmt) > 1 ?
9066 gimple_call_arg (stmt, 1) : NULL_TREE;
9067 tree lhs = gimple_call_lhs (stmt);
9068 return (lhs
9069 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
9070 gimple_call_combined_fn (stmt),
9071 arg0, arg1,
9072 strict_overflow_p, depth));
9075 /* Return true if return value of call STMT is known to be non-negative.
9076 If the return value is based on the assumption that signed overflow is
9077 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9078 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9080 static bool
9081 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9082 int depth)
9084 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9086 tree arg = gimple_phi_arg_def (stmt, i);
9087 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
9088 return false;
9090 return true;
9093 /* Return true if STMT is known to compute a non-negative value.
9094 If the return value is based on the assumption that signed overflow is
9095 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9096 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  /* Dispatch on the statement kind; anything else is conservatively
     "not known non-negative".  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
9118 /* Return true if the floating-point value computed by assignment STMT
9119 is known to have an integer value. We also allow +Inf, -Inf and NaN
9120 to be considered integer values. Return false for signaling NaN.
9122 DEPTH is the current nesting depth of the query. */
static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the RHS class to the matching fold-const helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* Conservatively assume ternary RHSes need not be integral.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
9147 /* Return true if the floating-point value computed by call STMT is known
9148 to have an integer value. We also allow +Inf, -Inf and NaN to be
9149 considered integer values. Return false for signaling NaN.
9151 DEPTH is the current nesting depth of the query. */
9153 static bool
9154 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9156 tree arg0 = (gimple_call_num_args (stmt) > 0
9157 ? gimple_call_arg (stmt, 0)
9158 : NULL_TREE);
9159 tree arg1 = (gimple_call_num_args (stmt) > 1
9160 ? gimple_call_arg (stmt, 1)
9161 : NULL_TREE);
9162 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9163 arg0, arg1, depth);
9166 /* Return true if the floating-point result of phi STMT is known to have
9167 an integer value. We also allow +Inf, -Inf and NaN to be considered
9168 integer values. Return false for signaling NaN.
9170 DEPTH is the current nesting depth of the query. */
9172 static bool
9173 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9175 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9177 tree arg = gimple_phi_arg_def (stmt, i);
9178 if (!integer_valued_real_single_p (arg, depth + 1))
9179 return false;
9181 return true;
9184 /* Return true if the floating-point value computed by STMT is known
9185 to have an integer value. We also allow +Inf, -Inf and NaN to be
9186 considered integer values. Return false for signaling NaN.
9188 DEPTH is the current nesting depth of the query. */
bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  /* Dispatch on the statement kind; anything else is conservatively
     "not known integer-valued".  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}