Don't warn when alignment of global common data exceeds maximum alignment.
[official-gcc.git] / gcc / gimple-fold.c
blob: 3f2c176cff6db0ac0c90add00002d3d3312d5eb8
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the variable from whose constructor
   DECL was taken.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     The exception is COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

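/* For example, an initializer like (char *) &a + 4 arrives here as a
   POINTER_PLUS_EXPR and is rewritten above into the ADDR_EXPR of a
   MEM_REF at offset 4 from &a, which is a form is_gimple_min_invariant
   accepts.  */
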
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

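/* For instance, REALPART_EXPR of a COMPLEX_CST folds to its real
   component via fold_unary_loc, and a BIT_FIELD_REF selecting one lane
   of a VECTOR_CST folds to that element via fold_ternary_loc; any
   result that is not a GIMPLE invariant is discarded.  */
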
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}

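/* As an example of the embedded-operand problem mentioned above,
   fold-const.c may produce an ADDR_EXPR like &a[i + 1]; the index
   i + 1 is not a GIMPLE value, so the ADDR_EXPR case above rejects it
   and the caller keeps the original statement instead.  */
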
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);

	break;
      }

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

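/* For example, when one call with VUSE .MEM_3 and VDEF .MEM_4 is
   replaced by two stores, the loops above thread the chain as
   .MEM_3 -> .MEM_5 (fresh) -> .MEM_4: the first store uses .MEM_3 and
   defines .MEM_5, the last store defines the original .MEM_4, so no
   downstream statement needs renaming.  */
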
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to a call to FN with
   NARGS arguments, which follow the NARGS argument in GIMPLE form.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

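/* For instance, with a 64-bit size_t (PREC == 64) the shift above
   yields SSIZE_MAX == 2^63 - 1, so VALID_RANGE is [0, 2^63 - 1].  If
   SIZE is known to be either zero or a "negative" value wrapped to the
   upper half of the type, say [0, 0] union [2^64 - 5, 2^64 - 1], the
   intersection leaves just [0, 0] and the function returns true.  */
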
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

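/* To illustrate the single-register path above: on a target with
   MOVE_MAX == 8, memcpy (&d, &s, 8) with adequately aligned (or cheaply
   misalignable) operands is inlined as one 64-bit integer load into a
   temporary followed by one 64-bit store through a ref-all char
   pointer, roughly

     tmp_1 = MEM[(char * {ref-all}) &s];
     MEM[(char * {ref-all}) &d] = tmp_1;

   while non-power-of-two or larger lengths fall through to the
   aggregate-assignment code later in the function.  */
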
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to C.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

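/* To illustrate the CVAL computation above: memset (&x, 0xab, 4) on a
   suitably aligned 4-byte integral X replicates the byte into
   CVAL == 0xabababab and emits the single store X = 0xabababab.
   Writing the final step as (cval << 31) << 1 rather than cval << 32
   keeps the shift well-defined even on (historical) hosts where
   HOST_WIDE_INT is only 32 bits wide.  */
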
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}

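/* As an example of the SRK_LENRANGE handling above: for a declared
   char a[8] whose contents are unknown, the ARRAY_REF branch sets VAL
   to sizeof a - 1 == 7 as a tight, optimistic bound and PDATA->MINLEN
   to 0; the tight-bound code then derives the conservative MAXLEN from
   the size of the enclosing object.  */
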
1829 /* For an ARG referencing one or more strings, try to obtain the range
1830 of their lengths, or the size of the largest array ARG referes to if
1831 the range of lengths cannot be determined, and store all in *PDATA.
1832 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1833 the maximum constant value.
1834 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1835 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1836 length or if we are unable to determine the length, return false.
1837 VISITED is a bitmap of visited variables.
1838 RKIND determines the kind of value or range to obtain (see
1839 strlen_range_kind).
1840 Set PDATA->DECL if ARG refers to an unterminated constant array.
1841 On input, set ELTSIZE to 1 for normal single byte character strings,
1842 and either 2 or 4 for wide character strings (the size of wchar_t).
1843 Return true if *PDATA was successfully populated and false otherwise. */
1845 static bool
1846 get_range_strlen (tree arg, bitmap *visited,
1847 strlen_range_kind rkind,
1848 c_strlen_data *pdata, unsigned eltsize)
1851 if (TREE_CODE (arg) != SSA_NAME)
1852 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1854 /* If ARG is registered for SSA update we cannot look at its defining
1855 statement. */
1856 if (name_registered_for_update_p (arg))
1857 return false;
1859 /* If we were already here, break the infinite cycle. */
1860 if (!*visited)
1861 *visited = BITMAP_ALLOC (NULL);
1862 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1863 return true;
1865 tree var = arg;
1866 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1868 switch (gimple_code (def_stmt))
1870 case GIMPLE_ASSIGN:
1871 /* The RHS of the statement defining VAR must either have a
1872 constant length or come from another SSA_NAME with a constant
1873 length. */
1874 if (gimple_assign_single_p (def_stmt)
1875 || gimple_assign_unary_nop_p (def_stmt))
1877 tree rhs = gimple_assign_rhs1 (def_stmt);
1878 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1880 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1882 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1883 gimple_assign_rhs3 (def_stmt) };
1885 for (unsigned int i = 0; i < 2; i++)
1886 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1888 if (rkind != SRK_LENRANGE)
1889 return false;
1890 /* Set the upper bound to the maximum to prevent
1891 it from being adjusted in the next iteration but
1892 leave MINLEN and the more conservative MAXBOUND
1893 determined so far alone (or leave them null if
1894 they haven't been set yet). That the MINLEN is
1895 in fact zero can be determined from MAXLEN being
1896 unbounded but the discovered minimum is used for
1897 diagnostics. */
1898 pdata->maxlen = build_all_ones_cst (size_type_node);
1900 return true;
1902 return false;
1904 case GIMPLE_PHI:
1905 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1906 must have a constant length. */
1907 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1909 tree arg = gimple_phi_arg (def_stmt, i)->def;
1911 /* If this PHI has itself as an argument, we cannot
1912 determine the string length of this argument. However,
1913 if we can find a constant string length for the other
1914 PHI args then we can still be sure that this is a
1915 constant string length. So be optimistic and just
1916 continue with the next argument. */
1917 if (arg == gimple_phi_result (def_stmt))
1918 continue;
1920 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1922 if (rkind != SRK_LENRANGE)
1923 return false;
1924 /* Set the upper bound to the maximum to prevent
1925 it from being adjusted in the next iteration but
1926 leave MINLEN and the more conservative MAXBOUND
1927 determined so far alone (or leave them null if
1928 they haven't been set yet). That the MINLEN is
1929 in fact zero can be determined from MAXLEN being
1930 unbounded but the discovered minimum is used for
1931 diagnostics. */
1932 pdata->maxlen = build_all_ones_cst (size_type_node);
1935 return true;
1937 default:
1938 return false;
1942 /* Try to obtain the range of the lengths of the string(s) referenced
1943 by ARG, or the size of the largest array ARG refers to if the range
1944 of lengths cannot be determined, and store all in *PDATA which must
1945 be zero-initialized on input, except PDATA->MAXBOUND may be set to
1946 a non-null tree node other than INTEGER_CST to request that it be
1947 set to the length of the longest string in a PHI. ELTSIZE is
1948 the expected size of the string element in bytes: 1 for char and
1949 some power of 2 for wide characters.
1950 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1951 for optimization. Returning false means that a nonzero PDATA->MINLEN
1952 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1953 is -1 (in that case, the actual range is indeterminate, i.e.,
1954 [0, PTRDIFF_MAX - 2]). */
1956 bool
1957 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1959 bitmap visited = NULL;
1960 tree maxbound = pdata->maxbound;
1962 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1964 /* On failure extend the length range to an impossible maximum
1965 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1966 members can stay unchanged regardless. */
1967 pdata->minlen = ssize_int (0);
1968 pdata->maxlen = build_all_ones_cst (size_type_node);
1970 else if (!pdata->minlen)
1971 pdata->minlen = ssize_int (0);
1973 /* If it's unchanged from its initial non-null value, set the conservative
1974 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1975 if (maxbound && pdata->maxbound == maxbound)
1976 pdata->maxbound = build_all_ones_cst (size_type_node);
1978 if (visited)
1979 BITMAP_FREE (visited);
1981 return !integer_all_onesp (pdata->maxlen);
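/* Editorial sketch, not part of the original sources: the kind of
   input the range computation above handles.  P is a PHI of two
   string literals, so the computed range is MINLEN 3, MAXLEN 5.  */

static __SIZE_TYPE__
example_strlen_range (int cond)
{
  const char *p = cond ? "abc" : "vwxyz";	/* lengths 3 and 5 */
  return __builtin_strlen (p);			/* range [3, 5] */
}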
1984 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1985 For ARG of pointer types, NONSTR indicates if the caller is prepared
1986 to handle unterminated strings. For integer ARG and when RKIND ==
1987 SRK_INT_VALUE, NONSTR must be null.
1989 If an unterminated array is discovered and our caller handles
1990 unterminated arrays, then bubble up the offending DECL and
1991 return the maximum size. Otherwise return NULL. */
1993 static tree
1994 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1996 /* A non-null NONSTR is meaningless when determining the maximum
1997 value of an integer ARG. */
1998 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1999 /* ARG must have an integral type when RKIND says so. */
2000 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2002 bitmap visited = NULL;
2004 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2005 is unbounded. */
2006 c_strlen_data lendata = { };
2007 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
2008 lendata.maxlen = NULL_TREE;
2009 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2010 lendata.maxlen = NULL_TREE;
2012 if (visited)
2013 BITMAP_FREE (visited);
2015 if (nonstr)
2017 /* For callers prepared to handle unterminated arrays set
2018 *NONSTR to point to the declaration of the array and return
2019 the maximum length/size. */
2020 *nonstr = lendata.decl;
2021 return lendata.maxlen;
2024 /* Fail if the constant array isn't nul-terminated. */
2025 return lendata.decl ? NULL_TREE : lendata.maxlen;
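/* Editorial sketch, not part of the original sources: a constant
   character array with no terminating nul.  For such an array
   LENDATA.DECL is set, and the function above returns NULL_TREE
   unless the caller passed a NONSTR it is prepared to handle.  */

static const char example_nonstr[3] = { 'a', 'b', 'c' };  /* no nul */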
2029 /* Fold a call to the strcpy builtin with arguments DEST and SRC.
2030 Return true if the call was folded into a simpler form and false
2031 if no simplification can be made. */
2033 static bool
2034 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2035 tree dest, tree src)
2037 gimple *stmt = gsi_stmt (*gsi);
2038 location_t loc = gimple_location (stmt);
2039 tree fn;
2041 /* If SRC and DEST are the same (and not volatile), return DEST. */
2042 if (operand_equal_p (src, dest, 0))
2044 /* Issue -Wrestrict unless the pointers are null (those do
2045 not point to objects and so do not indicate an overlap;
2046 such calls could be the result of sanitization and jump
2047 threading). */
2048 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2050 tree func = gimple_call_fndecl (stmt);
2052 warning_at (loc, OPT_Wrestrict,
2053 "%qD source argument is the same as destination",
2054 func);
2057 replace_call_with_value (gsi, dest);
2058 return true;
2061 if (optimize_function_for_size_p (cfun))
2062 return false;
2064 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2065 if (!fn)
2066 return false;
2068 /* Set to non-null if ARG refers to an unterminated array. */
2069 tree nonstr = NULL;
2070 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2072 if (nonstr)
2074 /* Avoid folding calls with unterminated arrays. */
2075 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2076 warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2077 suppress_warning (stmt, OPT_Wstringop_overread);
2078 return false;
2081 if (!len)
2082 return false;
2084 len = fold_convert_loc (loc, size_type_node, len);
2085 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2086 len = force_gimple_operand_gsi (gsi, len, true,
2087 NULL_TREE, true, GSI_SAME_STMT);
2088 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2089 replace_call_with_call_and_fold (gsi, repl);
2090 return true;
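/* Editorial sketch, not part of the original sources, of the
   source-level equivalence used above: when strlen (SRC) is a known
   constant N, strcpy moves exactly N + 1 bytes and can be replaced
   by memcpy with a constant size.  */

static char *
example_strcpy_fold (char *dest)
{
  /* strcpy (dest, "hi") is folded to the equivalent of: */
  __builtin_memcpy (dest, "hi", 3);	/* strlen ("hi") + 1 == 3 */
  return dest;
}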
2093 /* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
2094 Return true if the call was folded into a simpler form and false
2095 if no simplification can be made. */
2097 static bool
2098 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2099 tree dest, tree src, tree len)
2101 gimple *stmt = gsi_stmt (*gsi);
2102 location_t loc = gimple_location (stmt);
2103 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2105 /* If the LEN parameter is zero, return DEST. */
2106 if (integer_zerop (len))
2108 /* Avoid warning if the destination refers to an array/pointer
2109 decorated with attribute nonstring. */
2110 if (!nonstring)
2112 tree fndecl = gimple_call_fndecl (stmt);
2114 /* Warn about the lack of nul termination: the result is not
2115 a (nul-terminated) string. */
2116 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2117 if (slen && !integer_zerop (slen))
2118 warning_at (loc, OPT_Wstringop_truncation,
2119 "%qD destination unchanged after copying no bytes "
2120 "from a string of length %E",
2121 fndecl, slen);
2122 else
2123 warning_at (loc, OPT_Wstringop_truncation,
2124 "%qD destination unchanged after copying no bytes",
2125 fndecl);
2128 replace_call_with_value (gsi, dest);
2129 return true;
2132 /* We can't compare slen with len as constants below if len is not a
2133 constant. */
2134 if (TREE_CODE (len) != INTEGER_CST)
2135 return false;
2137 /* Now, we must be passed a constant src ptr parameter. */
2138 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2139 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2140 return false;
2142 /* The size of the source string including the terminating nul. */
2143 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2145 /* We do not support simplification of this case, though we do
2146 support it when expanding trees into RTL. */
2147 /* FIXME: generate a call to __builtin_memset. */
2148 if (tree_int_cst_lt (ssize, len))
2149 return false;
2151 /* Diagnose truncation that leaves the copy unterminated. */
2152 maybe_diag_stxncpy_trunc (*gsi, src, len);
2154 /* OK transform into builtin memcpy. */
2155 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2156 if (!fn)
2157 return false;
2159 len = fold_convert_loc (loc, size_type_node, len);
2160 len = force_gimple_operand_gsi (gsi, len, true,
2161 NULL_TREE, true, GSI_SAME_STMT);
2162 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2163 replace_call_with_call_and_fold (gsi, repl);
2165 return true;
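/* Editorial sketch, not part of the original sources: with a
   constant bound LEN no larger than the source size including its
   nul, strncpy does no zero padding and writes exactly LEN bytes,
   so it is equivalent to memcpy.  */

static void
example_strncpy_fold (char *dest)
{
  /* strncpy (dest, "hi", 3) is folded to: */
  __builtin_memcpy (dest, "hi", 3);	/* 'h', 'i' and the nul */
}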
2168 /* Fold function call to builtin strchr or strrchr.
2169 If both arguments are constant, evaluate and fold the result,
2170 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2171 In general strlen is significantly faster than strchr
2172 due to being a simpler operation. */
2173 static bool
2174 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2176 gimple *stmt = gsi_stmt (*gsi);
2177 tree str = gimple_call_arg (stmt, 0);
2178 tree c = gimple_call_arg (stmt, 1);
2179 location_t loc = gimple_location (stmt);
2180 const char *p;
2181 char ch;
2183 if (!gimple_call_lhs (stmt))
2184 return false;
2186 /* Avoid folding if the first argument is not a nul-terminated array.
2187 Defer warning until later. */
2188 if (!check_nul_terminated_array (NULL_TREE, str))
2189 return false;
2191 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2193 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2195 if (p1 == NULL)
2197 replace_call_with_value (gsi, integer_zero_node);
2198 return true;
2201 tree len = build_int_cst (size_type_node, p1 - p);
2202 gimple_seq stmts = NULL;
2203 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2204 POINTER_PLUS_EXPR, str, len);
2205 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2206 gsi_replace_with_seq_vops (gsi, stmts);
2207 return true;
2210 if (!integer_zerop (c))
2211 return false;
2213 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2214 if (is_strrchr && optimize_function_for_size_p (cfun))
2216 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2218 if (strchr_fn)
2220 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2221 replace_call_with_call_and_fold (gsi, repl);
2222 return true;
2225 return false;
2228 tree len;
2229 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2231 if (!strlen_fn)
2232 return false;
2234 /* Create newstr = strlen (str). */
2235 gimple_seq stmts = NULL;
2236 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2237 gimple_set_location (new_stmt, loc);
2238 len = create_tmp_reg_or_ssa_name (size_type_node);
2239 gimple_call_set_lhs (new_stmt, len);
2240 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2242 /* Create (str p+ strlen (str)). */
2243 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2244 POINTER_PLUS_EXPR, str, len);
2245 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2246 gsi_replace_with_seq_vops (gsi, stmts);
2247 /* gsi now points at the assignment to the lhs, get a
2248 stmt iterator to the strlen.
2249 ??? We can't use gsi_for_stmt as that doesn't work when the
2250 CFG isn't built yet. */
2251 gimple_stmt_iterator gsi2 = *gsi;
2252 gsi_prev (&gsi2);
2253 fold_stmt (&gsi2);
2254 return true;
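/* Editorial sketch, not part of the original sources: str(r)chr
   (S, 0) returns the address of the terminating nul, which is S
   plus its length, hence the strlen rewrite above.  */

static char *
example_strchr_fold (char *s)
{
  /* strchr (s, 0) is folded to: */
  return s + __builtin_strlen (s);
}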
2257 /* Fold function call to builtin strstr.
2258 If both arguments are constant, evaluate and fold the result,
2259 additionally fold strstr (x, "") into x and strstr (x, "c")
2260 into strchr (x, 'c'). */
2261 static bool
2262 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2264 gimple *stmt = gsi_stmt (*gsi);
2265 if (!gimple_call_lhs (stmt))
2266 return false;
2268 tree haystack = gimple_call_arg (stmt, 0);
2269 tree needle = gimple_call_arg (stmt, 1);
2271 /* Avoid folding if either argument is not a nul-terminated array.
2272 Defer warning until later. */
2273 if (!check_nul_terminated_array (NULL_TREE, haystack)
2274 || !check_nul_terminated_array (NULL_TREE, needle))
2275 return false;
2277 const char *q = c_getstr (needle);
2278 if (q == NULL)
2279 return false;
2281 if (const char *p = c_getstr (haystack))
2283 const char *r = strstr (p, q);
2285 if (r == NULL)
2287 replace_call_with_value (gsi, integer_zero_node);
2288 return true;
2291 tree len = build_int_cst (size_type_node, r - p);
2292 gimple_seq stmts = NULL;
2293 gimple *new_stmt
2294 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2295 haystack, len);
2296 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2297 gsi_replace_with_seq_vops (gsi, stmts);
2298 return true;
2301 /* For strstr (x, "") return x. */
2302 if (q[0] == '\0')
2304 replace_call_with_value (gsi, haystack);
2305 return true;
2308 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2309 if (q[1] == '\0')
2311 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2312 if (strchr_fn)
2314 tree c = build_int_cst (integer_type_node, q[0]);
2315 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2316 replace_call_with_call_and_fold (gsi, repl);
2317 return true;
2321 return false;
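/* Editorial sketch, not part of the original sources, of the three
   strstr rewrites above.  */

static const char *
example_strstr_folds (const char *x)
{
  /* Both arguments constant: evaluated at compile time; here the
     result is "haystack" + 3.  */
  const char *r = __builtin_strstr ("haystack", "sta");
  /* strstr (x, "") folds to X itself, and strstr (x, "c") folds to
     strchr (x, 'c').  */
  return r ? __builtin_strstr (x, "c") : __builtin_strstr (x, "");
}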
2324 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2325 to the call.
2327 Return true if the call was simplified, either by replacing it with
2328 its DST argument or with a strlen/memcpy sequence, and false otherwise. */
2342 static bool
2343 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2345 gimple *stmt = gsi_stmt (*gsi);
2346 location_t loc = gimple_location (stmt);
2348 const char *p = c_getstr (src);
2350 /* If the string length is zero, return the dst parameter. */
2351 if (p && *p == '\0')
2353 replace_call_with_value (gsi, dst);
2354 return true;
2357 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2358 return false;
2360 /* See if we can store by pieces into (dst + strlen(dst)). */
2361 tree newdst;
2362 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2363 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2365 if (!strlen_fn || !memcpy_fn)
2366 return false;
2368 /* If the length of the source string isn't computable don't
2369 split strcat into strlen and memcpy. */
2370 tree len = get_maxval_strlen (src, SRK_STRLEN);
2371 if (! len)
2372 return false;
2374 /* Create strlen (dst). */
2375 gimple_seq stmts = NULL, stmts2;
2376 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2377 gimple_set_location (repl, loc);
2378 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2379 gimple_call_set_lhs (repl, newdst);
2380 gimple_seq_add_stmt_without_update (&stmts, repl);
2382 /* Create (dst p+ strlen (dst)). */
2383 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2384 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2385 gimple_seq_add_seq_without_update (&stmts, stmts2);
2387 len = fold_convert_loc (loc, size_type_node, len);
2388 len = size_binop_loc (loc, PLUS_EXPR, len,
2389 build_int_cst (size_type_node, 1));
2390 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2391 gimple_seq_add_seq_without_update (&stmts, stmts2);
2393 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2394 gimple_seq_add_stmt_without_update (&stmts, repl);
2395 if (gimple_call_lhs (stmt))
2397 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2398 gimple_seq_add_stmt_without_update (&stmts, repl);
2399 gsi_replace_with_seq_vops (gsi, stmts);
2400 /* gsi now points at the assignment to the lhs, get a
2401 stmt iterator to the memcpy call.
2402 ??? We can't use gsi_for_stmt as that doesn't work when the
2403 CFG isn't built yet. */
2404 gimple_stmt_iterator gsi2 = *gsi;
2405 gsi_prev (&gsi2);
2406 fold_stmt (&gsi2);
2408 else
2410 gsi_replace_with_seq_vops (gsi, stmts);
2411 fold_stmt (gsi);
2413 return true;
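/* Editorial sketch, not part of the original sources: with a known
   source length, strcat above is split into a strlen of the
   destination followed by a fixed-size memcpy that includes the
   terminating nul.  */

static void
example_strcat_fold (char *dest)
{
  /* strcat (dest, "xy") becomes, in effect: */
  __builtin_memcpy (dest + __builtin_strlen (dest), "xy", 3);
}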
2416 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2417 are the arguments to the call. */
2419 static bool
2420 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2422 gimple *stmt = gsi_stmt (*gsi);
2423 tree dest = gimple_call_arg (stmt, 0);
2424 tree src = gimple_call_arg (stmt, 1);
2425 tree size = gimple_call_arg (stmt, 2);
2426 tree fn;
2427 const char *p;
2430 p = c_getstr (src);
2431 /* If the SRC parameter is "", return DEST. */
2432 if (p && *p == '\0')
2434 replace_call_with_value (gsi, dest);
2435 return true;
2438 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2439 return false;
2441 /* If __builtin_strcat_chk is used, assume strcat is available. */
2442 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2443 if (!fn)
2444 return false;
2446 gimple *repl = gimple_build_call (fn, 2, dest, src);
2447 replace_call_with_call_and_fold (gsi, repl);
2448 return true;
2451 /* Simplify a call to the strncat builtin. */
2453 static bool
2454 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2456 gimple *stmt = gsi_stmt (*gsi);
2457 tree dst = gimple_call_arg (stmt, 0);
2458 tree src = gimple_call_arg (stmt, 1);
2459 tree len = gimple_call_arg (stmt, 2);
2461 const char *p = c_getstr (src);
2463 /* If the requested length is zero, or the src parameter string
2464 length is zero, return the dst parameter. */
2465 if (integer_zerop (len) || (p && *p == '\0'))
2467 replace_call_with_value (gsi, dst);
2468 return true;
2471 if (TREE_CODE (len) != INTEGER_CST || !p)
2472 return false;
2474 unsigned srclen = strlen (p);
2476 int cmpsrc = compare_tree_int (len, srclen);
2478 /* Return early if the requested len is less than the string length.
2479 Warnings will be issued elsewhere later. */
2480 if (cmpsrc < 0)
2481 return false;
2483 unsigned HOST_WIDE_INT dstsize;
2485 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2487 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2489 int cmpdst = compare_tree_int (len, dstsize);
2491 if (cmpdst >= 0)
2493 tree fndecl = gimple_call_fndecl (stmt);
2495 /* Strncat copies (at most) LEN bytes and always appends
2496 the terminating NUL so the specified bound should never
2497 be equal to (or greater than) the size of the destination.
2498 If it is, the copy could overflow. */
2499 location_t loc = gimple_location (stmt);
2500 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2501 cmpdst == 0
2502 ? G_("%qD specified bound %E equals "
2503 "destination size")
2504 : G_("%qD specified bound %E exceeds "
2505 "destination size %wu"),
2506 fndecl, len, dstsize);
2507 if (nowarn)
2508 suppress_warning (stmt, OPT_Wstringop_overflow_);
2512 if (!nowarn && cmpsrc == 0)
2514 tree fndecl = gimple_call_fndecl (stmt);
2515 location_t loc = gimple_location (stmt);
2517 /* To avoid possible overflow the specified bound should also
2518 not be equal to the length of the source, even when the size
2519 of the destination is unknown (it is not an uncommon mistake
2520 to pass the length of the source as the bound to strncat). */
2521 if (warning_at (loc, OPT_Wstringop_overflow_,
2522 "%qD specified bound %E equals source length",
2523 fndecl, len))
2524 suppress_warning (stmt, OPT_Wstringop_overflow_);
2527 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2529 /* If the replacement _DECL isn't initialized, don't do the
2530 transformation. */
2531 if (!fn)
2532 return false;
2534 /* Otherwise, emit a call to strcat. */
2535 gcall *repl = gimple_build_call (fn, 2, dst, src);
2536 replace_call_with_call_and_fold (gsi, repl);
2537 return true;
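/* Editorial sketch, not part of the original sources, of the bound
   mistake diagnosed above: a bound equal to the source length
   suggests confusion with strncpy, since strncat always appends a
   nul beyond the copied characters.  */

static void
example_strncat_bound (char *dest)
{
  /* Bound == strlen ("abcd"): triggers the "specified bound %E
     equals source length" warning before the fold to strcat.  */
  __builtin_strncat (dest, "abcd", 4);
}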
2540 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2541 LEN, and SIZE. */
2543 static bool
2544 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2546 gimple *stmt = gsi_stmt (*gsi);
2547 tree dest = gimple_call_arg (stmt, 0);
2548 tree src = gimple_call_arg (stmt, 1);
2549 tree len = gimple_call_arg (stmt, 2);
2550 tree size = gimple_call_arg (stmt, 3);
2551 tree fn;
2552 const char *p;
2554 p = c_getstr (src);
2555 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2556 if ((p && *p == '\0')
2557 || integer_zerop (len))
2559 replace_call_with_value (gsi, dest);
2560 return true;
2563 if (! tree_fits_uhwi_p (size))
2564 return false;
2566 if (! integer_all_onesp (size))
2568 tree src_len = c_strlen (src, 1);
2569 if (src_len
2570 && tree_fits_uhwi_p (src_len)
2571 && tree_fits_uhwi_p (len)
2572 && ! tree_int_cst_lt (len, src_len))
2574 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2575 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2576 if (!fn)
2577 return false;
2579 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2580 replace_call_with_call_and_fold (gsi, repl);
2581 return true;
2583 return false;
2586 /* If __builtin_strncat_chk is used, assume strncat is available. */
2587 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2588 if (!fn)
2589 return false;
2591 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2592 replace_call_with_call_and_fold (gsi, repl);
2593 return true;
2596 /* Build and append gimple statements to STMTS that load the first
2597 character of the memory location identified by STR. LOC is the
2598 location of the statement. */
2600 static tree
2601 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2603 tree var;
2605 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2606 tree cst_uchar_ptr_node
2607 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2608 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2610 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2611 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2612 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2614 gimple_assign_set_lhs (stmt, var);
2615 gimple_seq_add_stmt_without_update (stmts, stmt);
2617 return var;
2620 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
2622 static bool
2623 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2625 gimple *stmt = gsi_stmt (*gsi);
2626 tree callee = gimple_call_fndecl (stmt);
2627 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2629 tree type = integer_type_node;
2630 tree str1 = gimple_call_arg (stmt, 0);
2631 tree str2 = gimple_call_arg (stmt, 1);
2632 tree lhs = gimple_call_lhs (stmt);
2634 tree bound_node = NULL_TREE;
2635 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2637 /* Handle strncmp and strncasecmp functions. */
2638 if (gimple_call_num_args (stmt) == 3)
2640 bound_node = gimple_call_arg (stmt, 2);
2641 if (tree_fits_uhwi_p (bound_node))
2642 bound = tree_to_uhwi (bound_node);
2645 /* If the BOUND parameter is zero, return zero. */
2646 if (bound == 0)
2648 replace_call_with_value (gsi, integer_zero_node);
2649 return true;
2652 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2653 if (operand_equal_p (str1, str2, 0))
2655 replace_call_with_value (gsi, integer_zero_node);
2656 return true;
2659 /* LEN1 and LEN2 are initially set to the number of characters in
2660 each array, including the terminating nul if the array has one.
2661 LENx == strnlen (Sx, LENx) implies that the array Sx is not
2662 terminated by a nul. For nul-terminated strings they are then
2663 adjusted down to the string length so that LENx == NULPOSx holds. */
2664 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2665 const char *p1 = getbyterep (str1, &len1);
2666 const char *p2 = getbyterep (str2, &len2);
2668 /* The position of the terminating nul character if one exists, otherwise
2669 a value greater than LENx. */
2670 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2672 if (p1)
2674 size_t n = strnlen (p1, len1);
2675 if (n < len1)
2676 len1 = nulpos1 = n;
2679 if (p2)
2681 size_t n = strnlen (p2, len2);
2682 if (n < len2)
2683 len2 = nulpos2 = n;
2686 /* For known strings, return an immediate value. */
2687 if (p1 && p2)
2689 int r = 0;
2690 bool known_result = false;
2692 switch (fcode)
2694 case BUILT_IN_STRCMP:
2695 case BUILT_IN_STRCMP_EQ:
2696 if (len1 != nulpos1 || len2 != nulpos2)
2697 break;
2699 r = strcmp (p1, p2);
2700 known_result = true;
2701 break;
2703 case BUILT_IN_STRNCMP:
2704 case BUILT_IN_STRNCMP_EQ:
2706 if (bound == HOST_WIDE_INT_M1U)
2707 break;
2709 /* Reduce the bound to be no more than the length
2710 of the shorter of the two strings, or the sizes
2711 of the unterminated arrays. */
2712 unsigned HOST_WIDE_INT n = bound;
2714 if (len1 == nulpos1 && len1 < n)
2715 n = len1 + 1;
2716 if (len2 == nulpos2 && len2 < n)
2717 n = len2 + 1;
2719 if (MIN (nulpos1, nulpos2) + 1 < n)
2720 break;
2722 r = strncmp (p1, p2, n);
2723 known_result = true;
2724 break;
2726 /* The only case we can handle is when the strings are equal (result 0),
2727 which is already covered by the operand_equal_p case above. */
2728 case BUILT_IN_STRCASECMP:
2729 break;
2730 case BUILT_IN_STRNCASECMP:
2732 if (bound == HOST_WIDE_INT_M1U)
2733 break;
2734 r = strncmp (p1, p2, bound);
2735 if (r == 0)
2736 known_result = true;
2737 break;
2739 default:
2740 gcc_unreachable ();
2743 if (known_result)
2745 replace_call_with_value (gsi, build_cmp_result (type, r));
2746 return true;
2750 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2751 || fcode == BUILT_IN_STRCMP
2752 || fcode == BUILT_IN_STRCMP_EQ
2753 || fcode == BUILT_IN_STRCASECMP;
2755 location_t loc = gimple_location (stmt);
2757 /* If the second arg is "", return *(const unsigned char*)arg1. */
2758 if (p2 && *p2 == '\0' && nonzero_bound)
2760 gimple_seq stmts = NULL;
2761 tree var = gimple_load_first_char (loc, str1, &stmts);
2762 if (lhs)
2764 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2765 gimple_seq_add_stmt_without_update (&stmts, stmt);
2768 gsi_replace_with_seq_vops (gsi, stmts);
2769 return true;
2772 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2773 if (p1 && *p1 == '\0' && nonzero_bound)
2775 gimple_seq stmts = NULL;
2776 tree var = gimple_load_first_char (loc, str2, &stmts);
2778 if (lhs)
2780 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2781 stmt = gimple_build_assign (c, NOP_EXPR, var);
2782 gimple_seq_add_stmt_without_update (&stmts, stmt);
2784 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2785 gimple_seq_add_stmt_without_update (&stmts, stmt);
2788 gsi_replace_with_seq_vops (gsi, stmts);
2789 return true;
2792 /* If BOUND is one, return an expression corresponding to
2793 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2794 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2796 gimple_seq stmts = NULL;
2797 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2798 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2800 if (lhs)
2802 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2803 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2804 gimple_seq_add_stmt_without_update (&stmts, convert1);
2806 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2807 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2808 gimple_seq_add_stmt_without_update (&stmts, convert2);
2810 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2811 gimple_seq_add_stmt_without_update (&stmts, stmt);
2814 gsi_replace_with_seq_vops (gsi, stmts);
2815 return true;
2818 /* If BOUND is greater than the length of one constant string,
2819 and the other argument is also a nul-terminated string, replace
2820 strncmp with strcmp. */
2821 if (fcode == BUILT_IN_STRNCMP
2822 && bound > 0 && bound < HOST_WIDE_INT_M1U
2823 && ((p2 && len2 < bound && len2 == nulpos2)
2824 || (p1 && len1 < bound && len1 == nulpos1)))
2826 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2827 if (!fn)
2828 return false;
2829 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2830 replace_call_with_call_and_fold (gsi, repl);
2831 return true;
2834 return false;
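/* Editorial sketch, not part of the original sources, showing the
   folded forms of two of the cases above.  */

static int
example_strcmp_folds (const char *s1, const char *s2)
{
  /* strcmp (s1, "") folds to the first byte of S1 read as an
     unsigned char.  */
  int a = *(const unsigned char *) s1;
  /* strncmp (s1, s2, 1) folds to a one-byte difference.  */
  int b = *(const unsigned char *) s1 - *(const unsigned char *) s2;
  return a + b;
}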
2837 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2839 static bool
2840 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2842 gimple *stmt = gsi_stmt (*gsi);
2843 tree lhs = gimple_call_lhs (stmt);
2844 tree arg1 = gimple_call_arg (stmt, 0);
2845 tree arg2 = gimple_call_arg (stmt, 1);
2846 tree len = gimple_call_arg (stmt, 2);
2848 /* If the LEN parameter is zero, return zero. */
2849 if (integer_zerop (len))
2851 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2852 return true;
2855 char c;
2856 if (TREE_CODE (arg2) != INTEGER_CST
2857 || !tree_fits_uhwi_p (len)
2858 || !target_char_cst_p (arg2, &c))
2859 return false;
2861 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2862 unsigned HOST_WIDE_INT string_length;
2863 const char *p1 = getbyterep (arg1, &string_length);
2865 if (p1)
2867 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2868 if (r == NULL)
2870 tree mem_size, offset_node;
2871 byte_representation (arg1, &offset_node, &mem_size, NULL);
2872 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2873 ? 0 : tree_to_uhwi (offset_node);
2874 /* MEM_SIZE is the size of the array the string literal
2875 is stored in. */
2876 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2877 gcc_checking_assert (string_length <= string_size);
2878 if (length <= string_size)
2880 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2881 return true;
2884 else
2886 unsigned HOST_WIDE_INT offset = r - p1;
2887 gimple_seq stmts = NULL;
2888 if (lhs != NULL_TREE)
2890 tree offset_cst = build_int_cst (sizetype, offset);
2891 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2892 arg1, offset_cst);
2893 gimple_seq_add_stmt_without_update (&stmts, stmt);
2895 else
2896 gimple_seq_add_stmt_without_update (&stmts,
2897 gimple_build_nop ());
2899 gsi_replace_with_seq_vops (gsi, stmts);
2900 return true;
2904 return false;
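/* Editorial sketch, not part of the original sources: memchr over a
   string literal with constant arguments is evaluated at compile
   time.  */

static const void *
example_memchr_fold (void)
{
  /* 'l' first occurs at offset 2, so this folds to "hello" + 2; a
     byte that never occurs folds to a null pointer as long as the
     bound stays within the literal's array.  */
  return __builtin_memchr ("hello", 'l', 5);
}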
2907 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2908 to the call. UNLOCKED is true if this is actually a call to
2909 fputs_unlocked. Return true if the call was simplified and false
2910 if no simplification was possible. */
2914 static bool
2915 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2916 tree arg0, tree arg1,
2917 bool unlocked)
2919 gimple *stmt = gsi_stmt (*gsi);
2921 /* If we're using an unlocked function, assume the other unlocked
2922 functions exist explicitly. */
2923 tree const fn_fputc = (unlocked
2924 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2925 : builtin_decl_implicit (BUILT_IN_FPUTC));
2926 tree const fn_fwrite = (unlocked
2927 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2928 : builtin_decl_implicit (BUILT_IN_FWRITE));
2930 /* If the return value is used, don't do the transformation. */
2931 if (gimple_call_lhs (stmt))
2932 return false;
2934 /* Get the length of the string passed to fputs. If the length
2935 can't be determined, punt. */
2936 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2937 if (!len
2938 || TREE_CODE (len) != INTEGER_CST)
2939 return false;
2941 switch (compare_tree_int (len, 1))
2943 case -1: /* length is 0, delete the call entirely. */
2944 replace_call_with_value (gsi, integer_zero_node);
2945 return true;
2947 case 0: /* length is 1, call fputc. */
2949 const char *p = c_getstr (arg0);
2950 if (p != NULL)
2952 if (!fn_fputc)
2953 return false;
2955 gimple *repl = gimple_build_call (fn_fputc, 2,
2956 build_int_cst
2957 (integer_type_node, p[0]), arg1);
2958 replace_call_with_call_and_fold (gsi, repl);
2959 return true;
2962 /* FALLTHROUGH */
2963 case 1: /* length is greater than 1, call fwrite. */
2965 /* If optimizing for size keep fputs. */
2966 if (optimize_function_for_size_p (cfun))
2967 return false;
2968 /* New argument list transforming fputs(string, stream) to
2969 fwrite(string, 1, len, stream). */
2970 if (!fn_fwrite)
2971 return false;
2973 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2974 size_one_node, len, arg1);
2975 replace_call_with_call_and_fold (gsi, repl);
2976 return true;
2978 default:
2979 gcc_unreachable ();
2981 return false;
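/* Editorial sketch, not part of the original sources, of the three
   fputs cases above; the return values are unused, as the
   transformation requires.  */

#include <stdio.h>

static void
example_fputs_folds (FILE *fp)
{
  fputs ("", fp);	/* length 0: the call is deleted */
  fputs ("x", fp);	/* length 1: becomes fputc ('x', fp) */
  fputs ("xy", fp);	/* longer: becomes fwrite ("xy", 1, 2, fp) */
}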
2984 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2985 DEST, SRC, LEN, and SIZE are the arguments to the call.
2986 FCODE is the BUILT_IN_* code of the builtin. Return true if the
2987 call was folded and false otherwise. */
2990 static bool
2991 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2992 tree dest, tree src, tree len, tree size,
2993 enum built_in_function fcode)
2995 gimple *stmt = gsi_stmt (*gsi);
2996 location_t loc = gimple_location (stmt);
2997 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2998 tree fn;
3000 /* If SRC and DEST are the same (and not volatile), return DEST
3001 (resp. DEST+LEN for __mempcpy_chk). */
3002 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3004 if (fcode != BUILT_IN_MEMPCPY_CHK)
3006 replace_call_with_value (gsi, dest);
3007 return true;
3009 else
3011 gimple_seq stmts = NULL;
3012 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3013 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3014 TREE_TYPE (dest), dest, len);
3015 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3016 replace_call_with_value (gsi, temp);
3017 return true;
3021 if (! tree_fits_uhwi_p (size))
3022 return false;
3024 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3025 if (! integer_all_onesp (size))
3027 if (! tree_fits_uhwi_p (len))
3029 /* If LEN is not constant, try MAXLEN too.
3030 For MAXLEN only allow optimizing into non-_ocs function
3031 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3032 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3034 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3036 /* (void) __mempcpy_chk () can be optimized into
3037 (void) __memcpy_chk (). */
3038 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3039 if (!fn)
3040 return false;
3042 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3043 replace_call_with_call_and_fold (gsi, repl);
3044 return true;
3046 return false;
3049 else
3050 maxlen = len;
3052 if (tree_int_cst_lt (size, maxlen))
3053 return false;
3056 fn = NULL_TREE;
3057 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3058 mem{cpy,pcpy,move,set} is available. */
3059 switch (fcode)
3061 case BUILT_IN_MEMCPY_CHK:
3062 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3063 break;
3064 case BUILT_IN_MEMPCPY_CHK:
3065 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3066 break;
3067 case BUILT_IN_MEMMOVE_CHK:
3068 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3069 break;
3070 case BUILT_IN_MEMSET_CHK:
3071 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3072 break;
3073 default:
3074 break;
3077 if (!fn)
3078 return false;
3080 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3081 replace_call_with_call_and_fold (gsi, repl);
3082 return true;
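/* Editorial sketch, not part of the original sources: when the
   constant length is known to fit the object size, the checking
   variant is replaced by the plain call.  */

static void
example_memcpy_chk_fold (char *dest, const char *src)
{
  /* The bound 8 is <= the object size 16, so this folds to
     memcpy (dest, src, 8).  */
  __builtin___memcpy_chk (dest, src, 8, 16);
}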
3085 /* Fold a call to the __st[rp]cpy_chk builtin.
3086 DEST, SRC, and SIZE are the arguments to the call.
3087 FCODE is the BUILT_IN_* code of the builtin. Return true if the
3088 call was folded and false otherwise. */
3091 static bool
3092 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3093 tree dest,
3094 tree src, tree size,
3095 enum built_in_function fcode)
3097 gimple *stmt = gsi_stmt (*gsi);
3098 location_t loc = gimple_location (stmt);
3099 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3100 tree len, fn;
3102 /* If SRC and DEST are the same (and not volatile), return DEST. */
3103 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3105 /* Issue -Wrestrict unless the pointers are null (those do
3106 not point to objects and so do not indicate an overlap;
3107 such calls could be the result of sanitization and jump
3108 threading). */
3109 if (!integer_zerop (dest)
3110 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3112 tree func = gimple_call_fndecl (stmt);
3114 warning_at (loc, OPT_Wrestrict,
3115 "%qD source argument is the same as destination",
3116 func);
3119 replace_call_with_value (gsi, dest);
3120 return true;
3123 if (! tree_fits_uhwi_p (size))
3124 return false;
3126 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3127 if (! integer_all_onesp (size))
3129 len = c_strlen (src, 1);
3130 if (! len || ! tree_fits_uhwi_p (len))
3132 /* If LEN is not constant, try MAXLEN too.
3133 For MAXLEN only allow optimizing into non-_ocs function
3134 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3135 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3137 if (fcode == BUILT_IN_STPCPY_CHK)
3139 if (! ignore)
3140 return false;
3142 /* If return value of __stpcpy_chk is ignored,
3143 optimize into __strcpy_chk. */
3144 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3145 if (!fn)
3146 return false;
3148 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3149 replace_call_with_call_and_fold (gsi, repl);
3150 return true;
3153 if (! len || TREE_SIDE_EFFECTS (len))
3154 return false;
3156 /* If c_strlen returned something, but not a constant,
3157 transform __strcpy_chk into __memcpy_chk. */
3158 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3159 if (!fn)
3160 return false;
3162 gimple_seq stmts = NULL;
3163 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3164 len = gimple_convert (&stmts, loc, size_type_node, len);
3165 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3166 build_int_cst (size_type_node, 1));
3167 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3168 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3169 replace_call_with_call_and_fold (gsi, repl);
3170 return true;
3173 else
3174 maxlen = len;
3176 if (! tree_int_cst_lt (maxlen, size))
3177 return false;
3180 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3181 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
3182 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3183 if (!fn)
3184 return false;
3186 gimple *repl = gimple_build_call (fn, 2, dest, src);
3187 replace_call_with_call_and_fold (gsi, repl);
3188 return true;
3191 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3192 are the arguments to the call. FCODE is the BUILT_IN_* code of
3193 the builtin. Return true if the call was folded and false
3194 otherwise. */
3196 static bool
3197 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3198 tree dest, tree src,
3199 tree len, tree size,
3200 enum built_in_function fcode)
3202 gimple *stmt = gsi_stmt (*gsi);
3203 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3204 tree fn;
3206 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3208 /* If return value of __stpncpy_chk is ignored,
3209 optimize into __strncpy_chk. */
3210 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3211 if (fn)
3213 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3214 replace_call_with_call_and_fold (gsi, repl);
3215 return true;
3219 if (! tree_fits_uhwi_p (size))
3220 return false;
3222 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3223 if (! integer_all_onesp (size))
3225 if (! tree_fits_uhwi_p (len))
3227 /* If LEN is not constant, try MAXLEN too.
3228 For MAXLEN only allow optimizing into non-_ocs function
3229 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3230 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3231 return false;
3233 else
3234 maxlen = len;
3236 if (tree_int_cst_lt (size, maxlen))
3237 return false;
3240 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3241 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3242 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3243 if (!fn)
3244 return false;
3246 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3247 replace_call_with_call_and_fold (gsi, repl);
3248 return true;
3251 /* Fold a call to the stpcpy builtin with arguments DEST and SRC.
3252 Return true if the call was folded and false otherwise. */
3254 static bool
3255 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3257 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3258 location_t loc = gimple_location (stmt);
3259 tree dest = gimple_call_arg (stmt, 0);
3260 tree src = gimple_call_arg (stmt, 1);
3261 tree fn, lenp1;
3263 /* If the result is unused, replace stpcpy with strcpy. */
3264 if (gimple_call_lhs (stmt) == NULL_TREE)
3266 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3267 if (!fn)
3268 return false;
3269 gimple_call_set_fndecl (stmt, fn);
3270 fold_stmt (gsi);
3271 return true;
3274 /* Set to non-null if ARG refers to an unterminated array. */
3275 c_strlen_data data = { };
3276 /* The size of the unterminated array if SRC refers to one. */
3277 tree size;
3278 /* True if the size is exact/constant, false if it's the lower bound
3279 of a range. */
3280 bool exact;
3281 tree len = c_strlen (src, 1, &data, 1);
3282 if (!len
3283 || TREE_CODE (len) != INTEGER_CST)
3285 data.decl = unterminated_array (src, &size, &exact);
3286 if (!data.decl)
3287 return false;
3290 if (data.decl)
3292 /* Avoid folding calls with unterminated arrays. */
3293 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3294 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3295 exact);
3296 suppress_warning (stmt, OPT_Wstringop_overread);
3297 return false;
3300 if (optimize_function_for_size_p (cfun)
3301 /* If length is zero it's small enough. */
3302 && !integer_zerop (len))
3303 return false;
3305 /* If the source has a known length replace stpcpy with memcpy. */
3306 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3307 if (!fn)
3308 return false;
3310 gimple_seq stmts = NULL;
3311 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3312 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3313 tem, build_int_cst (size_type_node, 1));
3314 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3315 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3316 gimple_move_vops (repl, stmt);
3317 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3318 /* Replace the result with dest + len. */
3319 stmts = NULL;
3320 tem = gimple_convert (&stmts, loc, sizetype, len);
3321 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3322 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3323 POINTER_PLUS_EXPR, dest, tem);
3324 gsi_replace (gsi, ret, false);
3325 /* Finally fold the memcpy call. */
3326 gimple_stmt_iterator gsi2 = *gsi;
3327 gsi_prev (&gsi2);
3328 fold_stmt (&gsi2);
3329 return true;
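/* Editorial sketch, not part of the original sources: with a known
   source length N, stpcpy above becomes a memcpy of N + 1 bytes
   whose result is DEST + N.  */

static char *
example_stpcpy_fold (char *dest)
{
  /* stpcpy (dest, "hi") becomes: */
  __builtin_memcpy (dest, "hi", 3);
  return dest + 2;	/* strlen ("hi") */
}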
3332 /* Fold a call to __{,v}snprintf_chk pointed to by the GSI iterator.
3333 Return false if a normal call should be emitted rather than
3334 transforming the call inline; return true if it was folded. FCODE
3335 is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. */
3338 static bool
3339 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3340 enum built_in_function fcode)
3342 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3343 tree dest, size, len, fn, fmt, flag;
3344 const char *fmt_str;
3346 /* Verify the required arguments in the original call. */
3347 if (gimple_call_num_args (stmt) < 5)
3348 return false;
3350 dest = gimple_call_arg (stmt, 0);
3351 len = gimple_call_arg (stmt, 1);
3352 flag = gimple_call_arg (stmt, 2);
3353 size = gimple_call_arg (stmt, 3);
3354 fmt = gimple_call_arg (stmt, 4);
3356 if (! tree_fits_uhwi_p (size))
3357 return false;
3359 if (! integer_all_onesp (size))
3361 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3362 if (! tree_fits_uhwi_p (len))
3364 /* If LEN is not constant, try MAXLEN too.
3365 For MAXLEN only allow optimizing into non-_ocs function
3366 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3367 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3368 return false;
3370 else
3371 maxlen = len;
3373 if (tree_int_cst_lt (size, maxlen))
3374 return false;
3377 if (!init_target_chars ())
3378 return false;
3380 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3381 or if format doesn't contain % chars or is "%s". */
3382 if (! integer_zerop (flag))
3384 fmt_str = c_getstr (fmt);
3385 if (fmt_str == NULL)
3386 return false;
3387 if (strchr (fmt_str, target_percent) != NULL
3388 && strcmp (fmt_str, target_percent_s))
3389 return false;
3392 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3393 available. */
3394 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3395 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3396 if (!fn)
3397 return false;
3399 /* Replace the called function and the first 5 arguments by 3,
3400 retaining the trailing varargs. */
3401 gimple_call_set_fndecl (stmt, fn);
3402 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3403 gimple_call_set_arg (stmt, 0, dest);
3404 gimple_call_set_arg (stmt, 1, len);
3405 gimple_call_set_arg (stmt, 2, fmt);
3406 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3407 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3408 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3409 fold_stmt (gsi);
3410 return true;
3413 /* Fold a call to __{,v}sprintf_chk pointed to by the GSI iterator.
3414 Return false if a normal call should be emitted rather than
3415 transforming the call inline; return true if it was folded. FCODE
3416 is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
3418 static bool
3419 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3420 enum built_in_function fcode)
3422 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3423 tree dest, size, len, fn, fmt, flag;
3424 const char *fmt_str;
3425 unsigned nargs = gimple_call_num_args (stmt);
3427 /* Verify the required arguments in the original call. */
3428 if (nargs < 4)
3429 return false;
3430 dest = gimple_call_arg (stmt, 0);
3431 flag = gimple_call_arg (stmt, 1);
3432 size = gimple_call_arg (stmt, 2);
3433 fmt = gimple_call_arg (stmt, 3);
3435 if (! tree_fits_uhwi_p (size))
3436 return false;
3438 len = NULL_TREE;
3440 if (!init_target_chars ())
3441 return false;
3443 /* Check whether the format is a literal string constant. */
3444 fmt_str = c_getstr (fmt);
3445 if (fmt_str != NULL)
3447 /* If the format doesn't contain % args or %%, we know the size. */
3448 if (strchr (fmt_str, target_percent) == 0)
3450 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3451 len = build_int_cstu (size_type_node, strlen (fmt_str));
3453 /* If the format is "%s" and first ... argument is a string literal,
3454 we know the size too. */
3455 else if (fcode == BUILT_IN_SPRINTF_CHK
3456 && strcmp (fmt_str, target_percent_s) == 0)
3458 tree arg;
3460 if (nargs == 5)
3462 arg = gimple_call_arg (stmt, 4);
3463 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3465 len = c_strlen (arg, 1);
3466 if (! len || ! tree_fits_uhwi_p (len))
3467 len = NULL_TREE;
3473 if (! integer_all_onesp (size))
3475 if (! len || ! tree_int_cst_lt (len, size))
3476 return false;
3479 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3480 or if format doesn't contain % chars or is "%s". */
3481 if (! integer_zerop (flag))
3483 if (fmt_str == NULL)
3484 return false;
3485 if (strchr (fmt_str, target_percent) != NULL
3486 && strcmp (fmt_str, target_percent_s))
3487 return false;
3490 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3491 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3492 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3493 if (!fn)
3494 return false;
3496 /* Replace the called function and the first 4 arguments by 2,
3497 retaining the trailing varargs. */
3498 gimple_call_set_fndecl (stmt, fn);
3499 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3500 gimple_call_set_arg (stmt, 0, dest);
3501 gimple_call_set_arg (stmt, 1, fmt);
3502 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3503 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3504 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3505 fold_stmt (gsi);
3506 return true;
3509 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3510 ORIG may be null if this is a 2-argument call. We don't attempt to
3511 simplify calls with more than 3 arguments.
3513 Return true if simplification was possible, otherwise false. */
3515 bool
3516 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3518 gimple *stmt = gsi_stmt (*gsi);
3520 /* Verify the required arguments in the original call. We deal with two
3521 types of sprintf() calls: 'sprintf (str, fmt)' and
3522 'sprintf (dest, "%s", orig)'. */
3523 if (gimple_call_num_args (stmt) > 3)
3524 return false;
3526 tree orig = NULL_TREE;
3527 if (gimple_call_num_args (stmt) == 3)
3528 orig = gimple_call_arg (stmt, 2);
3530 /* Check whether the format is a literal string constant. */
3531 tree fmt = gimple_call_arg (stmt, 1);
3532 const char *fmt_str = c_getstr (fmt);
3533 if (fmt_str == NULL)
3534 return false;
3536 tree dest = gimple_call_arg (stmt, 0);
3538 if (!init_target_chars ())
3539 return false;
3541 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3542 if (!fn)
3543 return false;
3545 /* If the format doesn't contain % args or %%, use strcpy. */
3546 if (strchr (fmt_str, target_percent) == NULL)
3548 /* Don't optimize sprintf (buf, "abc", ptr++). */
3549 if (orig)
3550 return false;
3552 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3553 'format' is known to contain no % formats. */
3554 gimple_seq stmts = NULL;
3555 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3557 /* Propagate the NO_WARNING bit to avoid issuing the same
3558 warning more than once. */
3559 copy_warning (repl, stmt);
3561 gimple_seq_add_stmt_without_update (&stmts, repl);
3562 if (tree lhs = gimple_call_lhs (stmt))
3564 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3565 strlen (fmt_str)));
3566 gimple_seq_add_stmt_without_update (&stmts, repl);
3567 gsi_replace_with_seq_vops (gsi, stmts);
3568 /* gsi now points at the assignment to the lhs, get a
3569 stmt iterator to the strcpy call.
3570 ??? We can't use gsi_for_stmt as that doesn't work when the
3571 CFG isn't built yet. */
3572 gimple_stmt_iterator gsi2 = *gsi;
3573 gsi_prev (&gsi2);
3574 fold_stmt (&gsi2);
3576 else
3578 gsi_replace_with_seq_vops (gsi, stmts);
3579 fold_stmt (gsi);
3581 return true;
3584 /* If the format is "%s", use strcpy if the result isn't used. */
3585 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3587 /* Don't crash on sprintf (str1, "%s"). */
3588 if (!orig)
3589 return false;
3591 /* Don't fold calls with source arguments of invalid (nonpointer)
3592 types. */
3593 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3594 return false;
3596 tree orig_len = NULL_TREE;
3597 if (gimple_call_lhs (stmt))
3599 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3600 if (!orig_len)
3601 return false;
3604 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3605 gimple_seq stmts = NULL;
3606 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3608 /* Propagate the NO_WARNING bit to avoid issuing the same
3609 warning more than once. */
3610 copy_warning (repl, stmt);
3612 gimple_seq_add_stmt_without_update (&stmts, repl);
3613 if (tree lhs = gimple_call_lhs (stmt))
3615 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3616 TREE_TYPE (orig_len)))
3617 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3618 repl = gimple_build_assign (lhs, orig_len);
3619 gimple_seq_add_stmt_without_update (&stmts, repl);
3620 gsi_replace_with_seq_vops (gsi, stmts);
3621 /* gsi now points at the assignment to the lhs, get a
3622 stmt iterator to the strcpy call.
3623 ??? We can't use gsi_for_stmt as that doesn't work when the
3624 CFG isn't built yet. */
3625 gimple_stmt_iterator gsi2 = *gsi;
3626 gsi_prev (&gsi2);
3627 fold_stmt (&gsi2);
3629 else
3631 gsi_replace_with_seq_vops (gsi, stmts);
3632 fold_stmt (gsi);
3634 return true;
3636 return false;
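/* Editorial sketch, not part of the original sources, of the two
   sprintf rewrites above.  */

static int
example_sprintf_folds (char *dest, const char *src)
{
  /* No '%' in the format: sprintf (dest, "abc") becomes
     strcpy (dest, "abc"), and a used result becomes the constant 3. */
  __builtin_strcpy (dest, "abc");
  /* "%s" format: sprintf (dest, "%s", src) becomes
     strcpy (dest, src); if the result is used, strlen (SRC) must be
     known.  */
  __builtin_strcpy (dest, src);
  return 3;
}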
3639 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3640 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3641 attempt to simplify calls with more than 4 arguments.
3643 Return true if simplification was possible, otherwise false. */
3645 bool
3646 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3648 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3649 tree dest = gimple_call_arg (stmt, 0);
3650 tree destsize = gimple_call_arg (stmt, 1);
3651 tree fmt = gimple_call_arg (stmt, 2);
3652 tree orig = NULL_TREE;
3653 const char *fmt_str = NULL;
3655 if (gimple_call_num_args (stmt) > 4)
3656 return false;
3658 if (gimple_call_num_args (stmt) == 4)
3659 orig = gimple_call_arg (stmt, 3);
3661 if (!tree_fits_uhwi_p (destsize))
3662 return false;
3663 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3665 /* Check whether the format is a literal string constant. */
3666 fmt_str = c_getstr (fmt);
3667 if (fmt_str == NULL)
3668 return false;
3670 if (!init_target_chars ())
3671 return false;
3673 /* If the format doesn't contain % args or %%, use strcpy. */
3674 if (strchr (fmt_str, target_percent) == NULL)
3676 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3677 if (!fn)
3678 return false;
3680 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3681 if (orig)
3682 return false;
3684 /* We could expand this as
3685 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3686 or to
3687 memcpy (str, fmt_with_nul_at_cstm1, cst);
3688 but in the former case that might increase code size
3689 and in the latter case grow .rodata section too much.
3690 So punt for now. */
3691 size_t len = strlen (fmt_str);
3692 if (len >= destlen)
3693 return false;
3695 gimple_seq stmts = NULL;
3696 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3697 gimple_seq_add_stmt_without_update (&stmts, repl);
3698 if (tree lhs = gimple_call_lhs (stmt))
3700 repl = gimple_build_assign (lhs,
3701 build_int_cst (TREE_TYPE (lhs), len));
3702 gimple_seq_add_stmt_without_update (&stmts, repl);
3703 gsi_replace_with_seq_vops (gsi, stmts);
3704 /* gsi now points at the assignment to the lhs, get a
3705 stmt iterator to the strcpy call.
3706 ??? We can't use gsi_for_stmt as that doesn't work when the
3707 CFG isn't built yet. */
3708 gimple_stmt_iterator gsi2 = *gsi;
3709 gsi_prev (&gsi2);
3710 fold_stmt (&gsi2);
3712 else
3714 gsi_replace_with_seq_vops (gsi, stmts);
3715 fold_stmt (gsi);
3717 return true;
3720 /* If the format is "%s", use strcpy when the constant length of the source is known and smaller than DESTLEN. */
3721 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3723 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3724 if (!fn)
3725 return false;
3727 /* Don't crash on snprintf (str1, cst, "%s"). */
3728 if (!orig)
3729 return false;
3731 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3732 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3733 return false;
3735 /* We could expand this as
3736 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3737 or to
3738 memcpy (str1, str2_with_nul_at_cstm1, cst);
3739 but in the former case that might increase code size
3740 and in the latter case grow .rodata section too much.
3741 So punt for now. */
3742 if (compare_tree_int (orig_len, destlen) >= 0)
3743 return false;
3745 /* Convert snprintf (str1, cst, "%s", str2) into
3746 strcpy (str1, str2) if strlen (str2) < cst. */
3747 gimple_seq stmts = NULL;
3748 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3749 gimple_seq_add_stmt_without_update (&stmts, repl);
3750 if (tree lhs = gimple_call_lhs (stmt))
3752 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3753 TREE_TYPE (orig_len)))
3754 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3755 repl = gimple_build_assign (lhs, orig_len);
3756 gimple_seq_add_stmt_without_update (&stmts, repl);
3757 gsi_replace_with_seq_vops (gsi, stmts);
3758 /* gsi now points at the assignment to the lhs, get a
3759 stmt iterator to the strcpy call.
3760 ??? We can't use gsi_for_stmt as that doesn't work when the
3761 CFG isn't built yet. */
3762 gimple_stmt_iterator gsi2 = *gsi;
3763 gsi_prev (&gsi2);
3764 fold_stmt (&gsi2);
3766 else
3768 gsi_replace_with_seq_vops (gsi, stmts);
3769 fold_stmt (gsi);
3771 return true;
3773 return false;
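/* A sketch of the snprintf folds above on hypothetical user code:

     snprintf (buf, 32, "hello");        =>  strcpy (buf, "hello");
     snprintf (buf, 32, "%s", "hello");  =>  strcpy (buf, "hello");

   Both require the copied length (5 here) to be a known constant
   smaller than the constant destination size, so no truncation can
   occur.  */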
3776 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3777 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3778 more than 3 arguments, and ARG may be null in the 2-argument case.
3780 Return true if simplification was possible, otherwise false.
3781 FCODE is the BUILT_IN_* code of the function to be simplified. */
3784 static bool
3785 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3786 tree fp, tree fmt, tree arg,
3787 enum built_in_function fcode)
3789 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3790 tree fn_fputc, fn_fputs;
3791 const char *fmt_str = NULL;
3793 /* If the return value is used, don't do the transformation. */
3794 if (gimple_call_lhs (stmt) != NULL_TREE)
3795 return false;
3797 /* Check whether the format is a literal string constant. */
3798 fmt_str = c_getstr (fmt);
3799 if (fmt_str == NULL)
3800 return false;
3802 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3804 /* If we're using an unlocked function, assume the other
3805 unlocked functions exist explicitly. */
3806 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3807 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3809 else
3811 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3812 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3815 if (!init_target_chars ())
3816 return false;
3818 /* If the format doesn't contain % args or %%, use fputs. */
3819 if (strchr (fmt_str, target_percent) == NULL)
3821 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3822 && arg)
3823 return false;
3825 /* If the format specifier was "", fprintf does nothing. */
3826 if (fmt_str[0] == '\0')
3828 replace_call_with_value (gsi, NULL_TREE);
3829 return true;
3832 /* When "string" doesn't contain %, replace all cases of
3833 fprintf (fp, string) with fputs (string, fp). The fputs
3834 builtin will take care of special cases like length == 1. */
3835 if (fn_fputs)
3837 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3838 replace_call_with_call_and_fold (gsi, repl);
3839 return true;
3843 /* The other optimizations can be done only on the non-va_list variants. */
3844 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3845 return false;
3847 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3848 else if (strcmp (fmt_str, target_percent_s) == 0)
3850 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3851 return false;
3852 if (fn_fputs)
3854 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3855 replace_call_with_call_and_fold (gsi, repl);
3856 return true;
3860 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3861 else if (strcmp (fmt_str, target_percent_c) == 0)
3863 if (!arg
3864 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3865 return false;
3866 if (fn_fputc)
3868 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3869 replace_call_with_call_and_fold (gsi, repl);
3870 return true;
3874 return false;
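/* A sketch of the fprintf folds above on hypothetical user code:

     fprintf (fp, "hello");  =>  fputs ("hello", fp);
     fprintf (fp, "%s", s);  =>  fputs (s, fp);
     fprintf (fp, "%c", c);  =>  fputc (c, fp);

   All of these apply only when the fprintf return value is unused.  */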
3877 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3878 FMT and ARG are the arguments to the call; we don't fold cases with
3879 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3881 Return true if simplification was possible, otherwise false.
3882 FCODE is the BUILT_IN_* code of the function to be simplified. */
3885 static bool
3886 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3887 tree arg, enum built_in_function fcode)
3889 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3890 tree fn_putchar, fn_puts, newarg;
3891 const char *fmt_str = NULL;
3893 /* If the return value is used, don't do the transformation. */
3894 if (gimple_call_lhs (stmt) != NULL_TREE)
3895 return false;
3897 /* Check whether the format is a literal string constant. */
3898 fmt_str = c_getstr (fmt);
3899 if (fmt_str == NULL)
3900 return false;
3902 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3904 /* If we're using an unlocked function, assume the other
3905 unlocked functions exist explicitly. */
3906 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3907 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3909 else
3911 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3912 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3915 if (!init_target_chars ())
3916 return false;
3918 if (strcmp (fmt_str, target_percent_s) == 0
3919 || strchr (fmt_str, target_percent) == NULL)
3921 const char *str;
3923 if (strcmp (fmt_str, target_percent_s) == 0)
3925 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3926 return false;
3928 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3929 return false;
3931 str = c_getstr (arg);
3932 if (str == NULL)
3933 return false;
3935 else
3937 /* The format specifier doesn't contain any '%' characters. */
3938 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3939 && arg)
3940 return false;
3941 str = fmt_str;
3944 /* If the string was "", printf does nothing. */
3945 if (str[0] == '\0')
3947 replace_call_with_value (gsi, NULL_TREE);
3948 return true;
3951 /* If the string has length of 1, call putchar. */
3952 if (str[1] == '\0')
3954 /* Given printf ("c"), where c is any single character,
3955 convert "c"[0] to an int and pass that to the replacement
3956 function. */
3957 newarg = build_int_cst (integer_type_node, str[0]);
3958 if (fn_putchar)
3960 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3961 replace_call_with_call_and_fold (gsi, repl);
3962 return true;
3965 else
3967 /* If the string was "string\n", call puts("string"). */
3968 size_t len = strlen (str);
3969 if ((unsigned char)str[len - 1] == target_newline
3970 && (size_t) (int) len == len
3971 && (int) len > 0)
3973 char *newstr;
3975 /* Create a NUL-terminated string that's one char shorter
3976 than the original, stripping off the trailing '\n'. */
3977 newstr = xstrdup (str);
3978 newstr[len - 1] = '\0';
3979 newarg = build_string_literal (len, newstr);
3980 free (newstr);
3981 if (fn_puts)
3983 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3984 replace_call_with_call_and_fold (gsi, repl);
3985 return true;
3988 else
3989 /* We'd like to arrange to call fputs(string,stdout) here,
3990 but we need stdout and don't have a way to get it yet. */
3991 return false;
3995 /* The other optimizations can be done only on the non-va_list variants. */
3996 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3997 return false;
3999 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4000 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
4002 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
4003 return false;
4004 if (fn_puts)
4006 gcall *repl = gimple_build_call (fn_puts, 1, arg);
4007 replace_call_with_call_and_fold (gsi, repl);
4008 return true;
4012 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4013 else if (strcmp (fmt_str, target_percent_c) == 0)
4015 if (!arg || ! useless_type_conversion_p (integer_type_node,
4016 TREE_TYPE (arg)))
4017 return false;
4018 if (fn_putchar)
4020 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
4021 replace_call_with_call_and_fold (gsi, repl);
4022 return true;
4026 return false;
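/* A sketch of the printf folds above on hypothetical user code:

     printf ("");         =>  (removed)
     printf ("x");        =>  putchar ('x');
     printf ("hello\n");  =>  puts ("hello");
     printf ("%s\n", s);  =>  puts (s);
     printf ("%c", c);    =>  putchar (c);

   As with fprintf, these apply only when the return value is unused.  */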
4031 /* Fold a call to __builtin_strlen. If the length of the argument is a known constant, fold the call to that constant; otherwise record the range of lengths the result may have. */
4033 static bool
4034 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
4036 gimple *stmt = gsi_stmt (*gsi);
4037 tree arg = gimple_call_arg (stmt, 0);
4039 wide_int minlen;
4040 wide_int maxlen;
4042 c_strlen_data lendata = { };
4043 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4044 && !lendata.decl
4045 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4046 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4048 /* The range of lengths refers to either a single constant
4049 string or to the longest and shortest constant string
4050 referenced by the argument of the strlen() call, or to
4051 the strings that can possibly be stored in the arrays
4052 the argument refers to. */
4053 minlen = wi::to_wide (lendata.minlen);
4054 maxlen = wi::to_wide (lendata.maxlen);
4056 else
4058 unsigned prec = TYPE_PRECISION (sizetype);
4060 minlen = wi::shwi (0, prec);
4061 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4064 if (minlen == maxlen)
4066 /* Fold the strlen call to a constant. */
4067 tree type = TREE_TYPE (lendata.minlen);
4068 tree len = force_gimple_operand_gsi (gsi,
4069 wide_int_to_tree (type, minlen),
4070 true, NULL, true, GSI_SAME_STMT);
4071 replace_call_with_value (gsi, len);
4072 return true;
4075 /* Set the strlen() range to [0, MAXLEN]. */
4076 if (tree lhs = gimple_call_lhs (stmt))
4077 set_strlen_range (lhs, minlen, maxlen);
4079 return false;
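/* For example, on hypothetical user code:

     char buf[8];
     n1 = strlen ("abc");  =>  n1 is folded to the constant 3
     n2 = strlen (buf);    =>  not folded; n2 gets the range [0, 7]

   In the second case the call is left in place but the result range is
   recorded for later passes via set_strlen_range.  */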
4082 /* Fold a call to __builtin_acc_on_device. */
4084 static bool
4085 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4087 /* Defer folding until we know which compiler we're in. */
4088 if (symtab->state != EXPANSION)
4089 return false;
4091 unsigned val_host = GOMP_DEVICE_HOST;
4092 unsigned val_dev = GOMP_DEVICE_NONE;
4094 #ifdef ACCEL_COMPILER
4095 val_host = GOMP_DEVICE_NOT_HOST;
4096 val_dev = ACCEL_COMPILER_acc_device;
4097 #endif
4099 location_t loc = gimple_location (gsi_stmt (*gsi));
4101 tree host_eq = make_ssa_name (boolean_type_node);
4102 gimple *host_ass = gimple_build_assign
4103 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4104 gimple_set_location (host_ass, loc);
4105 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4107 tree dev_eq = make_ssa_name (boolean_type_node);
4108 gimple *dev_ass = gimple_build_assign
4109 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4110 gimple_set_location (dev_ass, loc);
4111 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4113 tree result = make_ssa_name (boolean_type_node);
4114 gimple *result_ass = gimple_build_assign
4115 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4116 gimple_set_location (result_ass, loc);
4117 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4119 replace_call_with_value (gsi, result);
4121 return true;
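/* The sequence emitted above amounts to the following GIMPLE-like
   pseudocode (the SSA names are illustrative):

     host_eq_1 = arg0 == VAL_HOST;
     dev_eq_2 = arg0 == VAL_DEV;
     result_3 = host_eq_1 | dev_eq_2;

   so the call collapses to two comparisons once VAL_HOST and VAL_DEV
   are known for the compiler (host or accelerator) being run.  */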
4124 /* Fold realloc (0, n) -> malloc (n). */
4126 static bool
4127 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4129 gimple *stmt = gsi_stmt (*gsi);
4130 tree arg = gimple_call_arg (stmt, 0);
4131 tree size = gimple_call_arg (stmt, 1);
4133 if (operand_equal_p (arg, null_pointer_node, 0))
4135 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4136 if (fn_malloc)
4138 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4139 replace_call_with_call_and_fold (gsi, repl);
4140 return true;
4143 return false;
4146 /* Number of bytes into which any type other than an aggregate
4147 or vector type should fit. */
4148 static constexpr size_t clear_padding_unit
4149 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4150 /* Buffer size on which __builtin_clear_padding folding code works. */
4151 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4153 /* Data passed through __builtin_clear_padding folding. */
4154 struct clear_padding_struct {
4155 location_t loc;
4156 /* False during __builtin_clear_padding folding, true during
4157 clear_type_padding_in_mask. In the latter case, instead of clearing
4158 the non-padding bits in the union_ptr array, clear the padding bits. */
4159 bool clear_in_mask;
4160 tree base;
4161 tree alias_type;
4162 gimple_stmt_iterator *gsi;
4163 /* Alignment of buf->base + 0. */
4164 unsigned align;
4165 /* Offset from buf->base. Should always be a multiple of UNITS_PER_WORD. */
4166 HOST_WIDE_INT off;
4167 /* Number of padding bytes before buf->off that don't have padding clear
4168 code emitted yet. */
4169 HOST_WIDE_INT padding_bytes;
4170 /* The size of the whole object. Never emit code to touch
4171 buf->base + buf->sz or following bytes. */
4172 HOST_WIDE_INT sz;
4173 /* Number of bytes recorded in buf->buf. */
4174 size_t size;
4175 /* When inside a union, instead of emitting code we AND the bits into
4176 the union_ptr array. */
4177 unsigned char *union_ptr;
4178 /* Set bits mean padding bits that need to be cleared by the builtin. */
4179 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4182 /* Emit code to clear the padding requested in BUF->buf; the set bits
4183 in there stand for padding that should be cleared. FULL is true
4184 if everything from the buffer should be flushed, otherwise
4185 it can leave up to 2 * clear_padding_unit bytes for further
4186 processing. */
4188 static void
4189 clear_padding_flush (clear_padding_struct *buf, bool full)
4191 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4192 if (!full && buf->size < 2 * clear_padding_unit)
4193 return;
4194 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4195 size_t end = buf->size;
4196 if (!full)
4197 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4198 * clear_padding_unit);
4199 size_t padding_bytes = buf->padding_bytes;
4200 if (buf->union_ptr)
4202 if (buf->clear_in_mask)
4204 /* During clear_type_padding_in_mask, clear the padding
4205 bits set in buf->buf in the buf->union_ptr mask. */
4206 for (size_t i = 0; i < end; i++)
4208 if (buf->buf[i] == (unsigned char) ~0)
4209 padding_bytes++;
4210 else
4212 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4213 0, padding_bytes);
4214 padding_bytes = 0;
4215 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4218 if (full)
4220 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4221 0, padding_bytes);
4222 buf->off = 0;
4223 buf->size = 0;
4224 buf->padding_bytes = 0;
4226 else
4228 memmove (buf->buf, buf->buf + end, buf->size - end);
4229 buf->off += end;
4230 buf->size -= end;
4231 buf->padding_bytes = padding_bytes;
4233 return;
4235 /* Inside of a union, instead of emitting any code,
4236 clear all bits in the union_ptr buffer that are clear
4237 in buf. Whole padding bytes don't clear anything. */
4238 for (size_t i = 0; i < end; i++)
4240 if (buf->buf[i] == (unsigned char) ~0)
4241 padding_bytes++;
4242 else
4244 padding_bytes = 0;
4245 buf->union_ptr[buf->off + i] &= buf->buf[i];
4248 if (full)
4250 buf->off = 0;
4251 buf->size = 0;
4252 buf->padding_bytes = 0;
4254 else
4256 memmove (buf->buf, buf->buf + end, buf->size - end);
4257 buf->off += end;
4258 buf->size -= end;
4259 buf->padding_bytes = padding_bytes;
4261 return;
4263 size_t wordsize = UNITS_PER_WORD;
4264 for (size_t i = 0; i < end; i += wordsize)
4266 size_t nonzero_first = wordsize;
4267 size_t nonzero_last = 0;
4268 size_t zero_first = wordsize;
4269 size_t zero_last = 0;
4270 bool all_ones = true, bytes_only = true;
4271 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4272 > (unsigned HOST_WIDE_INT) buf->sz)
4274 gcc_assert (wordsize > 1);
4275 wordsize /= 2;
4276 i -= wordsize;
4277 continue;
4279 for (size_t j = i; j < i + wordsize && j < end; j++)
4281 if (buf->buf[j])
4283 if (nonzero_first == wordsize)
4285 nonzero_first = j - i;
4286 nonzero_last = j - i;
4288 if (nonzero_last != j - i)
4289 all_ones = false;
4290 nonzero_last = j + 1 - i;
4292 else
4294 if (zero_first == wordsize)
4295 zero_first = j - i;
4296 zero_last = j + 1 - i;
4298 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4300 all_ones = false;
4301 bytes_only = false;
4304 size_t padding_end = i;
4305 if (padding_bytes)
4307 if (nonzero_first == 0
4308 && nonzero_last == wordsize
4309 && all_ones)
4311 /* All bits are padding and we had some padding
4312 before too. Just extend it. */
4313 padding_bytes += wordsize;
4314 continue;
4316 if (all_ones && nonzero_first == 0)
4318 padding_bytes += nonzero_last;
4319 padding_end += nonzero_last;
4320 nonzero_first = wordsize;
4321 nonzero_last = 0;
4323 else if (bytes_only && nonzero_first == 0)
4325 gcc_assert (zero_first && zero_first != wordsize);
4326 padding_bytes += zero_first;
4327 padding_end += zero_first;
4329 tree atype, src;
4330 if (padding_bytes == 1)
4332 atype = char_type_node;
4333 src = build_zero_cst (char_type_node);
4335 else
4337 atype = build_array_type_nelts (char_type_node, padding_bytes);
4338 src = build_constructor (atype, NULL);
4340 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4341 build_int_cst (buf->alias_type,
4342 buf->off + padding_end
4343 - padding_bytes));
4344 gimple *g = gimple_build_assign (dst, src);
4345 gimple_set_location (g, buf->loc);
4346 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4347 padding_bytes = 0;
4348 buf->padding_bytes = 0;
4350 if (nonzero_first == wordsize)
4351 /* All bits in a word are 0, there are no padding bits. */
4352 continue;
4353 if (all_ones && nonzero_last == wordsize)
4355 /* All bits between nonzero_first and end of word are padding
4356 bits, start counting padding_bytes. */
4357 padding_bytes = nonzero_last - nonzero_first;
4358 continue;
4360 if (bytes_only)
4362 /* If bitfields aren't involved in this word, prefer storing
4363 individual bytes or groups of them over performing a RMW
4364 operation on the whole word. */
4365 gcc_assert (i + zero_last <= end);
4366 for (size_t j = padding_end; j < i + zero_last; j++)
4368 if (buf->buf[j])
4370 size_t k;
4371 for (k = j; k < i + zero_last; k++)
4372 if (buf->buf[k] == 0)
4373 break;
4374 HOST_WIDE_INT off = buf->off + j;
4375 tree atype, src;
4376 if (k - j == 1)
4378 atype = char_type_node;
4379 src = build_zero_cst (char_type_node);
4381 else
4383 atype = build_array_type_nelts (char_type_node, k - j);
4384 src = build_constructor (atype, NULL);
4386 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4387 buf->base,
4388 build_int_cst (buf->alias_type, off));
4389 gimple *g = gimple_build_assign (dst, src);
4390 gimple_set_location (g, buf->loc);
4391 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4392 j = k;
4395 if (nonzero_last == wordsize)
4396 padding_bytes = nonzero_last - zero_last;
4397 continue;
4399 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4401 if (nonzero_last - nonzero_first <= eltsz
4402 && ((nonzero_first & ~(eltsz - 1))
4403 == ((nonzero_last - 1) & ~(eltsz - 1))))
4405 tree type;
4406 if (eltsz == 1)
4407 type = char_type_node;
4408 else
4409 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4411 size_t start = nonzero_first & ~(eltsz - 1);
4412 HOST_WIDE_INT off = buf->off + i + start;
4413 tree atype = type;
4414 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4415 atype = build_aligned_type (type, buf->align);
4416 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4417 build_int_cst (buf->alias_type, off));
4418 tree src;
4419 gimple *g;
4420 if (all_ones
4421 && nonzero_first == start
4422 && nonzero_last == start + eltsz)
4423 src = build_zero_cst (type);
4424 else
4426 src = make_ssa_name (type);
4427 g = gimple_build_assign (src, unshare_expr (dst));
4428 gimple_set_location (g, buf->loc);
4429 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4430 tree mask = native_interpret_expr (type,
4431 buf->buf + i + start,
4432 eltsz);
4433 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4434 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4435 tree src_masked = make_ssa_name (type);
4436 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4437 src, mask);
4438 gimple_set_location (g, buf->loc);
4439 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4440 src = src_masked;
4442 g = gimple_build_assign (dst, src);
4443 gimple_set_location (g, buf->loc);
4444 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4445 break;
4449 if (full)
4451 if (padding_bytes)
4453 tree atype, src;
4454 if (padding_bytes == 1)
4456 atype = char_type_node;
4457 src = build_zero_cst (char_type_node);
4459 else
4461 atype = build_array_type_nelts (char_type_node, padding_bytes);
4462 src = build_constructor (atype, NULL);
4464 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4465 build_int_cst (buf->alias_type,
4466 buf->off + end
4467 - padding_bytes));
4468 gimple *g = gimple_build_assign (dst, src);
4469 gimple_set_location (g, buf->loc);
4470 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4472 size_t end_rem = end % UNITS_PER_WORD;
4473 buf->off += end - end_rem;
4474 buf->size = end_rem;
4475 memset (buf->buf, 0, buf->size);
4476 buf->padding_bytes = 0;
4478 else
4480 memmove (buf->buf, buf->buf + end, buf->size - end);
4481 buf->off += end;
4482 buf->size -= end;
4483 buf->padding_bytes = padding_bytes;
4487 /* Append PADDING_BYTES padding bytes. */
4489 static void
4490 clear_padding_add_padding (clear_padding_struct *buf,
4491 HOST_WIDE_INT padding_bytes)
4493 if (padding_bytes == 0)
4494 return;
4495 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4496 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4497 clear_padding_flush (buf, false);
4498 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4499 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4501 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4502 padding_bytes -= clear_padding_buf_size - buf->size;
4503 buf->size = clear_padding_buf_size;
4504 clear_padding_flush (buf, false);
4505 gcc_assert (buf->padding_bytes);
4506 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4507 is guaranteed to be all ones. */
4508 padding_bytes += buf->size;
4509 buf->size = padding_bytes % UNITS_PER_WORD;
4510 memset (buf->buf, ~0, buf->size);
4511 buf->off += padding_bytes - buf->size;
4512 buf->padding_bytes += padding_bytes - buf->size;
4514 else
4516 memset (buf->buf + buf->size, ~0, padding_bytes);
4517 buf->size += padding_bytes;
4521 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4523 /* Clear padding bits of union type TYPE. */
4525 static void
4526 clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4528 clear_padding_struct *union_buf;
4529 HOST_WIDE_INT start_off = 0, next_off = 0;
4530 size_t start_size = 0;
4531 if (buf->union_ptr)
4533 start_off = buf->off + buf->size;
4534 next_off = start_off + sz;
4535 start_size = start_off % UNITS_PER_WORD;
4536 start_off -= start_size;
4537 clear_padding_flush (buf, true);
4538 union_buf = buf;
4540 else
4542 if (sz + buf->size > clear_padding_buf_size)
4543 clear_padding_flush (buf, false);
4544 union_buf = XALLOCA (clear_padding_struct);
4545 union_buf->loc = buf->loc;
4546 union_buf->clear_in_mask = buf->clear_in_mask;
4547 union_buf->base = NULL_TREE;
4548 union_buf->alias_type = NULL_TREE;
4549 union_buf->gsi = NULL;
4550 union_buf->align = 0;
4551 union_buf->off = 0;
4552 union_buf->padding_bytes = 0;
4553 union_buf->sz = sz;
4554 union_buf->size = 0;
4555 if (sz + buf->size <= clear_padding_buf_size)
4556 union_buf->union_ptr = buf->buf + buf->size;
4557 else
4558 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4559 memset (union_buf->union_ptr, ~0, sz);
4562 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4563 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4565 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4567 if (TREE_TYPE (field) == error_mark_node)
4568 continue;
4569 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4570 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4571 if (!buf->clear_in_mask)
4572 error_at (buf->loc, "flexible array member %qD does not have "
4573 "well defined padding bits for %qs",
4574 field, "__builtin_clear_padding");
4575 continue;
4577 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4578 gcc_assert (union_buf->size == 0);
4579 union_buf->off = start_off;
4580 union_buf->size = start_size;
4581 memset (union_buf->buf, ~0, start_size);
4582 clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4583 clear_padding_add_padding (union_buf, sz - fldsz);
4584 clear_padding_flush (union_buf, true);
4587 if (buf == union_buf)
4589 buf->off = next_off;
4590 buf->size = next_off % UNITS_PER_WORD;
4591 buf->off -= buf->size;
4592 memset (buf->buf, ~0, buf->size);
4594 else if (sz + buf->size <= clear_padding_buf_size)
4595 buf->size += sz;
4596 else
4598 unsigned char *union_ptr = union_buf->union_ptr;
4599 while (sz)
4601 clear_padding_flush (buf, false);
4602 HOST_WIDE_INT this_sz
4603 = MIN ((unsigned HOST_WIDE_INT) sz,
4604 clear_padding_buf_size - buf->size);
4605 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4606 buf->size += this_sz;
4607 union_ptr += this_sz;
4608 sz -= this_sz;
4610 XDELETE (union_buf->union_ptr);
4614 /* The only known floating point formats with padding bits are the
4615 IEEE extended ones. */
4617 static bool
4618 clear_padding_real_needs_padding_p (tree type)
4620 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4621 return (fmt->b == 2
4622 && fmt->signbit_ro == fmt->signbit_rw
4623 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
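/* For example, the x86 80-bit extended format keeps its sign bit at
   bit 79 yet is stored in 12- or 16-byte slots, so everything beyond
   the 80th bit is padding; formats such as IEEE double use every bit
   of their storage and need no padding cleared.  */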
4626 /* Return true if TYPE might contain any padding bits. */
4628 static bool
4629 clear_padding_type_may_have_padding_p (tree type)
4631 switch (TREE_CODE (type))
4633 case RECORD_TYPE:
4634 case UNION_TYPE:
4635 return true;
4636 case ARRAY_TYPE:
4637 case COMPLEX_TYPE:
4638 case VECTOR_TYPE:
4639 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4640 case REAL_TYPE:
4641 return clear_padding_real_needs_padding_p (type);
4642 default:
4643 return false;
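/* For instance, for the hypothetical types

     struct A { char c; int i; };  =>  true (3 padding bytes after c)
     char[16], int, double         =>  false

   the builtin can skip the latter entirely, while RECORD_TYPE and
   UNION_TYPE are conservatively assumed to contain padding.  */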
4647 /* Emit a runtime loop:
4648 for (; buf.base != end; buf.base += sz)
4649 __builtin_clear_padding (buf.base); */
4651 static void
4652 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4654 tree l1 = create_artificial_label (buf->loc);
4655 tree l2 = create_artificial_label (buf->loc);
4656 tree l3 = create_artificial_label (buf->loc);
4657 gimple *g = gimple_build_goto (l2);
4658 gimple_set_location (g, buf->loc);
4659 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4660 g = gimple_build_label (l1);
4661 gimple_set_location (g, buf->loc);
4662 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4663 clear_padding_type (buf, type, buf->sz);
4664 clear_padding_flush (buf, true);
4665 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4666 size_int (buf->sz));
4667 gimple_set_location (g, buf->loc);
4668 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4669 g = gimple_build_label (l2);
4670 gimple_set_location (g, buf->loc);
4671 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4672 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4673 gimple_set_location (g, buf->loc);
4674 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4675 g = gimple_build_label (l3);
4676 gimple_set_location (g, buf->loc);
4677 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4680 /* Clear padding bits for TYPE. Called recursively from
4681 gimple_fold_builtin_clear_padding. */
4683 static void
4684 clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4686 switch (TREE_CODE (type))
4688 case RECORD_TYPE:
4689 HOST_WIDE_INT cur_pos;
4690 cur_pos = 0;
4691 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4692 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4694 tree ftype = TREE_TYPE (field);
4695 if (DECL_BIT_FIELD (field))
4697 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4698 if (fldsz == 0)
4699 continue;
4700 HOST_WIDE_INT pos = int_byte_position (field);
4701 if (pos >= sz)
4702 continue;
4703 HOST_WIDE_INT bpos
4704 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4705 bpos %= BITS_PER_UNIT;
4706 HOST_WIDE_INT end
4707 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4708 if (pos + end > cur_pos)
4710 clear_padding_add_padding (buf, pos + end - cur_pos);
4711 cur_pos = pos + end;
4713 gcc_assert (cur_pos > pos
4714 && ((unsigned HOST_WIDE_INT) buf->size
4715 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4716 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4717 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4718 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4719 " in %qs", "__builtin_clear_padding");
4720 else if (BYTES_BIG_ENDIAN)
4722 /* Big endian. */
4723 if (bpos + fldsz <= BITS_PER_UNIT)
4724 *p &= ~(((1 << fldsz) - 1)
4725 << (BITS_PER_UNIT - bpos - fldsz));
4726 else
4728 if (bpos)
4730 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4731 p++;
4732 fldsz -= BITS_PER_UNIT - bpos;
4734 memset (p, 0, fldsz / BITS_PER_UNIT);
4735 p += fldsz / BITS_PER_UNIT;
4736 fldsz %= BITS_PER_UNIT;
4737 if (fldsz)
4738 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4741 else
4743 /* Little endian. */
4744 if (bpos + fldsz <= BITS_PER_UNIT)
4745 *p &= ~(((1 << fldsz) - 1) << bpos);
4746 else
4748 if (bpos)
4750 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4751 p++;
4752 fldsz -= BITS_PER_UNIT - bpos;
4754 memset (p, 0, fldsz / BITS_PER_UNIT);
4755 p += fldsz / BITS_PER_UNIT;
4756 fldsz %= BITS_PER_UNIT;
4757 if (fldsz)
4758 *p &= ~((1 << fldsz) - 1);
4762 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4764 if (ftype == error_mark_node)
4765 continue;
4766 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4767 && !COMPLETE_TYPE_P (ftype));
4768 if (!buf->clear_in_mask)
4769 error_at (buf->loc, "flexible array member %qD does not "
4770 "have well defined padding bits for %qs",
4771 field, "__builtin_clear_padding");
4773 else if (is_empty_type (TREE_TYPE (field)))
4774 continue;
4775 else
4777 HOST_WIDE_INT pos = int_byte_position (field);
4778 if (pos >= sz)
4779 continue;
4780 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4781 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4782 clear_padding_add_padding (buf, pos - cur_pos);
4783 cur_pos = pos;
4784 clear_padding_type (buf, TREE_TYPE (field), fldsz);
4785 cur_pos += fldsz;
4788 gcc_assert (sz >= cur_pos);
4789 clear_padding_add_padding (buf, sz - cur_pos);
4790 break;
4791 case ARRAY_TYPE:
4792 HOST_WIDE_INT nelts, fldsz;
4793 fldsz = int_size_in_bytes (TREE_TYPE (type));
4794 if (fldsz == 0)
4795 break;
4796 nelts = sz / fldsz;
4797 if (nelts > 1
4798 && sz > 8 * UNITS_PER_WORD
4799 && buf->union_ptr == NULL
4800 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4802 /* For a sufficiently large array of more than one element,
4803 emit a runtime loop to keep code size manageable. */
4804 tree base = buf->base;
4805 unsigned int prev_align = buf->align;
4806 HOST_WIDE_INT off = buf->off + buf->size;
4807 HOST_WIDE_INT prev_sz = buf->sz;
4808 clear_padding_flush (buf, true);
4809 tree elttype = TREE_TYPE (type);
4810 buf->base = create_tmp_var (build_pointer_type (elttype));
4811 tree end = make_ssa_name (TREE_TYPE (buf->base));
4812 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4813 base, size_int (off));
4814 gimple_set_location (g, buf->loc);
4815 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4816 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4817 size_int (sz));
4818 gimple_set_location (g, buf->loc);
4819 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4820 buf->sz = fldsz;
4821 buf->align = TYPE_ALIGN (elttype);
4822 buf->off = 0;
4823 buf->size = 0;
4824 clear_padding_emit_loop (buf, elttype, end);
4825 buf->base = base;
4826 buf->sz = prev_sz;
4827 buf->align = prev_align;
4828 buf->size = off % UNITS_PER_WORD;
4829 buf->off = off - buf->size;
4830 memset (buf->buf, 0, buf->size);
4831 break;
4833 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4834 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4835 break;
4836 case UNION_TYPE:
4837 clear_padding_union (buf, type, sz);
4838 break;
4839 case REAL_TYPE:
4840 gcc_assert ((size_t) sz <= clear_padding_unit);
4841 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4842 clear_padding_flush (buf, false);
4843 if (clear_padding_real_needs_padding_p (type))
4845 /* Use native_interpret_expr + native_encode_expr to figure out
4846 which bits are padding. */
4847 memset (buf->buf + buf->size, ~0, sz);
4848 tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
4849 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4850 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4851 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4852 for (size_t i = 0; i < (size_t) sz; i++)
4853 buf->buf[buf->size + i] ^= ~0;
4855 else
4856 memset (buf->buf + buf->size, 0, sz);
4857 buf->size += sz;
4858 break;
4859 case COMPLEX_TYPE:
4860 fldsz = int_size_in_bytes (TREE_TYPE (type));
4861 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4862 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4863 break;
4864 case VECTOR_TYPE:
4865 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4866 fldsz = int_size_in_bytes (TREE_TYPE (type));
4867 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4868 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4869 break;
4870 case NULLPTR_TYPE:
4871 gcc_assert ((size_t) sz <= clear_padding_unit);
4872 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4873 clear_padding_flush (buf, false);
4874 memset (buf->buf + buf->size, ~0, sz);
4875 buf->size += sz;
4876 break;
4877 default:
4878 gcc_assert ((size_t) sz <= clear_padding_unit);
4879 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4880 clear_padding_flush (buf, false);
4881 memset (buf->buf + buf->size, 0, sz);
4882 buf->size += sz;
4883 break;
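/* A worked example of the RECORD_TYPE bit-field handling above,
   assuming little endian and a typical 32-bit int ABI:

     struct S { unsigned a : 3; unsigned b : 7; char c; };

   has size and alignment 4, and the padding mask built in buf->buf
   comes out as { 0x00, 0xfc, 0x00, 0xff }: byte 0 is fully covered by
   A and B, bits 10-15 of the first word are padding, byte 2 is C, and
   byte 3 is tail padding.  */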
4887 /* Clear padding bits of TYPE in MASK. */
4889 void
4890 clear_type_padding_in_mask (tree type, unsigned char *mask)
4892 clear_padding_struct buf;
4893 buf.loc = UNKNOWN_LOCATION;
4894 buf.clear_in_mask = true;
4895 buf.base = NULL_TREE;
4896 buf.alias_type = NULL_TREE;
4897 buf.gsi = NULL;
4898 buf.align = 0;
4899 buf.off = 0;
4900 buf.padding_bytes = 0;
4901 buf.sz = int_size_in_bytes (type);
4902 buf.size = 0;
4903 buf.union_ptr = mask;
4904 clear_padding_type (&buf, type, buf.sz);
4905 clear_padding_flush (&buf, true);
4908 /* Fold __builtin_clear_padding builtin. */
4910 static bool
4911 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4913 gimple *stmt = gsi_stmt (*gsi);
4914 gcc_assert (gimple_call_num_args (stmt) == 2);
4915 tree ptr = gimple_call_arg (stmt, 0);
4916 tree typearg = gimple_call_arg (stmt, 1);
4917 tree type = TREE_TYPE (TREE_TYPE (typearg));
4918 location_t loc = gimple_location (stmt);
4919 clear_padding_struct buf;
4920 gimple_stmt_iterator gsiprev = *gsi;
4921 /* This should be folded during the lower pass. */
4922 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4923 gcc_assert (COMPLETE_TYPE_P (type));
4924 gsi_prev (&gsiprev);
4926 buf.loc = loc;
4927 buf.clear_in_mask = false;
4928 buf.base = ptr;
4929 buf.alias_type = NULL_TREE;
4930 buf.gsi = gsi;
4931 buf.align = get_pointer_alignment (ptr);
4932 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4933 buf.align = MAX (buf.align, talign);
4934 buf.off = 0;
4935 buf.padding_bytes = 0;
4936 buf.size = 0;
4937 buf.sz = int_size_in_bytes (type);
4938 buf.union_ptr = NULL;
4939 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4940 sorry_at (loc, "%s not supported for variable length aggregates",
4941 "__builtin_clear_padding");
4942 /* The implementation currently assumes 8-bit host and target
4943 chars, which is the case for all currently supported targets
4944 and hosts, and is required, e.g., for the native_{encode,interpret}* APIs. */
4945 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4946 sorry_at (loc, "%s not supported on this target",
4947 "__builtin_clear_padding");
4948 else if (!clear_padding_type_may_have_padding_p (type))
4950 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4952 tree sz = TYPE_SIZE_UNIT (type);
4953 tree elttype = type;
4954 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4955 while (TREE_CODE (elttype) == ARRAY_TYPE
4956 && int_size_in_bytes (elttype) < 0)
4957 elttype = TREE_TYPE (elttype);
4958 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4959 gcc_assert (eltsz >= 0);
4960 if (eltsz)
4962 buf.base = create_tmp_var (build_pointer_type (elttype));
4963 tree end = make_ssa_name (TREE_TYPE (buf.base));
4964 gimple *g = gimple_build_assign (buf.base, ptr);
4965 gimple_set_location (g, loc);
4966 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4967 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4968 gimple_set_location (g, loc);
4969 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4970 buf.sz = eltsz;
4971 buf.align = TYPE_ALIGN (elttype);
4972 buf.alias_type = build_pointer_type (elttype);
4973 clear_padding_emit_loop (&buf, elttype, end);
4976 else
4978 if (!is_gimple_mem_ref_addr (buf.base))
4980 buf.base = make_ssa_name (TREE_TYPE (ptr));
4981 gimple *g = gimple_build_assign (buf.base, ptr);
4982 gimple_set_location (g, loc);
4983 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4985 buf.alias_type = build_pointer_type (type);
4986 clear_padding_type (&buf, type, buf.sz);
4987 clear_padding_flush (&buf, true);
4990 gimple_stmt_iterator gsiprev2 = *gsi;
4991 gsi_prev (&gsiprev2);
4992 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4993 gsi_replace (gsi, gimple_build_nop (), true);
4994 else
4996 gsi_remove (gsi, true);
4997 *gsi = gsiprev2;
4999 return true;
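/* An illustrative use of the builtin folded above (hypothetical user
   code):

     struct S { char c; int i; } s = { 'x', 42 };
     __builtin_clear_padding (&s);  =>  zero the 3 padding bytes
                                        between s.c and s.i

   The value-bearing bytes of S are left untouched.  */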
5002 /* Fold the non-target builtin at *GSI and return whether any simplification
5003 was made. */
5005 static bool
5006 gimple_fold_builtin (gimple_stmt_iterator *gsi)
5008 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
5009 tree callee = gimple_call_fndecl (stmt);
5011 /* Give up for always_inline inline builtins until they are
5012 inlined. */
5013 if (avoid_folding_inline_builtin (callee))
5014 return false;
5016 unsigned n = gimple_call_num_args (stmt);
5017 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5018 switch (fcode)
5020 case BUILT_IN_BCMP:
5021 return gimple_fold_builtin_bcmp (gsi);
5022 case BUILT_IN_BCOPY:
5023 return gimple_fold_builtin_bcopy (gsi);
5024 case BUILT_IN_BZERO:
5025 return gimple_fold_builtin_bzero (gsi);
5027 case BUILT_IN_MEMSET:
5028 return gimple_fold_builtin_memset (gsi,
5029 gimple_call_arg (stmt, 1),
5030 gimple_call_arg (stmt, 2));
5031 case BUILT_IN_MEMCPY:
5032 case BUILT_IN_MEMPCPY:
5033 case BUILT_IN_MEMMOVE:
5034 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5035 gimple_call_arg (stmt, 1), fcode);
5036 case BUILT_IN_SPRINTF_CHK:
5037 case BUILT_IN_VSPRINTF_CHK:
5038 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5039 case BUILT_IN_STRCAT_CHK:
5040 return gimple_fold_builtin_strcat_chk (gsi);
5041 case BUILT_IN_STRNCAT_CHK:
5042 return gimple_fold_builtin_strncat_chk (gsi);
5043 case BUILT_IN_STRLEN:
5044 return gimple_fold_builtin_strlen (gsi);
5045 case BUILT_IN_STRCPY:
5046 return gimple_fold_builtin_strcpy (gsi,
5047 gimple_call_arg (stmt, 0),
5048 gimple_call_arg (stmt, 1));
5049 case BUILT_IN_STRNCPY:
5050 return gimple_fold_builtin_strncpy (gsi,
5051 gimple_call_arg (stmt, 0),
5052 gimple_call_arg (stmt, 1),
5053 gimple_call_arg (stmt, 2));
5054 case BUILT_IN_STRCAT:
5055 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5056 gimple_call_arg (stmt, 1));
5057 case BUILT_IN_STRNCAT:
5058 return gimple_fold_builtin_strncat (gsi);
5059 case BUILT_IN_INDEX:
5060 case BUILT_IN_STRCHR:
5061 return gimple_fold_builtin_strchr (gsi, false);
5062 case BUILT_IN_RINDEX:
5063 case BUILT_IN_STRRCHR:
5064 return gimple_fold_builtin_strchr (gsi, true);
5065 case BUILT_IN_STRSTR:
5066 return gimple_fold_builtin_strstr (gsi);
5067 case BUILT_IN_STRCMP:
5068 case BUILT_IN_STRCMP_EQ:
5069 case BUILT_IN_STRCASECMP:
5070 case BUILT_IN_STRNCMP:
5071 case BUILT_IN_STRNCMP_EQ:
5072 case BUILT_IN_STRNCASECMP:
5073 return gimple_fold_builtin_string_compare (gsi);
5074 case BUILT_IN_MEMCHR:
5075 return gimple_fold_builtin_memchr (gsi);
5076 case BUILT_IN_FPUTS:
5077 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5078 gimple_call_arg (stmt, 1), false);
5079 case BUILT_IN_FPUTS_UNLOCKED:
5080 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5081 gimple_call_arg (stmt, 1), true);
5082 case BUILT_IN_MEMCPY_CHK:
5083 case BUILT_IN_MEMPCPY_CHK:
5084 case BUILT_IN_MEMMOVE_CHK:
5085 case BUILT_IN_MEMSET_CHK:
5086 return gimple_fold_builtin_memory_chk (gsi,
5087 gimple_call_arg (stmt, 0),
5088 gimple_call_arg (stmt, 1),
5089 gimple_call_arg (stmt, 2),
5090 gimple_call_arg (stmt, 3),
5091 fcode);
5092 case BUILT_IN_STPCPY:
5093 return gimple_fold_builtin_stpcpy (gsi);
5094 case BUILT_IN_STRCPY_CHK:
5095 case BUILT_IN_STPCPY_CHK:
5096 return gimple_fold_builtin_stxcpy_chk (gsi,
5097 gimple_call_arg (stmt, 0),
5098 gimple_call_arg (stmt, 1),
5099 gimple_call_arg (stmt, 2),
5100 fcode);
5101 case BUILT_IN_STRNCPY_CHK:
5102 case BUILT_IN_STPNCPY_CHK:
5103 return gimple_fold_builtin_stxncpy_chk (gsi,
5104 gimple_call_arg (stmt, 0),
5105 gimple_call_arg (stmt, 1),
5106 gimple_call_arg (stmt, 2),
5107 gimple_call_arg (stmt, 3),
5108 fcode);
5109 case BUILT_IN_SNPRINTF_CHK:
5110 case BUILT_IN_VSNPRINTF_CHK:
5111 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5113 case BUILT_IN_FPRINTF:
5114 case BUILT_IN_FPRINTF_UNLOCKED:
5115 case BUILT_IN_VFPRINTF:
5116 if (n == 2 || n == 3)
5117 return gimple_fold_builtin_fprintf (gsi,
5118 gimple_call_arg (stmt, 0),
5119 gimple_call_arg (stmt, 1),
5120 n == 3
5121 ? gimple_call_arg (stmt, 2)
5122 : NULL_TREE,
5123 fcode);
5124 break;
5125 case BUILT_IN_FPRINTF_CHK:
5126 case BUILT_IN_VFPRINTF_CHK:
5127 if (n == 3 || n == 4)
5128 return gimple_fold_builtin_fprintf (gsi,
5129 gimple_call_arg (stmt, 0),
5130 gimple_call_arg (stmt, 2),
5131 n == 4
5132 ? gimple_call_arg (stmt, 3)
5133 : NULL_TREE,
5134 fcode);
5135 break;
5136 case BUILT_IN_PRINTF:
5137 case BUILT_IN_PRINTF_UNLOCKED:
5138 case BUILT_IN_VPRINTF:
5139 if (n == 1 || n == 2)
5140 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5141 n == 2
5142 ? gimple_call_arg (stmt, 1)
5143 : NULL_TREE, fcode);
5144 break;
5145 case BUILT_IN_PRINTF_CHK:
5146 case BUILT_IN_VPRINTF_CHK:
5147 if (n == 2 || n == 3)
5148 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5149 n == 3
5150 ? gimple_call_arg (stmt, 2)
5151 : NULL_TREE, fcode);
5152 break;
5153 case BUILT_IN_ACC_ON_DEVICE:
5154 return gimple_fold_builtin_acc_on_device (gsi,
5155 gimple_call_arg (stmt, 0));
5156 case BUILT_IN_REALLOC:
5157 return gimple_fold_builtin_realloc (gsi);
5159 case BUILT_IN_CLEAR_PADDING:
5160 return gimple_fold_builtin_clear_padding (gsi);
5162 default:;
5165 /* Try the generic builtin folder. */
5166 bool ignore = (gimple_call_lhs (stmt) == NULL);
5167 tree result = fold_call_stmt (stmt, ignore);
5168 if (result)
5170 if (ignore)
5171 STRIP_NOPS (result);
5172 else
5173 result = fold_convert (gimple_call_return_type (stmt), result);
5174 gimplify_and_update_call_from_tree (gsi, result);
5175 return true;
5178 return false;
5181 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5182 function calls to constants, where possible. */
5184 static tree
5185 fold_internal_goacc_dim (const gimple *call)
5187 int axis = oacc_get_ifn_dim_arg (call);
5188 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5189 tree result = NULL_TREE;
5190 tree type = TREE_TYPE (gimple_call_lhs (call));
5192 switch (gimple_call_internal_fn (call))
5194 case IFN_GOACC_DIM_POS:
5195 /* If the size is 1, we know the answer. */
5196 if (size == 1)
5197 result = build_int_cst (type, 0);
5198 break;
5199 case IFN_GOACC_DIM_SIZE:
5200 /* If the size is not dynamic, we know the answer. */
5201 if (size)
5202 result = build_int_cst (type, size);
5203 break;
5204 default:
5205 break;
5208 return result;
5211 /* Return true if STMT is a __atomic_compare_exchange_N call which is
5212 suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second
5213 argument is &var where var is only addressable because of such calls. */
5215 bool
5216 optimize_atomic_compare_exchange_p (gimple *stmt)
5218 if (gimple_call_num_args (stmt) != 6
5219 || !flag_inline_atomics
5220 || !optimize
5221 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5222 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5223 || !gimple_vdef (stmt)
5224 || !gimple_vuse (stmt))
5225 return false;
5227 tree fndecl = gimple_call_fndecl (stmt);
5228 switch (DECL_FUNCTION_CODE (fndecl))
5230 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5231 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5232 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5233 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5234 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5235 break;
5236 default:
5237 return false;
5240 tree expected = gimple_call_arg (stmt, 1);
5241 if (TREE_CODE (expected) != ADDR_EXPR
5242 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5243 return false;
5245 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5246 if (!is_gimple_reg_type (etype)
5247 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5248 || TREE_THIS_VOLATILE (etype)
5249 || VECTOR_TYPE_P (etype)
5250 || TREE_CODE (etype) == COMPLEX_TYPE
5251 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5252 might not preserve all the bits. See PR71716. */
5253 || SCALAR_FLOAT_TYPE_P (etype)
5254 || maybe_ne (TYPE_PRECISION (etype),
5255 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5256 return false;
5258 tree weak = gimple_call_arg (stmt, 3);
5259 if (!integer_zerop (weak) && !integer_onep (weak))
5260 return false;
5262 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5263 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5264 machine_mode mode = TYPE_MODE (itype);
5266 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5267 == CODE_FOR_nothing
5268 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5269 return false;
5271 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5272 return false;
5274 return true;
5277 /* Fold
5278 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5279 into
5280 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5281 i = IMAGPART_EXPR <t>;
5282 r = (_Bool) i;
5283 e = REALPART_EXPR <t>; */
5285 void
5286 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5288 gimple *stmt = gsi_stmt (*gsi);
5289 tree fndecl = gimple_call_fndecl (stmt);
5290 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5291 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5292 tree ctype = build_complex_type (itype);
5293 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5294 bool throws = false;
5295 edge e = NULL;
5296 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5297 expected);
5298 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5299 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5300 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5302 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5303 build1 (VIEW_CONVERT_EXPR, itype,
5304 gimple_assign_lhs (g)));
5305 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5307 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5308 + int_size_in_bytes (itype);
5309 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5310 gimple_call_arg (stmt, 0),
5311 gimple_assign_lhs (g),
5312 gimple_call_arg (stmt, 2),
5313 build_int_cst (integer_type_node, flag),
5314 gimple_call_arg (stmt, 4),
5315 gimple_call_arg (stmt, 5));
5316 tree lhs = make_ssa_name (ctype);
5317 gimple_call_set_lhs (g, lhs);
5318 gimple_move_vops (g, stmt);
5319 tree oldlhs = gimple_call_lhs (stmt);
5320 if (stmt_can_throw_internal (cfun, stmt))
5322 throws = true;
5323 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5325 gimple_call_set_nothrow (as_a <gcall *> (g),
5326 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5327 gimple_call_set_lhs (stmt, NULL_TREE);
5328 gsi_replace (gsi, g, true);
5329 if (oldlhs)
5331 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5332 build1 (IMAGPART_EXPR, itype, lhs));
5333 if (throws)
5335 gsi_insert_on_edge_immediate (e, g);
5336 *gsi = gsi_for_stmt (g);
5338 else
5339 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5340 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5341 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5343 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5344 build1 (REALPART_EXPR, itype, lhs));
5345 if (throws && oldlhs == NULL_TREE)
5347 gsi_insert_on_edge_immediate (e, g);
5348 *gsi = gsi_for_stmt (g);
5350 else
5351 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5352 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5354 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5355 VIEW_CONVERT_EXPR,
5356 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5357 gimple_assign_lhs (g)));
5358 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5360 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5361 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5362 *gsi = gsiret;
5365 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5366 doesn't fit into TYPE. The overflow test is performed regardless of
5367 -fwrapv, and even for unsigned types. */
5369 bool
5370 arith_overflowed_p (enum tree_code code, const_tree type,
5371 const_tree arg0, const_tree arg1)
5373 widest2_int warg0 = widest2_int_cst (arg0);
5374 widest2_int warg1 = widest2_int_cst (arg1);
5375 widest2_int wres;
5376 switch (code)
5378 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5379 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5380 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5381 default: gcc_unreachable ();
5383 signop sign = TYPE_SIGN (type);
5384 if (sign == UNSIGNED && wi::neg_p (wres))
5385 return true;
5386 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
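/* For example, with TYPE an 8-bit unsigned type and ARG0 = 200,
   ARG1 = 100, PLUS_EXPR yields 300 in the double-width widest2_int
   arithmetic above; 300 needs more than 8 bits of precision, so the
   function returns true.  */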
5389 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5390 for the memory it references, otherwise return null. VECTYPE is the
5391 type of the memory vector. */
5393 static tree
5394 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5396 tree ptr = gimple_call_arg (call, 0);
5397 tree alias_align = gimple_call_arg (call, 1);
5398 tree mask = gimple_call_arg (call, 2);
5399 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5400 return NULL_TREE;
5402 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5403 if (TYPE_ALIGN (vectype) != align)
5404 vectype = build_aligned_type (vectype, align);
5405 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5406 return fold_build2 (MEM_REF, vectype, ptr, offset);
5409 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5411 static bool
5412 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5414 tree lhs = gimple_call_lhs (call);
5415 if (!lhs)
5416 return false;
5418 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5420 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5421 gimple_set_location (new_stmt, gimple_location (call));
5422 gimple_move_vops (new_stmt, call);
5423 gsi_replace (gsi, new_stmt, false);
5424 return true;
5426 return false;
5429 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5431 static bool
5432 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5434 tree rhs = gimple_call_arg (call, 3);
5435 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5437 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5438 gimple_set_location (new_stmt, gimple_location (call));
5439 gimple_move_vops (new_stmt, call);
5440 gsi_replace (gsi, new_stmt, false);
5441 return true;
5443 return false;
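/* A sketch of the two mask load/store folds in GIMPLE-like pseudocode
   (the names are illustrative): when the mask is all ones,

     lhs = IFN_MASK_LOAD (ptr, align, { -1, ... });  =>  lhs = MEM[ptr];
     IFN_MASK_STORE (ptr, align, { -1, ... }, rhs);  =>  MEM[ptr] = rhs;

   i.e. an unconditional masked access becomes a plain vector memory
   reference with the recorded alignment.  */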
5446 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5447 The statement may be replaced by another statement, e.g., if the call
5448 simplifies to a constant value. Return true if any changes were made.
5449 It is assumed that the operands have been previously folded. */
5451 static bool
5452 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5454 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5455 tree callee;
5456 bool changed = false;
5458 /* Check for virtual calls that became direct calls. */
5459 callee = gimple_call_fn (stmt);
5460 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5462 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5464 if (dump_file && virtual_method_call_p (callee)
5465 && !possible_polymorphic_call_target_p
5466 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5467 (OBJ_TYPE_REF_EXPR (callee)))))
5469 fprintf (dump_file,
5470 "Type inheritance inconsistent devirtualization of ");
5471 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5472 fprintf (dump_file, " to ");
5473 print_generic_expr (dump_file, callee, TDF_SLIM);
5474 fprintf (dump_file, "\n");
5477 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5478 changed = true;
5480 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5482 bool final;
5483 vec <cgraph_node *>targets
5484 = possible_polymorphic_call_targets (callee, stmt, &final);
5485 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5487 tree lhs = gimple_call_lhs (stmt);
5488 if (dump_enabled_p ())
5490 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5491 "folding virtual function call to %s\n",
5492 targets.length () == 1
5493 ? targets[0]->name ()
5494 : "__builtin_unreachable");
5496 if (targets.length () == 1)
5498 tree fndecl = targets[0]->decl;
5499 gimple_call_set_fndecl (stmt, fndecl);
5500 changed = true;
5501 /* If changing the call to __cxa_pure_virtual
5502 or similar noreturn function, adjust gimple_call_fntype
5503 too. */
5504 if (gimple_call_noreturn_p (stmt)
5505 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5506 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5507 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5508 == void_type_node))
5509 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5510 /* If the call becomes noreturn, remove the lhs. */
5511 if (lhs
5512 && gimple_call_noreturn_p (stmt)
5513 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5514 || should_remove_lhs_p (lhs)))
5516 if (TREE_CODE (lhs) == SSA_NAME)
5518 tree var = create_tmp_var (TREE_TYPE (lhs));
5519 tree def = get_or_create_ssa_default_def (cfun, var);
5520 gimple *new_stmt = gimple_build_assign (lhs, def);
5521 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5523 gimple_call_set_lhs (stmt, NULL_TREE);
5525 maybe_remove_unused_call_args (cfun, stmt);
5527 else
5529 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5530 gimple *new_stmt = gimple_build_call (fndecl, 0);
5531 gimple_set_location (new_stmt, gimple_location (stmt));
5532 /* If the call had an SSA name as lhs, morph that into
5533 an uninitialized value. */
5534 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5536 tree var = create_tmp_var (TREE_TYPE (lhs));
5537 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5538 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5539 set_ssa_default_def (cfun, var, lhs);
5541 gimple_move_vops (new_stmt, stmt);
5542 gsi_replace (gsi, new_stmt, false);
5543 return true;
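/* Hedged example of the devirtualization above (hypothetical C++ source,
   not from this file):
     struct S final { virtual int f () { return 1; } };
     int g (S *p) { return p->f (); }
   Type-inheritance analysis can prove S::f is the only possible target,
   so the OBJ_TYPE_REF call is turned into a direct call to S::f; if the
   target set is provably empty, the call is replaced by a call to
   __builtin_unreachable () instead.  */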
5549 /* Check for indirect calls that became direct calls, and then
5550 no longer require a static chain. */
5551 if (gimple_call_chain (stmt))
5553 tree fn = gimple_call_fndecl (stmt);
5554 if (fn && !DECL_STATIC_CHAIN (fn))
5556 gimple_call_set_chain (stmt, NULL);
5557 changed = true;
5561 if (inplace)
5562 return changed;
5564 /* Check for builtins that CCP can handle using information not
5565 available in the generic fold routines. */
5566 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5568 if (gimple_fold_builtin (gsi))
5569 changed = true;
5571 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5573 changed |= targetm.gimple_fold_builtin (gsi);
5575 else if (gimple_call_internal_p (stmt))
5577 enum tree_code subcode = ERROR_MARK;
5578 tree result = NULL_TREE;
5579 bool cplx_result = false;
5580 tree overflow = NULL_TREE;
5581 switch (gimple_call_internal_fn (stmt))
5583 case IFN_BUILTIN_EXPECT:
5584 result = fold_builtin_expect (gimple_location (stmt),
5585 gimple_call_arg (stmt, 0),
5586 gimple_call_arg (stmt, 1),
5587 gimple_call_arg (stmt, 2),
5588 NULL_TREE);
5589 break;
5590 case IFN_UBSAN_OBJECT_SIZE:
5592 tree offset = gimple_call_arg (stmt, 1);
5593 tree objsize = gimple_call_arg (stmt, 2);
5594 if (integer_all_onesp (objsize)
5595 || (TREE_CODE (offset) == INTEGER_CST
5596 && TREE_CODE (objsize) == INTEGER_CST
5597 && tree_int_cst_le (offset, objsize)))
5599 replace_call_with_value (gsi, NULL_TREE);
5600 return true;
5603 break;
5604 case IFN_UBSAN_PTR:
5605 if (integer_zerop (gimple_call_arg (stmt, 1)))
5607 replace_call_with_value (gsi, NULL_TREE);
5608 return true;
5610 break;
5611 case IFN_UBSAN_BOUNDS:
5613 tree index = gimple_call_arg (stmt, 1);
5614 tree bound = gimple_call_arg (stmt, 2);
5615 if (TREE_CODE (index) == INTEGER_CST
5616 && TREE_CODE (bound) == INTEGER_CST)
5618 index = fold_convert (TREE_TYPE (bound), index);
5619 if (TREE_CODE (index) == INTEGER_CST
5620 && tree_int_cst_le (index, bound))
5622 replace_call_with_value (gsi, NULL_TREE);
5623 return true;
5627 break;
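/* For example (a sketch): when both operands of the bounds check are
   constants, as in IFN_UBSAN_BOUNDS (kind, 4, 9), then since 4 <= 9 the
   check can never fire at run time and the call is deleted above;
   IFN_UBSAN_PTR with a zero offset is dropped the same way.  */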
5628 case IFN_GOACC_DIM_SIZE:
5629 case IFN_GOACC_DIM_POS:
5630 result = fold_internal_goacc_dim (stmt);
5631 break;
5632 case IFN_UBSAN_CHECK_ADD:
5633 subcode = PLUS_EXPR;
5634 break;
5635 case IFN_UBSAN_CHECK_SUB:
5636 subcode = MINUS_EXPR;
5637 break;
5638 case IFN_UBSAN_CHECK_MUL:
5639 subcode = MULT_EXPR;
5640 break;
5641 case IFN_ADD_OVERFLOW:
5642 subcode = PLUS_EXPR;
5643 cplx_result = true;
5644 break;
5645 case IFN_SUB_OVERFLOW:
5646 subcode = MINUS_EXPR;
5647 cplx_result = true;
5648 break;
5649 case IFN_MUL_OVERFLOW:
5650 subcode = MULT_EXPR;
5651 cplx_result = true;
5652 break;
5653 case IFN_MASK_LOAD:
5654 changed |= gimple_fold_mask_load (gsi, stmt);
5655 break;
5656 case IFN_MASK_STORE:
5657 changed |= gimple_fold_mask_store (gsi, stmt);
5658 break;
5659 default:
5660 break;
5662 if (subcode != ERROR_MARK)
5664 tree arg0 = gimple_call_arg (stmt, 0);
5665 tree arg1 = gimple_call_arg (stmt, 1);
5666 tree type = TREE_TYPE (arg0);
5667 if (cplx_result)
5669 tree lhs = gimple_call_lhs (stmt);
5670 if (lhs == NULL_TREE)
5671 type = NULL_TREE;
5672 else
5673 type = TREE_TYPE (TREE_TYPE (lhs));
5675 if (type == NULL_TREE)
5677 /* x = y + 0; x = y - 0; x = y * 0; */
5678 else if (integer_zerop (arg1))
5679 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5680 /* x = 0 + y; x = 0 * y; */
5681 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5682 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5683 /* x = y - y; */
5684 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5685 result = integer_zero_node;
5686 /* x = y * 1; x = 1 * y; */
5687 else if (subcode == MULT_EXPR && integer_onep (arg1))
5688 result = arg0;
5689 else if (subcode == MULT_EXPR && integer_onep (arg0))
5690 result = arg1;
5691 else if (TREE_CODE (arg0) == INTEGER_CST
5692 && TREE_CODE (arg1) == INTEGER_CST)
5694 if (cplx_result)
5695 result = int_const_binop (subcode, fold_convert (type, arg0),
5696 fold_convert (type, arg1));
5697 else
5698 result = int_const_binop (subcode, arg0, arg1);
5699 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5701 if (cplx_result)
5702 overflow = build_one_cst (type);
5703 else
5704 result = NULL_TREE;
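/* Sketch of the identities above applied to the overflow builtins
   (hypothetical snippet): for
     r = __builtin_add_overflow (y, 0, &s)
   the IFN_ADD_OVERFLOW result folds to COMPLEX_EXPR <y, 0>, i.e. the sum
   is y and the overflow flag is zero; with two constant operands the
   whole call folds to a constant pair, with overflow decided by
   arith_overflowed_p.  */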
5707 if (result)
5709 if (result == integer_zero_node)
5710 result = build_zero_cst (type);
5711 else if (cplx_result && TREE_TYPE (result) != type)
5713 if (TREE_CODE (result) == INTEGER_CST)
5715 if (arith_overflowed_p (PLUS_EXPR, type, result,
5716 integer_zero_node))
5717 overflow = build_one_cst (type);
5719 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5720 && TYPE_UNSIGNED (type))
5721 || (TYPE_PRECISION (type)
5722 < (TYPE_PRECISION (TREE_TYPE (result))
5723 + (TYPE_UNSIGNED (TREE_TYPE (result))
5724 && !TYPE_UNSIGNED (type)))))
5725 result = NULL_TREE;
5726 if (result)
5727 result = fold_convert (type, result);
5732 if (result)
5734 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5735 result = drop_tree_overflow (result);
5736 if (cplx_result)
5738 if (overflow == NULL_TREE)
5739 overflow = build_zero_cst (TREE_TYPE (result));
5740 tree ctype = build_complex_type (TREE_TYPE (result));
5741 if (TREE_CODE (result) == INTEGER_CST
5742 && TREE_CODE (overflow) == INTEGER_CST)
5743 result = build_complex (ctype, result, overflow);
5744 else
5745 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5746 ctype, result, overflow);
5748 gimplify_and_update_call_from_tree (gsi, result);
5749 changed = true;
5753 return changed;
5757 /* Return true if NAME has a use on STMT. */
5759 static bool
5760 has_use_on_stmt (tree name, gimple *stmt)
5762 imm_use_iterator iter;
5763 use_operand_p use_p;
5764 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5765 if (USE_STMT (use_p) == stmt)
5766 return true;
5767 return false;
5770 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5771 gimple_simplify.
5773 Replaces *GSI with the simplification result in RES_OP
5774 and the associated statements in *SEQ. Does the replacement
5775 according to INPLACE and returns true if the operation succeeded. */
5777 static bool
5778 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5779 gimple_match_op *res_op,
5780 gimple_seq *seq, bool inplace)
5782 gimple *stmt = gsi_stmt (*gsi);
5783 tree *ops = res_op->ops;
5784 unsigned int num_ops = res_op->num_ops;
5786 /* Play safe and do not allow abnormals to be mentioned in
5787 newly created statements. See also maybe_push_res_to_seq.
5788 As an exception, allow such uses if there was a use of the
5789 same SSA name on the old stmt. */
5790 for (unsigned int i = 0; i < num_ops; ++i)
5791 if (TREE_CODE (ops[i]) == SSA_NAME
5792 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5793 && !has_use_on_stmt (ops[i], stmt))
5794 return false;
5796 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5797 for (unsigned int i = 0; i < 2; ++i)
5798 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5799 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5800 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5801 return false;
5803 /* Don't insert new statements when INPLACE is true, even if we could
5804 reuse STMT for the final statement. */
5805 if (inplace && !gimple_seq_empty_p (*seq))
5806 return false;
5808 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5810 gcc_assert (res_op->code.is_tree_code ());
5811 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
5812 /* GIMPLE_CONDs condition may not throw. */
5813 && (!flag_exceptions
5814 || !cfun->can_throw_non_call_exceptions
5815 || !operation_could_trap_p (res_op->code,
5816 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5817 false, NULL_TREE)))
5818 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5819 else if (res_op->code == SSA_NAME)
5820 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5821 build_zero_cst (TREE_TYPE (ops[0])));
5822 else if (res_op->code == INTEGER_CST)
5824 if (integer_zerop (ops[0]))
5825 gimple_cond_make_false (cond_stmt);
5826 else
5827 gimple_cond_make_true (cond_stmt);
5829 else if (!inplace)
5831 tree res = maybe_push_res_to_seq (res_op, seq);
5832 if (!res)
5833 return false;
5834 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5835 build_zero_cst (TREE_TYPE (res)));
5837 else
5838 return false;
5839 if (dump_file && (dump_flags & TDF_DETAILS))
5841 fprintf (dump_file, "gimple_simplified to ");
5842 if (!gimple_seq_empty_p (*seq))
5843 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5844 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5845 0, TDF_SLIM);
5847 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5848 return true;
5850 else if (is_gimple_assign (stmt)
5851 && res_op->code.is_tree_code ())
5853 if (!inplace
5854 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
5856 maybe_build_generic_op (res_op);
5857 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5858 res_op->op_or_null (0),
5859 res_op->op_or_null (1),
5860 res_op->op_or_null (2));
5861 if (dump_file && (dump_flags & TDF_DETAILS))
5863 fprintf (dump_file, "gimple_simplified to ");
5864 if (!gimple_seq_empty_p (*seq))
5865 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5866 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5867 0, TDF_SLIM);
5869 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5870 return true;
5873 else if (res_op->code.is_fn_code ()
5874 && gimple_call_combined_fn (stmt) == res_op->code)
5876 gcc_assert (num_ops == gimple_call_num_args (stmt));
5877 for (unsigned int i = 0; i < num_ops; ++i)
5878 gimple_call_set_arg (stmt, i, ops[i]);
5879 if (dump_file && (dump_flags & TDF_DETAILS))
5881 fprintf (dump_file, "gimple_simplified to ");
5882 if (!gimple_seq_empty_p (*seq))
5883 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5884 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5886 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5887 return true;
5889 else if (!inplace)
5891 if (gimple_has_lhs (stmt))
5893 tree lhs = gimple_get_lhs (stmt);
5894 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5895 return false;
5896 if (dump_file && (dump_flags & TDF_DETAILS))
5898 fprintf (dump_file, "gimple_simplified to ");
5899 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5901 gsi_replace_with_seq_vops (gsi, *seq);
5902 return true;
5904 else
5905 gcc_unreachable ();
5908 return false;
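/* Usage sketch (hypothetical GIMPLE): if gimple_simplify reduces the
   condition of
     if (a_2 == a_2)
   to the constant 1 for integral a_2, the code above calls
   gimple_cond_make_true; a simplification to an SSA name x_1 instead
   rewrites the condition to x_1 != 0.  */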
5911 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5913 static bool
5914 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5916 bool res = false;
5917 tree *orig_t = t;
5919 if (TREE_CODE (*t) == ADDR_EXPR)
5920 t = &TREE_OPERAND (*t, 0);
5922 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5923 generic vector extension. The actual vector referenced is
5924 view-converted to an array type for this purpose. If the index
5925 is constant, the canonical representation in the middle-end is a
5926 BIT_FIELD_REF, so rewrite the former to the latter here. */
5927 if (TREE_CODE (*t) == ARRAY_REF
5928 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5929 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5930 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5932 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5933 if (VECTOR_TYPE_P (vtype))
5935 tree low = array_ref_low_bound (*t);
5936 if (TREE_CODE (low) == INTEGER_CST)
5938 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5940 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5941 wi::to_widest (low));
5942 idx = wi::mul (idx, wi::to_widest
5943 (TYPE_SIZE (TREE_TYPE (*t))));
5944 widest_int ext
5945 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5946 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5948 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5949 TREE_TYPE (*t),
5950 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5951 TYPE_SIZE (TREE_TYPE (*t)),
5952 wide_int_to_tree (bitsizetype, idx));
5953 res = true;
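/* E.g. (a sketch using the GNU vector extension): for
     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;
   the front end represents v[2] as
     VIEW_CONVERT_EXPR<int[4]>(v)[2]
   which the code above rewrites to BIT_FIELD_REF <v, 32, 64>, i.e. 32
   bits extracted at bit offset 64.  */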
5960 while (handled_component_p (*t))
5961 t = &TREE_OPERAND (*t, 0);
5963 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5964 of invariant addresses into a SSA name MEM_REF address. */
5965 if (TREE_CODE (*t) == MEM_REF
5966 || TREE_CODE (*t) == TARGET_MEM_REF)
5968 tree addr = TREE_OPERAND (*t, 0);
5969 if (TREE_CODE (addr) == ADDR_EXPR
5970 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5971 || handled_component_p (TREE_OPERAND (addr, 0))))
5973 tree base;
5974 poly_int64 coffset;
5975 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5976 &coffset);
5977 if (!base)
5979 if (is_debug)
5980 return false;
5981 gcc_unreachable ();
5984 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5985 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5986 TREE_OPERAND (*t, 1),
5987 size_int (coffset));
5988 res = true;
5990 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5991 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5994 /* Canonicalize back MEM_REFs to plain reference trees if the object
5995 accessed is a decl that has the same access semantics as the MEM_REF. */
5996 if (TREE_CODE (*t) == MEM_REF
5997 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5998 && integer_zerop (TREE_OPERAND (*t, 1))
5999 && MR_DEPENDENCE_CLIQUE (*t) == 0)
6001 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6002 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6003 if (/* Same volatile qualification. */
6004 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6005 /* Same TBAA behavior with -fstrict-aliasing. */
6006 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6007 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6008 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6009 /* Same alignment. */
6010 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6011 /* We have to look out here to not drop a required conversion
6012 from the rhs to the lhs if *t appears on the lhs or vice-versa
6013 if it appears on the rhs. Thus require strict type
6014 compatibility. */
6015 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6017 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6018 res = true;
6022 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6023 && TREE_CODE (*t) == MEM_REF
6024 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6026 tree base;
6027 poly_int64 coffset;
6028 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6029 &coffset);
6030 if (base)
6032 gcc_assert (TREE_CODE (base) == MEM_REF);
6033 poly_int64 moffset;
6034 if (mem_ref_offset (base).to_shwi (&moffset))
6036 coffset += moffset;
6037 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6039 coffset += moffset;
6040 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6041 return true;
6047 /* Canonicalize TARGET_MEM_REF in particular with respect to
6048 the indexes becoming constant. */
6049 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6051 tree tem = maybe_fold_tmr (*t);
6052 if (tem)
6054 *t = tem;
6055 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6056 recompute_tree_invariant_for_addr_expr (*orig_t);
6057 res = true;
6061 return res;
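/* Illustrative sketches of the canonicalizations above: a propagated
   invariant address such as
     MEM[&s.f, 0]
   is rewritten in terms of its base, roughly MEM[&s, offsetof (f)],
   and MEM[(int *)&i, 0] with int i; becomes the plain reference i when
   volatility, TBAA type and alignment of the MEM_REF match the
   declaration.  */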
6064 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6065 distinguishes the two cases. */
6067 static bool
6068 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6070 bool changed = false;
6071 gimple *stmt = gsi_stmt (*gsi);
6072 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6073 unsigned i;
6074 fold_defer_overflow_warnings ();
6076 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6077 after propagation.
6078 ??? This shouldn't be done in generic folding but in the
6079 propagation helpers which also know whether an address was
6080 propagated.
6081 Also canonicalize operand order. */
6082 switch (gimple_code (stmt))
6084 case GIMPLE_ASSIGN:
6085 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6087 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6088 if ((REFERENCE_CLASS_P (*rhs)
6089 || TREE_CODE (*rhs) == ADDR_EXPR)
6090 && maybe_canonicalize_mem_ref_addr (rhs))
6091 changed = true;
6092 tree *lhs = gimple_assign_lhs_ptr (stmt);
6093 if (REFERENCE_CLASS_P (*lhs)
6094 && maybe_canonicalize_mem_ref_addr (lhs))
6095 changed = true;
6097 else
6099 /* Canonicalize operand order. */
6100 enum tree_code code = gimple_assign_rhs_code (stmt);
6101 if (TREE_CODE_CLASS (code) == tcc_comparison
6102 || commutative_tree_code (code)
6103 || commutative_ternary_tree_code (code))
6105 tree rhs1 = gimple_assign_rhs1 (stmt);
6106 tree rhs2 = gimple_assign_rhs2 (stmt);
6107 if (tree_swap_operands_p (rhs1, rhs2))
6109 gimple_assign_set_rhs1 (stmt, rhs2);
6110 gimple_assign_set_rhs2 (stmt, rhs1);
6111 if (TREE_CODE_CLASS (code) == tcc_comparison)
6112 gimple_assign_set_rhs_code (stmt,
6113 swap_tree_comparison (code));
6114 changed = true;
6118 break;
6119 case GIMPLE_CALL:
6121 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6123 tree *arg = gimple_call_arg_ptr (stmt, i);
6124 if (REFERENCE_CLASS_P (*arg)
6125 && maybe_canonicalize_mem_ref_addr (arg))
6126 changed = true;
6128 tree *lhs = gimple_call_lhs_ptr (stmt);
6129 if (*lhs
6130 && REFERENCE_CLASS_P (*lhs)
6131 && maybe_canonicalize_mem_ref_addr (lhs))
6132 changed = true;
6133 break;
6135 case GIMPLE_ASM:
6137 gasm *asm_stmt = as_a <gasm *> (stmt);
6138 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6140 tree link = gimple_asm_output_op (asm_stmt, i);
6141 tree op = TREE_VALUE (link);
6142 if (REFERENCE_CLASS_P (op)
6143 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6144 changed = true;
6146 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6148 tree link = gimple_asm_input_op (asm_stmt, i);
6149 tree op = TREE_VALUE (link);
6150 if ((REFERENCE_CLASS_P (op)
6151 || TREE_CODE (op) == ADDR_EXPR)
6152 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6153 changed = true;
6156 break;
6157 case GIMPLE_DEBUG:
6158 if (gimple_debug_bind_p (stmt))
6160 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6161 if (*val
6162 && (REFERENCE_CLASS_P (*val)
6163 || TREE_CODE (*val) == ADDR_EXPR)
6164 && maybe_canonicalize_mem_ref_addr (val, true))
6165 changed = true;
6167 break;
6168 case GIMPLE_COND:
6170 /* Canonicalize operand order. */
6171 tree lhs = gimple_cond_lhs (stmt);
6172 tree rhs = gimple_cond_rhs (stmt);
6173 if (tree_swap_operands_p (lhs, rhs))
6175 gcond *gc = as_a <gcond *> (stmt);
6176 gimple_cond_set_lhs (gc, rhs);
6177 gimple_cond_set_rhs (gc, lhs);
6178 gimple_cond_set_code (gc,
6179 swap_tree_comparison (gimple_cond_code (gc)));
6180 changed = true;
6183 default:;
6186 /* Dispatch to pattern-based folding. */
6187 if (!inplace
6188 || is_gimple_assign (stmt)
6189 || gimple_code (stmt) == GIMPLE_COND)
6191 gimple_seq seq = NULL;
6192 gimple_match_op res_op;
6193 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6194 valueize, valueize))
6196 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6197 changed = true;
6198 else
6199 gimple_seq_discard (seq);
6203 stmt = gsi_stmt (*gsi);
6205 /* Fold the main computation performed by the statement. */
6206 switch (gimple_code (stmt))
6208 case GIMPLE_ASSIGN:
6210 /* Try to canonicalize for boolean-typed X the comparisons
6211 X == 0, X == 1, X != 0, and X != 1. */
6212 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6213 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6215 tree lhs = gimple_assign_lhs (stmt);
6216 tree op1 = gimple_assign_rhs1 (stmt);
6217 tree op2 = gimple_assign_rhs2 (stmt);
6218 tree type = TREE_TYPE (op1);
6220 /* Check whether the comparison operands are of the same boolean
6221 type as the result type.
6222 Check that the second operand is an integer constant with value
6223 one or zero. */
6224 if (TREE_CODE (op2) == INTEGER_CST
6225 && (integer_zerop (op2) || integer_onep (op2))
6226 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6228 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6229 bool is_logical_not = false;
6231 /* X == 0 and X != 1 is a logical-not of X;
6232 X == 1 and X != 0 is X */
6233 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6234 || (cmp_code == NE_EXPR && integer_onep (op2)))
6235 is_logical_not = true;
6237 if (is_logical_not == false)
6238 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6239 /* Only for X of one-bit precision type is the transformation
6240 !X -> ~X valid. */
6241 else if (TYPE_PRECISION (type) == 1)
6242 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6243 /* Otherwise we use !X -> X ^ 1. */
6244 else
6245 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6246 build_int_cst (type, 1));
6247 changed = true;
6248 break;
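/* For example (a sketch): with one-bit precision _Bool x,
     tem_2 = x_1 == 0;
   becomes tem_2 = ~x_1;, a wider boolean type gets
     tem_2 = x_1 ^ 1;
   instead, and x_1 != 0 simplifies to plain x_1.  */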
6252 unsigned old_num_ops = gimple_num_ops (stmt);
6253 tree lhs = gimple_assign_lhs (stmt);
6254 tree new_rhs = fold_gimple_assign (gsi);
6255 if (new_rhs
6256 && !useless_type_conversion_p (TREE_TYPE (lhs),
6257 TREE_TYPE (new_rhs)))
6258 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6259 if (new_rhs
6260 && (!inplace
6261 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6263 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6264 changed = true;
6266 break;
6269 case GIMPLE_CALL:
6270 changed |= gimple_fold_call (gsi, inplace);
6271 break;
6273 case GIMPLE_DEBUG:
6274 if (gimple_debug_bind_p (stmt))
6276 tree val = gimple_debug_bind_get_value (stmt);
6277 if (val
6278 && REFERENCE_CLASS_P (val))
6280 tree tem = maybe_fold_reference (val);
6281 if (tem)
6283 gimple_debug_bind_set_value (stmt, tem);
6284 changed = true;
6287 else if (val
6288 && TREE_CODE (val) == ADDR_EXPR)
6290 tree ref = TREE_OPERAND (val, 0);
6291 tree tem = maybe_fold_reference (ref);
6292 if (tem)
6294 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6295 gimple_debug_bind_set_value (stmt, tem);
6296 changed = true;
6300 break;
6302 case GIMPLE_RETURN:
6304 greturn *ret_stmt = as_a<greturn *> (stmt);
6305 tree ret = gimple_return_retval(ret_stmt);
6307 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6309 tree val = valueize (ret);
6310 if (val && val != ret
6311 && may_propagate_copy (ret, val))
6313 gimple_return_set_retval (ret_stmt, val);
6314 changed = true;
6318 break;
6320 default:;
6323 stmt = gsi_stmt (*gsi);
6325 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6326 return changed;
6329 /* Valueization callback that ends up not following SSA edges. */
6331 tree
6332 no_follow_ssa_edges (tree)
6334 return NULL_TREE;
6337 /* Valueization callback that ends up following single-use SSA edges only. */
6339 tree
6340 follow_single_use_edges (tree val)
6342 if (TREE_CODE (val) == SSA_NAME
6343 && !has_single_use (val))
6344 return NULL_TREE;
6345 return val;
6348 /* Valueization callback that follows all SSA edges. */
6350 tree
6351 follow_all_ssa_edges (tree val)
6353 return val;
6356 /* Fold the statement pointed to by GSI. In some cases, this function may
6357 replace the whole statement with a new one. Returns true iff folding
6358 makes any changes.
6359 The statement pointed to by GSI should be in valid gimple form but may
6360 be in an unfolded state resulting from, for example, constant propagation,
6361 which can produce *&x = 0. */
6363 bool
6364 fold_stmt (gimple_stmt_iterator *gsi)
6366 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6369 bool
6370 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6372 return fold_stmt_1 (gsi, false, valueize);
6375 /* Perform the minimal folding on statement *GSI. Only operations like
6376 *&x created by constant propagation are handled. The statement cannot
6377 be replaced with a new one. Return true if the statement was
6378 changed, false otherwise.
6379 The statement *GSI should be in valid gimple form but may
6380 be in an unfolded state resulting from, for example, constant propagation,
6381 which can produce *&x = 0. */
6383 bool
6384 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6386 gimple *stmt = gsi_stmt (*gsi);
6387 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6388 gcc_assert (gsi_stmt (*gsi) == stmt);
6389 return changed;
6392 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6393 if EXPR is null or we don't know how.
6394 If non-null, the result always has boolean type. */
6396 static tree
6397 canonicalize_bool (tree expr, bool invert)
6399 if (!expr)
6400 return NULL_TREE;
6401 else if (invert)
6403 if (integer_nonzerop (expr))
6404 return boolean_false_node;
6405 else if (integer_zerop (expr))
6406 return boolean_true_node;
6407 else if (TREE_CODE (expr) == SSA_NAME)
6408 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6409 build_int_cst (TREE_TYPE (expr), 0));
6410 else if (COMPARISON_CLASS_P (expr))
6411 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6412 boolean_type_node,
6413 TREE_OPERAND (expr, 0),
6414 TREE_OPERAND (expr, 1));
6415 else
6416 return NULL_TREE;
6418 else
6420 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6421 return expr;
6422 if (integer_nonzerop (expr))
6423 return boolean_true_node;
6424 else if (integer_zerop (expr))
6425 return boolean_false_node;
6426 else if (TREE_CODE (expr) == SSA_NAME)
6427 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6428 build_int_cst (TREE_TYPE (expr), 0));
6429 else if (COMPARISON_CLASS_P (expr))
6430 return fold_build2 (TREE_CODE (expr),
6431 boolean_type_node,
6432 TREE_OPERAND (expr, 0),
6433 TREE_OPERAND (expr, 1));
6434 else
6435 return NULL_TREE;
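/* E.g. (a sketch): canonicalize_bool (x < y, true) yields the inverted
   comparison x >= y of boolean_type_node, while canonicalize_bool
   (name_1, false) for a non-boolean integer name_1 yields
   name_1 != 0.  */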
6439 /* Check to see if a boolean expression EXPR is logically equivalent to the
6440 comparison (OP1 CODE OP2). Check for various identities involving
6441 SSA_NAMEs. */
6443 static bool
6444 same_bool_comparison_p (const_tree expr, enum tree_code code,
6445 const_tree op1, const_tree op2)
6447 gimple *s;
6449 /* The obvious case. */
6450 if (TREE_CODE (expr) == code
6451 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6452 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6453 return true;
6455 /* Check for comparing (name, name != 0) and the case where expr
6456 is an SSA_NAME with a definition matching the comparison. */
6457 if (TREE_CODE (expr) == SSA_NAME
6458 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6460 if (operand_equal_p (expr, op1, 0))
6461 return ((code == NE_EXPR && integer_zerop (op2))
6462 || (code == EQ_EXPR && integer_nonzerop (op2)));
6463 s = SSA_NAME_DEF_STMT (expr);
6464 if (is_gimple_assign (s)
6465 && gimple_assign_rhs_code (s) == code
6466 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6467 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6468 return true;
6471 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6472 of name is a comparison, recurse. */
6473 if (TREE_CODE (op1) == SSA_NAME
6474 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6476 s = SSA_NAME_DEF_STMT (op1);
6477 if (is_gimple_assign (s)
6478 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6480 enum tree_code c = gimple_assign_rhs_code (s);
6481 if ((c == NE_EXPR && integer_zerop (op2))
6482 || (c == EQ_EXPR && integer_nonzerop (op2)))
6483 return same_bool_comparison_p (expr, c,
6484 gimple_assign_rhs1 (s),
6485 gimple_assign_rhs2 (s));
6486 if ((c == EQ_EXPR && integer_zerop (op2))
6487 || (c == NE_EXPR && integer_nonzerop (op2)))
6488 return same_bool_comparison_p (expr,
6489 invert_tree_comparison (c, false),
6490 gimple_assign_rhs1 (s),
6491 gimple_assign_rhs2 (s));
6494 return false;
6497 /* Check to see if two boolean expressions OP1 and OP2 are logically
6498 equivalent. */
6500 static bool
6501 same_bool_result_p (const_tree op1, const_tree op2)
6503 /* Simple cases first. */
6504 if (operand_equal_p (op1, op2, 0))
6505 return true;
6507 /* Check the cases where at least one of the operands is a comparison.
6508 These are a bit smarter than operand_equal_p in that they apply some
6509 identities on SSA_NAMEs. */
6510 if (COMPARISON_CLASS_P (op2)
6511 && same_bool_comparison_p (op1, TREE_CODE (op2),
6512 TREE_OPERAND (op2, 0),
6513 TREE_OPERAND (op2, 1)))
6514 return true;
6515 if (COMPARISON_CLASS_P (op1)
6516 && same_bool_comparison_p (op2, TREE_CODE (op1),
6517 TREE_OPERAND (op1, 0),
6518 TREE_OPERAND (op1, 1)))
6519 return true;
6521 /* Default case. */
6522 return false;
6525 /* Forward declarations for some mutually recursive functions. */
6527 static tree
6528 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6529 enum tree_code code2, tree op2a, tree op2b);
6530 static tree
6531 and_var_with_comparison (tree type, tree var, bool invert,
6532 enum tree_code code2, tree op2a, tree op2b);
6533 static tree
6534 and_var_with_comparison_1 (tree type, gimple *stmt,
6535 enum tree_code code2, tree op2a, tree op2b);
6536 static tree
6537 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6538 enum tree_code code2, tree op2a, tree op2b);
6539 static tree
6540 or_var_with_comparison (tree, tree var, bool invert,
6541 enum tree_code code2, tree op2a, tree op2b);
6542 static tree
6543 or_var_with_comparison_1 (tree, gimple *stmt,
6544 enum tree_code code2, tree op2a, tree op2b);
6546 /* Helper function for and_comparisons_1: try to simplify the AND of the
6547 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6548 If INVERT is true, invert the value of the VAR before doing the AND.
6549 Return NULL_TREE if we can't simplify this to a single expression. */
6551 static tree
6552 and_var_with_comparison (tree type, tree var, bool invert,
6553 enum tree_code code2, tree op2a, tree op2b)
6555 tree t;
6556 gimple *stmt = SSA_NAME_DEF_STMT (var);
6558 /* We can only deal with variables whose definitions are assignments. */
6559 if (!is_gimple_assign (stmt))
6560 return NULL_TREE;
6562 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6563 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6564 Then we only have to consider the simpler non-inverted cases. */
6565 if (invert)
6566 t = or_var_with_comparison_1 (type, stmt,
6567 invert_tree_comparison (code2, false),
6568 op2a, op2b);
6569 else
6570 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6571 return canonicalize_bool (t, invert);
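/* Sketch of the DeMorgan rewrite above: for a boolean var_1,
     !var_1 AND (x_2 > 0)
   is handled as !(var_1 OR !(x_2 > 0)), i.e. !(var_1 OR x_2 <= 0), so
   only the simpler non-inverted forms need direct support; the final
   inversion is undone by canonicalize_bool.  */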
6574 /* Try to simplify the AND of the ssa variable defined by the assignment
6575 STMT with the comparison specified by (OP2A CODE2 OP2B).
6576 Return NULL_TREE if we can't simplify this to a single expression. */
6578 static tree
6579 and_var_with_comparison_1 (tree type, gimple *stmt,
6580 enum tree_code code2, tree op2a, tree op2b)
6582 tree var = gimple_assign_lhs (stmt);
6583 tree true_test_var = NULL_TREE;
6584 tree false_test_var = NULL_TREE;
6585 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6587 /* Check for identities like (var AND (var == 0)) => false. */
6588 if (TREE_CODE (op2a) == SSA_NAME
6589 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6591 if ((code2 == NE_EXPR && integer_zerop (op2b))
6592 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6594 true_test_var = op2a;
6595 if (var == true_test_var)
6596 return var;
6598 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6599 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6601 false_test_var = op2a;
6602 if (var == false_test_var)
6603 return boolean_false_node;
6607 /* If the definition is a comparison, recurse on it. */
6608 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6610 tree t = and_comparisons_1 (type, innercode,
6611 gimple_assign_rhs1 (stmt),
6612 gimple_assign_rhs2 (stmt),
6613 code2,
6614 op2a,
6615 op2b);
6616 if (t)
6617 return t;
6620 /* If the definition is an AND or OR expression, we may be able to
6621 simplify by reassociating. */
6622 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6623 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6625 tree inner1 = gimple_assign_rhs1 (stmt);
6626 tree inner2 = gimple_assign_rhs2 (stmt);
6627 gimple *s;
6628 tree t;
6629 tree partial = NULL_TREE;
6630 bool is_and = (innercode == BIT_AND_EXPR);
6632 /* Check for boolean identities that don't require recursive examination
6633 of inner1/inner2:
6634 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6635 inner1 AND (inner1 OR inner2) => inner1
6636 !inner1 AND (inner1 AND inner2) => false
6637 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6638 Likewise for similar cases involving inner2. */
6639 if (inner1 == true_test_var)
6640 return (is_and ? var : inner1);
6641 else if (inner2 == true_test_var)
6642 return (is_and ? var : inner2);
6643 else if (inner1 == false_test_var)
6644 return (is_and
6645 ? boolean_false_node
6646 : and_var_with_comparison (type, inner2, false, code2, op2a,
6647 op2b));
6648 else if (inner2 == false_test_var)
6649 return (is_and
6650 ? boolean_false_node
6651 : and_var_with_comparison (type, inner1, false, code2, op2a,
6652 op2b));
6654 /* Next, redistribute/reassociate the AND across the inner tests.
6655 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6656 if (TREE_CODE (inner1) == SSA_NAME
6657 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6658 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6659 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6660 gimple_assign_rhs1 (s),
6661 gimple_assign_rhs2 (s),
6662 code2, op2a, op2b)))
6664 /* Handle the AND case, where we are reassociating:
6665 (inner1 AND inner2) AND (op2a code2 op2b)
6666 => (t AND inner2)
6667 If the partial result t is a constant, we win. Otherwise
6668 continue on to try reassociating with the other inner test. */
6669 if (is_and)
6671 if (integer_onep (t))
6672 return inner2;
6673 else if (integer_zerop (t))
6674 return boolean_false_node;
6677 /* Handle the OR case, where we are redistributing:
6678 (inner1 OR inner2) AND (op2a code2 op2b)
6679 => (t OR (inner2 AND (op2a code2 op2b))) */
6680 else if (integer_onep (t))
6681 return boolean_true_node;
6683 /* Save partial result for later. */
6684 partial = t;
6687 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6688 if (TREE_CODE (inner2) == SSA_NAME
6689 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6690 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6691 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6692 gimple_assign_rhs1 (s),
6693 gimple_assign_rhs2 (s),
6694 code2, op2a, op2b)))
6696 /* Handle the AND case, where we are reassociating:
6697 (inner1 AND inner2) AND (op2a code2 op2b)
6698 => (inner1 AND t) */
6699 if (is_and)
6701 if (integer_onep (t))
6702 return inner1;
6703 else if (integer_zerop (t))
6704 return boolean_false_node;
6705 /* If both are the same, we can apply the identity
6706 (x AND x) == x. */
6707 else if (partial && same_bool_result_p (t, partial))
6708 return t;
6711 /* Handle the OR case, where we are redistributing:
6712 (inner1 OR inner2) AND (op2a code2 op2b)
6713 => (t OR (inner1 AND (op2a code2 op2b)))
6714 => (t OR partial) */
6715 else
6717 if (integer_onep (t))
6718 return boolean_true_node;
6719 else if (partial)
6721 /* We already got a simplification for the other
6722 operand to the redistributed OR expression. The
6723 interesting case is when at least one is false.
6724 Or, if both are the same, we can apply the identity
6725 (x OR x) == x. */
6726 if (integer_zerop (partial))
6727 return t;
6728 else if (integer_zerop (t))
6729 return partial;
6730 else if (same_bool_result_p (t, partial))
6731 return t;
6736 return NULL_TREE;
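/* E.g. (a sketch): with var_1 = a_2 & b_3 and a_2 = x_4 > 5, the query
     var_1 AND (x_4 < 5)
   folds to false: reassociating yields (x_4 > 5) AND (x_4 < 5), which
   maybe_fold_and_comparisons reduces to the constant false, making the
   whole conjunction false.  */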
6739 /* Try to simplify the AND of two comparisons defined by
6740 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6741 If this can be done without constructing an intermediate value,
6742 return the resulting tree; otherwise NULL_TREE is returned.
6743 This function is deliberately asymmetric as it recurses on SSA_DEFs
6744 in the first comparison but not the second. */
6746 static tree
6747 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6748 enum tree_code code2, tree op2a, tree op2b)
6750 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6752 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6753 if (operand_equal_p (op1a, op2a, 0)
6754 && operand_equal_p (op1b, op2b, 0))
6756 /* Result will be either NULL_TREE, or a combined comparison. */
6757 tree t = combine_comparisons (UNKNOWN_LOCATION,
6758 TRUTH_ANDIF_EXPR, code1, code2,
6759 truth_type, op1a, op1b);
6760 if (t)
6761 return t;
6764 /* Likewise the swapped case of the above. */
6765 if (operand_equal_p (op1a, op2b, 0)
6766 && operand_equal_p (op1b, op2a, 0))
6768 /* Result will be either NULL_TREE, or a combined comparison. */
6769 tree t = combine_comparisons (UNKNOWN_LOCATION,
6770 TRUTH_ANDIF_EXPR, code1,
6771 swap_tree_comparison (code2),
6772 truth_type, op1a, op1b);
6773 if (t)
6774 return t;
6777 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6778 NAME's definition is a truth value. See if there are any simplifications
6779 that can be done against the NAME's definition. */
6780 if (TREE_CODE (op1a) == SSA_NAME
6781 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6782 && (integer_zerop (op1b) || integer_onep (op1b)))
6784 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6785 || (code1 == NE_EXPR && integer_onep (op1b)));
6786 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6787 switch (gimple_code (stmt))
6789 case GIMPLE_ASSIGN:
6790 /* Try to simplify by copy-propagating the definition. */
6791 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6792 op2b);
6794 case GIMPLE_PHI:
6795 /* If every argument to the PHI produces the same result when
6796 ANDed with the second comparison, we win.
6797 Do not do this unless the type is bool since we need a bool
6798 result here anyway. */
6799 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6801 tree result = NULL_TREE;
6802 unsigned i;
6803 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6805 tree arg = gimple_phi_arg_def (stmt, i);
6807 /* If this PHI has itself as an argument, ignore it.
6808 If all the other args produce the same result,
6809 we're still OK. */
6810 if (arg == gimple_phi_result (stmt))
6811 continue;
6812 else if (TREE_CODE (arg) == INTEGER_CST)
6814 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6816 if (!result)
6817 result = boolean_false_node;
6818 else if (!integer_zerop (result))
6819 return NULL_TREE;
6821 else if (!result)
6822 result = fold_build2 (code2, boolean_type_node,
6823 op2a, op2b);
6824 else if (!same_bool_comparison_p (result,
6825 code2, op2a, op2b))
6826 return NULL_TREE;
6828 else if (TREE_CODE (arg) == SSA_NAME
6829 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6831 tree temp;
6832 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6833 /* In simple cases we can look through PHI nodes,
6834 but we have to be careful with loops.
6835 See PR49073. */
6836 if (! dom_info_available_p (CDI_DOMINATORS)
6837 || gimple_bb (def_stmt) == gimple_bb (stmt)
6838 || dominated_by_p (CDI_DOMINATORS,
6839 gimple_bb (def_stmt),
6840 gimple_bb (stmt)))
6841 return NULL_TREE;
6842 temp = and_var_with_comparison (type, arg, invert, code2,
6843 op2a, op2b);
6844 if (!temp)
6845 return NULL_TREE;
6846 else if (!result)
6847 result = temp;
6848 else if (!same_bool_result_p (result, temp))
6849 return NULL_TREE;
6851 else
6852 return NULL_TREE;
6854 return result;
6857 default:
6858 break;
6861 return NULL_TREE;
6864 /* Helper function for maybe_fold_and_comparisons and
6865 maybe_fold_or_comparisons: try to simplify the AND/OR of the two
6866 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) using match.pd.
6867 Return NULL_TREE if we can't simplify this to a single expression. To
6868 significantly lower the cost of building SSA names / gimple stmts, we
6869 allocate them on the stack. This makes the code a bit ugly. */
6871 static tree
6872 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6873 enum tree_code code1,
6874 tree op1a, tree op1b,
6875 enum tree_code code2, tree op2a,
6876 tree op2b)
6878 /* Allocate gimple stmt1 on the stack. */
6879 gassign *stmt1
6880 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6881 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6882 gimple_assign_set_rhs_code (stmt1, code1);
6883 gimple_assign_set_rhs1 (stmt1, op1a);
6884 gimple_assign_set_rhs2 (stmt1, op1b);
6886 /* Allocate gimple stmt2 on the stack. */
6887 gassign *stmt2
6888 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6889 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6890 gimple_assign_set_rhs_code (stmt2, code2);
6891 gimple_assign_set_rhs1 (stmt2, op2a);
6892 gimple_assign_set_rhs2 (stmt2, op2b);
6894 /* Allocate SSA names(lhs1) on the stack. */
6895 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6896 memset (lhs1, 0, sizeof (tree_ssa_name));
6897 TREE_SET_CODE (lhs1, SSA_NAME);
6898 TREE_TYPE (lhs1) = type;
6899 init_ssa_name_imm_use (lhs1);
6901 /* Allocate SSA names(lhs2) on the stack. */
6902 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6903 memset (lhs2, 0, sizeof (tree_ssa_name));
6904 TREE_SET_CODE (lhs2, SSA_NAME);
6905 TREE_TYPE (lhs2) = type;
6906 init_ssa_name_imm_use (lhs2);
6908 gimple_assign_set_lhs (stmt1, lhs1);
6909 gimple_assign_set_lhs (stmt2, lhs2);
6911 gimple_match_op op (gimple_match_cond::UNCOND, code,
6912 type, gimple_assign_lhs (stmt1),
6913 gimple_assign_lhs (stmt2));
6914 if (op.resimplify (NULL, follow_all_ssa_edges))
6916 if (gimple_simplified_result_is_gimple_val (&op))
6918 tree res = op.ops[0];
6919 if (res == lhs1)
6920 return build2 (code1, type, op1a, op1b);
6921 else if (res == lhs2)
6922 return build2 (code2, type, op2a, op2b);
6923 else
6924 return res;
6926 else if (op.code.is_tree_code ()
6927 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6929 tree op0 = op.ops[0];
6930 tree op1 = op.ops[1];
6931 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6932 return NULL_TREE; /* not simple */
6934 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6938 return NULL_TREE;
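/* Usage sketch (hypothetical operands): a call like
     maybe_fold_comparisons_from_match_pd (boolean_type_node,
                                           BIT_AND_EXPR,
                                           LT_EXPR, x, y,
                                           GE_EXPR, x, y)
   materializes the two comparisons as throw-away statements on the
   stack and can let match.pd reduce (x < y) & (x >= y) for integral
   x and y to boolean_false_node, without allocating real SSA names.  */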
6941 /* Try to simplify the AND of two comparisons, specified by
6942 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6943 If this can be simplified to a single expression (without requiring
6944 introducing more SSA variables to hold intermediate values),
6945 return the resulting tree. Otherwise return NULL_TREE.
6946 If the result expression is non-null, it has boolean type. */
6948 tree
6949 maybe_fold_and_comparisons (tree type,
6950 enum tree_code code1, tree op1a, tree op1b,
6951 enum tree_code code2, tree op2a, tree op2b)
6953 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6954 return t;
6956 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6957 return t;
6959 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6960 op1a, op1b, code2, op2a,
6961 op2b))
6962 return t;
6964 return NULL_TREE;
6967 /* Helper function for or_comparisons_1: try to simplify the OR of the
6968 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6969 If INVERT is true, invert the value of VAR before doing the OR.
6970 Return NULL_TREE if we can't simplify this to a single expression. */
6972 static tree
6973 or_var_with_comparison (tree type, tree var, bool invert,
6974 enum tree_code code2, tree op2a, tree op2b)
6976 tree t;
6977 gimple *stmt = SSA_NAME_DEF_STMT (var);
6979 /* We can only deal with variables whose definitions are assignments. */
6980 if (!is_gimple_assign (stmt))
6981 return NULL_TREE;
6983 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6984 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6985 Then we only have to consider the simpler non-inverted cases. */
6986 if (invert)
6987 t = and_var_with_comparison_1 (type, stmt,
6988 invert_tree_comparison (code2, false),
6989 op2a, op2b);
6990 else
6991 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6992 return canonicalize_bool (t, invert);
6995 /* Try to simplify the OR of the ssa variable defined by the assignment
6996 STMT with the comparison specified by (OP2A CODE2 OP2B).
6997 Return NULL_TREE if we can't simplify this to a single expression. */
6999 static tree
7000 or_var_with_comparison_1 (tree type, gimple *stmt,
7001 enum tree_code code2, tree op2a, tree op2b)
7003 tree var = gimple_assign_lhs (stmt);
7004 tree true_test_var = NULL_TREE;
7005 tree false_test_var = NULL_TREE;
7006 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7008 /* Check for identities like (var OR (var != 0)) => true . */
7009 if (TREE_CODE (op2a) == SSA_NAME
7010 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7012 if ((code2 == NE_EXPR && integer_zerop (op2b))
7013 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7015 true_test_var = op2a;
7016 if (var == true_test_var)
7017 return var;
7019 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7020 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7022 false_test_var = op2a;
7023 if (var == false_test_var)
7024 return boolean_true_node;
7028 /* If the definition is a comparison, recurse on it. */
7029 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7031 tree t = or_comparisons_1 (type, innercode,
7032 gimple_assign_rhs1 (stmt),
7033 gimple_assign_rhs2 (stmt),
7034 code2,
7035 op2a,
7036 op2b);
7037 if (t)
7038 return t;
7041 /* If the definition is an AND or OR expression, we may be able to
7042 simplify by reassociating. */
7043 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7044 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7046 tree inner1 = gimple_assign_rhs1 (stmt);
7047 tree inner2 = gimple_assign_rhs2 (stmt);
7048 gimple *s;
7049 tree t;
7050 tree partial = NULL_TREE;
7051 bool is_or = (innercode == BIT_IOR_EXPR);
7053 /* Check for boolean identities that don't require recursive examination
7054 of inner1/inner2:
7055 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7056 inner1 OR (inner1 AND inner2) => inner1
7057 !inner1 OR (inner1 OR inner2) => true
7058 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
7060 if (inner1 == true_test_var)
7061 return (is_or ? var : inner1);
7062 else if (inner2 == true_test_var)
7063 return (is_or ? var : inner2);
7064 else if (inner1 == false_test_var)
7065 return (is_or
7066 ? boolean_true_node
7067 : or_var_with_comparison (type, inner2, false, code2, op2a,
7068 op2b));
7069 else if (inner2 == false_test_var)
7070 return (is_or
7071 ? boolean_true_node
7072 : or_var_with_comparison (type, inner1, false, code2, op2a,
7073 op2b));
7075 /* Next, redistribute/reassociate the OR across the inner tests.
7076 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7077 if (TREE_CODE (inner1) == SSA_NAME
7078 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7079 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7080 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7081 gimple_assign_rhs1 (s),
7082 gimple_assign_rhs2 (s),
7083 code2, op2a, op2b)))
7085 /* Handle the OR case, where we are reassociating:
7086 (inner1 OR inner2) OR (op2a code2 op2b)
7087 => (t OR inner2)
7088 If the partial result t is a constant, we win. Otherwise
7089 continue on to try reassociating with the other inner test. */
7090 if (is_or)
7092 if (integer_onep (t))
7093 return boolean_true_node;
7094 else if (integer_zerop (t))
7095 return inner2;
7098 /* Handle the AND case, where we are redistributing:
7099 (inner1 AND inner2) OR (op2a code2 op2b)
7100 => (t AND (inner2 OR (op2a code op2b))) */
7101 else if (integer_zerop (t))
7102 return boolean_false_node;
7104 /* Save partial result for later. */
7105 partial = t;
7108 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7109 if (TREE_CODE (inner2) == SSA_NAME
7110 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7111 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7112 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7113 gimple_assign_rhs1 (s),
7114 gimple_assign_rhs2 (s),
7115 code2, op2a, op2b)))
7117 /* Handle the OR case, where we are reassociating:
7118 (inner1 OR inner2) OR (op2a code2 op2b)
7119 => (inner1 OR t)
7120 => (t OR partial) */
7121 if (is_or)
7123 if (integer_zerop (t))
7124 return inner1;
7125 else if (integer_onep (t))
7126 return boolean_true_node;
7127 /* If both are the same, we can apply the identity
7128 (x OR x) == x. */
7129 else if (partial && same_bool_result_p (t, partial))
7130 return t;
7133 /* Handle the AND case, where we are redistributing:
7134 (inner1 AND inner2) OR (op2a code2 op2b)
7135 => (t AND (inner1 OR (op2a code2 op2b)))
7136 => (t AND partial) */
7137 else
7139 if (integer_zerop (t))
7140 return boolean_false_node;
7141 else if (partial)
7143 /* We already got a simplification for the other
7144 operand to the redistributed AND expression. The
7145 interesting case is when at least one is true.
7146 Or, if both are the same, we can apply the identity
7147 (x AND x) == x. */
7148 if (integer_onep (partial))
7149 return t;
7150 else if (integer_onep (t))
7151 return partial;
7152 else if (same_bool_result_p (t, partial))
7153 return t;
7158 return NULL_TREE;
7161 /* Try to simplify the OR of two comparisons defined by
7162 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7163 If this can be done without constructing an intermediate value,
7164 return the resulting tree; otherwise NULL_TREE is returned.
7165 This function is deliberately asymmetric as it recurses on SSA_DEFs
7166 in the first comparison but not the second. */
7168 static tree
7169 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7170 enum tree_code code2, tree op2a, tree op2b)
7172 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7174 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7175 if (operand_equal_p (op1a, op2a, 0)
7176 && operand_equal_p (op1b, op2b, 0))
7178 /* Result will be either NULL_TREE, or a combined comparison. */
7179 tree t = combine_comparisons (UNKNOWN_LOCATION,
7180 TRUTH_ORIF_EXPR, code1, code2,
7181 truth_type, op1a, op1b);
7182 if (t)
7183 return t;
7186 /* Likewise the swapped case of the above. */
7187 if (operand_equal_p (op1a, op2b, 0)
7188 && operand_equal_p (op1b, op2a, 0))
7190 /* Result will be either NULL_TREE, or a combined comparison. */
7191 tree t = combine_comparisons (UNKNOWN_LOCATION,
7192 TRUTH_ORIF_EXPR, code1,
7193 swap_tree_comparison (code2),
7194 truth_type, op1a, op1b);
7195 if (t)
7196 return t;
7199 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7200 NAME's definition is a truth value. See if there are any simplifications
7201 that can be done against the NAME's definition. */
7202 if (TREE_CODE (op1a) == SSA_NAME
7203 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7204 && (integer_zerop (op1b) || integer_onep (op1b)))
7206 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7207 || (code1 == NE_EXPR && integer_onep (op1b)));
7208 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7209 switch (gimple_code (stmt))
7211 case GIMPLE_ASSIGN:
7212 /* Try to simplify by copy-propagating the definition. */
7213 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7214 op2b);
7216 case GIMPLE_PHI:
7217 /* If every argument to the PHI produces the same result when
7218 ORed with the second comparison, we win.
7219 Do not do this unless the type is bool since we need a bool
7220 result here anyway. */
7221 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7223 tree result = NULL_TREE;
7224 unsigned i;
7225 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7227 tree arg = gimple_phi_arg_def (stmt, i);
7229 /* If this PHI has itself as an argument, ignore it.
7230 If all the other args produce the same result,
7231 we're still OK. */
7232 if (arg == gimple_phi_result (stmt))
7233 continue;
7234 else if (TREE_CODE (arg) == INTEGER_CST)
7236 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7238 if (!result)
7239 result = boolean_true_node;
7240 else if (!integer_onep (result))
7241 return NULL_TREE;
7243 else if (!result)
7244 result = fold_build2 (code2, boolean_type_node,
7245 op2a, op2b);
7246 else if (!same_bool_comparison_p (result,
7247 code2, op2a, op2b))
7248 return NULL_TREE;
7250 else if (TREE_CODE (arg) == SSA_NAME
7251 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7253 tree temp;
7254 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7255 /* In simple cases we can look through PHI nodes,
7256 but we have to be careful with loops.
7257 See PR49073. */
7258 if (! dom_info_available_p (CDI_DOMINATORS)
7259 || gimple_bb (def_stmt) == gimple_bb (stmt)
7260 || dominated_by_p (CDI_DOMINATORS,
7261 gimple_bb (def_stmt),
7262 gimple_bb (stmt)))
7263 return NULL_TREE;
7264 temp = or_var_with_comparison (type, arg, invert, code2,
7265 op2a, op2b);
7266 if (!temp)
7267 return NULL_TREE;
7268 else if (!result)
7269 result = temp;
7270 else if (!same_bool_result_p (result, temp))
7271 return NULL_TREE;
7273 else
7274 return NULL_TREE;
7276 return result;
7279 default:
7280 break;
7283 return NULL_TREE;
7286 /* Try to simplify the OR of two comparisons, specified by
7287 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7288 If this can be simplified to a single expression (without requiring
7289 introducing more SSA variables to hold intermediate values),
7290 return the resulting tree. Otherwise return NULL_TREE.
7291 If the result expression is non-null, it has boolean type. */
7293 tree
7294 maybe_fold_or_comparisons (tree type,
7295 enum tree_code code1, tree op1a, tree op1b,
7296 enum tree_code code2, tree op2a, tree op2b)
7298 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7299 return t;
7301 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7302 return t;
7304 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7305 op1a, op1b, code2, op2a,
7306 op2b))
7307 return t;
7309 return NULL_TREE;
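/* E.g. (a sketch): maybe_fold_or_comparisons (boolean_type_node,
   LT_EXPR, x, y, GE_EXPR, x, y) returns boolean_true_node for integral
   x and y, since the two comparisons together cover all cases.  */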
7312 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7314 Either NULL_TREE, a simplified but non-constant expression, or a constant
7315 is returned.
7317 ??? This should go into a gimple-fold-inline.h file to be eventually
7318 privatized with the single valueize function used in the various TUs
7319 to avoid the indirect function call overhead. */
7321 tree
7322 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7323 tree (*gvalueize) (tree))
7325 gimple_match_op res_op;
7326 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7327 edges if there are intermediate VARYING defs. For this reason
7328 do not follow SSA edges here, even though SCCVN can technically
7329 handle that just fine. */
7330 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7332 tree res = NULL_TREE;
7333 if (gimple_simplified_result_is_gimple_val (&res_op))
7334 res = res_op.ops[0];
7335 else if (mprts_hook)
7336 res = mprts_hook (&res_op);
7337 if (res)
7339 if (dump_file && dump_flags & TDF_DETAILS)
7341 fprintf (dump_file, "Match-and-simplified ");
7342 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7343 fprintf (dump_file, " to ");
7344 print_generic_expr (dump_file, res);
7345 fprintf (dump_file, "\n");
7347 return res;
7351 location_t loc = gimple_location (stmt);
7352 switch (gimple_code (stmt))
7354 case GIMPLE_ASSIGN:
7356 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7358 switch (get_gimple_rhs_class (subcode))
7360 case GIMPLE_SINGLE_RHS:
7362 tree rhs = gimple_assign_rhs1 (stmt);
7363 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7365 if (TREE_CODE (rhs) == SSA_NAME)
7367 /* If the RHS is an SSA_NAME, return its known constant value,
7368 if any. */
7369 return (*valueize) (rhs);
7371 /* Handle propagating invariant addresses into address
7372 operations. */
7373 else if (TREE_CODE (rhs) == ADDR_EXPR
7374 && !is_gimple_min_invariant (rhs))
7376 poly_int64 offset = 0;
7377 tree base;
7378 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7379 &offset,
7380 valueize);
7381 if (base
7382 && (CONSTANT_CLASS_P (base)
7383 || decl_address_invariant_p (base)))
7384 return build_invariant_address (TREE_TYPE (rhs),
7385 base, offset);
7387 else if (TREE_CODE (rhs) == CONSTRUCTOR
7388 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7389 && known_eq (CONSTRUCTOR_NELTS (rhs),
7390 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7392 unsigned i, nelts;
7393 tree val;
7395 nelts = CONSTRUCTOR_NELTS (rhs);
7396 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7397 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7399 val = (*valueize) (val);
7400 if (TREE_CODE (val) == INTEGER_CST
7401 || TREE_CODE (val) == REAL_CST
7402 || TREE_CODE (val) == FIXED_CST)
7403 vec.quick_push (val);
7404 else
7405 return NULL_TREE;
7408 return vec.build ();
7410 if (subcode == OBJ_TYPE_REF)
7412 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7413 /* If callee is constant, we can fold away the wrapper. */
7414 if (is_gimple_min_invariant (val))
7415 return val;
7418 if (kind == tcc_reference)
7420 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7421 || TREE_CODE (rhs) == REALPART_EXPR
7422 || TREE_CODE (rhs) == IMAGPART_EXPR)
7423 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7425 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7426 return fold_unary_loc (EXPR_LOCATION (rhs),
7427 TREE_CODE (rhs),
7428 TREE_TYPE (rhs), val);
7430 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7431 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7433 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7434 return fold_ternary_loc (EXPR_LOCATION (rhs),
7435 TREE_CODE (rhs),
7436 TREE_TYPE (rhs), val,
7437 TREE_OPERAND (rhs, 1),
7438 TREE_OPERAND (rhs, 2));
7440 else if (TREE_CODE (rhs) == MEM_REF
7441 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7443 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7444 if (TREE_CODE (val) == ADDR_EXPR
7445 && is_gimple_min_invariant (val))
7447 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7448 unshare_expr (val),
7449 TREE_OPERAND (rhs, 1));
7450 if (tem)
7451 rhs = tem;
7454 return fold_const_aggregate_ref_1 (rhs, valueize);
7456 else if (kind == tcc_declaration)
7457 return get_symbol_constant_value (rhs);
7458 return rhs;
7461 case GIMPLE_UNARY_RHS:
7462 return NULL_TREE;
7464 case GIMPLE_BINARY_RHS:
7465 /* Translate &x + CST into an invariant form suitable for
7466 further propagation. */
7467 if (subcode == POINTER_PLUS_EXPR)
7469 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7470 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7471 if (TREE_CODE (op0) == ADDR_EXPR
7472 && TREE_CODE (op1) == INTEGER_CST)
7474 tree off = fold_convert (ptr_type_node, op1);
7475 return build1_loc
7476 (loc, ADDR_EXPR, TREE_TYPE (op0),
7477 fold_build2 (MEM_REF,
7478 TREE_TYPE (TREE_TYPE (op0)),
7479 unshare_expr (op0), off));
7482 /* Canonicalize bool != 0 and bool == 0 appearing after
7483 valueization. While gimple_simplify handles this
7484 it can get confused by the ~X == 1 -> X == 0 transform
7485 which we can't reduce to an SSA name or a constant
7486 (and we have no way to tell gimple_simplify to not
7487 consider those transforms in the first place). */
7488 else if (subcode == EQ_EXPR
7489 || subcode == NE_EXPR)
7491 tree lhs = gimple_assign_lhs (stmt);
7492 tree op0 = gimple_assign_rhs1 (stmt);
7493 if (useless_type_conversion_p (TREE_TYPE (lhs),
7494 TREE_TYPE (op0)))
7496 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7497 op0 = (*valueize) (op0);
7498 if (TREE_CODE (op0) == INTEGER_CST)
7499 std::swap (op0, op1);
7500 if (TREE_CODE (op1) == INTEGER_CST
7501 && ((subcode == NE_EXPR && integer_zerop (op1))
7502 || (subcode == EQ_EXPR && integer_onep (op1))))
7503 return op0;
7506 return NULL_TREE;
7508 case GIMPLE_TERNARY_RHS:
7510 /* Handle ternary operators that can appear in GIMPLE form. */
7511 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7512 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7513 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7514 return fold_ternary_loc (loc, subcode,
7515 TREE_TYPE (gimple_assign_lhs (stmt)),
7516 op0, op1, op2);
7519 default:
7520 gcc_unreachable ();
7524 case GIMPLE_CALL:
7526 tree fn;
7527 gcall *call_stmt = as_a <gcall *> (stmt);
7529 if (gimple_call_internal_p (stmt))
7531 enum tree_code subcode = ERROR_MARK;
7532 switch (gimple_call_internal_fn (stmt))
7534 case IFN_UBSAN_CHECK_ADD:
7535 subcode = PLUS_EXPR;
7536 break;
7537 case IFN_UBSAN_CHECK_SUB:
7538 subcode = MINUS_EXPR;
7539 break;
7540 case IFN_UBSAN_CHECK_MUL:
7541 subcode = MULT_EXPR;
7542 break;
7543 case IFN_BUILTIN_EXPECT:
7545 tree arg0 = gimple_call_arg (stmt, 0);
7546 tree op0 = (*valueize) (arg0);
7547 if (TREE_CODE (op0) == INTEGER_CST)
7548 return op0;
7549 return NULL_TREE;
7551 default:
7552 return NULL_TREE;
7554 tree arg0 = gimple_call_arg (stmt, 0);
7555 tree arg1 = gimple_call_arg (stmt, 1);
7556 tree op0 = (*valueize) (arg0);
7557 tree op1 = (*valueize) (arg1);
7559 if (TREE_CODE (op0) != INTEGER_CST
7560 || TREE_CODE (op1) != INTEGER_CST)
7562 switch (subcode)
7564 case MULT_EXPR:
7565 /* x * 0 = 0 * x = 0 without overflow. */
7566 if (integer_zerop (op0) || integer_zerop (op1))
7567 return build_zero_cst (TREE_TYPE (arg0));
7568 break;
7569 case MINUS_EXPR:
7570 /* y - y = 0 without overflow. */
7571 if (operand_equal_p (op0, op1, 0))
7572 return build_zero_cst (TREE_TYPE (arg0));
7573 break;
7574 default:
7575 break;
7578 tree res
7579 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7580 if (res
7581 && TREE_CODE (res) == INTEGER_CST
7582 && !TREE_OVERFLOW (res))
7583 return res;
7584 return NULL_TREE;
7587 fn = (*valueize) (gimple_call_fn (stmt));
7588 if (TREE_CODE (fn) == ADDR_EXPR
7589 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7590 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7591 && gimple_builtin_call_types_compatible_p (stmt,
7592 TREE_OPERAND (fn, 0)))
7594 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7595 tree retval;
7596 unsigned i;
7597 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7598 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7599 retval = fold_builtin_call_array (loc,
7600 gimple_call_return_type (call_stmt),
7601 fn, gimple_call_num_args (stmt), args);
7602 if (retval)
7604 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7605 STRIP_NOPS (retval);
7606 retval = fold_convert (gimple_call_return_type (call_stmt),
7607 retval);
7609 return retval;
7611 return NULL_TREE;
7614 default:
7615 return NULL_TREE;
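/* A small worked example (an assumption, not from the source): with a
   VALUEIZE hook that maps x_1 to the constant 4, the assignment

     y_2 = x_1 * 2;

   is match-and-simplified to 8 by the gimple_simplify call above,
   while with a boolean _5 the assignment

     b_3 = _5 != 0;

   is canonicalized back to _5 by the EQ_EXPR/NE_EXPR handling.  */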
7619 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7620 Returns NULL_TREE if folding to a constant is not possible, otherwise
7621 returns a constant according to is_gimple_min_invariant. */
7623 tree
7624 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7626 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7627 if (res && is_gimple_min_invariant (res))
7628 return res;
7629 return NULL_TREE;
7633 /* The following set of functions are supposed to fold references using
7634 their constant initializers. */
7636 /* See if we can find constructor defining value of BASE.
7637 When we know the constructor at a constant offset (such as when
7638 BASE is array[40] and we know the constructor of array), then
7639 BIT_OFFSET is adjusted accordingly.
7641 As a special case, return error_mark_node when constructor
7642 is not explicitly available, but it is known to be zero
7643 such as 'static const int a;'. */
7644 static tree
7645 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7646 tree (*valueize)(tree))
7648 poly_int64 bit_offset2, size, max_size;
7649 bool reverse;
7651 if (TREE_CODE (base) == MEM_REF)
7653 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7654 if (!boff.to_shwi (bit_offset))
7655 return NULL_TREE;
7657 if (valueize
7658 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7659 base = valueize (TREE_OPERAND (base, 0));
7660 if (!base || TREE_CODE (base) != ADDR_EXPR)
7661 return NULL_TREE;
7662 base = TREE_OPERAND (base, 0);
7664 else if (valueize
7665 && TREE_CODE (base) == SSA_NAME)
7666 base = valueize (base);
7668 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7669 DECL_INITIAL. If BASE is a nested reference into another
7670 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7671 the inner reference. */
7672 switch (TREE_CODE (base))
7674 case VAR_DECL:
7675 case CONST_DECL:
7677 tree init = ctor_for_folding (base);
7679 /* Our semantics are the exact opposite of ctor_for_folding:
7680 NULL means unknown, while error_mark_node means 0. */
7681 if (init == error_mark_node)
7682 return NULL_TREE;
7683 if (!init)
7684 return error_mark_node;
7685 return init;
7688 case VIEW_CONVERT_EXPR:
7689 return get_base_constructor (TREE_OPERAND (base, 0),
7690 bit_offset, valueize);
7692 case ARRAY_REF:
7693 case COMPONENT_REF:
7694 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7695 &reverse);
7696 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7697 return NULL_TREE;
7698 *bit_offset += bit_offset2;
7699 return get_base_constructor (base, bit_offset, valueize);
7701 case CONSTRUCTOR:
7702 return base;
7704 default:
7705 if (CONSTANT_CLASS_P (base))
7706 return base;
7708 return NULL_TREE;
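/* Illustration (hypothetical declarations): for

     static const int a[40] = { 0, 1, 2 };

   a MEM_REF of &a with byte offset 16 makes get_base_constructor
   return the CONSTRUCTOR of `a' with *BIT_OFFSET increased by 128
   (16 * BITS_PER_UNIT).  For `static const int b;' without an
   explicit initializer it returns error_mark_node, i.e. "known to
   be zero".  */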
7712 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7713 to the memory at bit OFFSET. When non-null, TYPE is the expected
7714 type of the reference; otherwise the type of the referenced element
7715 is used instead. When SIZE is zero, attempt to fold a reference to
7716 the entire element which OFFSET refers to. Increment *SUBOFF by
7717 the bit offset of the accessed element. */
7719 static tree
7720 fold_array_ctor_reference (tree type, tree ctor,
7721 unsigned HOST_WIDE_INT offset,
7722 unsigned HOST_WIDE_INT size,
7723 tree from_decl,
7724 unsigned HOST_WIDE_INT *suboff)
7726 offset_int low_bound;
7727 offset_int elt_size;
7728 offset_int access_index;
7729 tree domain_type = NULL_TREE;
7730 HOST_WIDE_INT inner_offset;
7732 /* Compute low bound and elt size. */
7733 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7734 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7735 if (domain_type && TYPE_MIN_VALUE (domain_type))
7737 /* Static constructors for variably sized objects make no sense. */
7738 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7739 return NULL_TREE;
7740 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7742 else
7743 low_bound = 0;
7744 /* Static constructors for variably sized objects make no sense. */
7745 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7746 return NULL_TREE;
7747 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7749 /* When TYPE is non-null, verify that it specifies a constant-sized
7750 access of a multiple of the array element size. Avoid division
7751 by zero below when ELT_SIZE is zero, such as with the result of
7752 an initializer for a zero-length array or an empty struct. */
7753 if (elt_size == 0
7754 || (type
7755 && (!TYPE_SIZE_UNIT (type)
7756 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7757 return NULL_TREE;
7759 /* Compute the array index we look for. */
7760 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7761 elt_size);
7762 access_index += low_bound;
7764 /* And offset within the access. */
7765 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7767 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7768 if (size > elt_sz * BITS_PER_UNIT)
7770 /* native_encode_expr constraints. */
7771 if (size > MAX_BITSIZE_MODE_ANY_MODE
7772 || size % BITS_PER_UNIT != 0
7773 || inner_offset % BITS_PER_UNIT != 0
7774 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7775 return NULL_TREE;
7777 unsigned ctor_idx;
7778 tree val = get_array_ctor_element_at_index (ctor, access_index,
7779 &ctor_idx);
7780 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7781 return build_zero_cst (type);
7783 /* native-encode adjacent ctor elements. */
7784 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7785 unsigned bufoff = 0;
7786 offset_int index = 0;
7787 offset_int max_index = access_index;
7788 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7789 if (!val)
7790 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7791 else if (!CONSTANT_CLASS_P (val))
7792 return NULL_TREE;
7793 if (!elt->index)
7795 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7797 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7798 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7800 else
7801 index = max_index = wi::to_offset (elt->index);
7802 index = wi::umax (index, access_index);
7805 if (bufoff + elt_sz > sizeof (buf))
7806 elt_sz = sizeof (buf) - bufoff;
7807 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7808 inner_offset / BITS_PER_UNIT);
7809 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7810 return NULL_TREE;
7811 inner_offset = 0;
7812 bufoff += len;
7814 access_index += 1;
7815 if (wi::cmpu (access_index, index) == 0)
7816 val = elt->value;
7817 else if (wi::cmpu (access_index, max_index) > 0)
7819 ctor_idx++;
7820 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7822 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7823 ++max_index;
7825 else
7827 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7828 index = 0;
7829 max_index = access_index;
7830 if (!elt->index)
7832 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7834 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7835 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7837 else
7838 index = max_index = wi::to_offset (elt->index);
7839 index = wi::umax (index, access_index);
7840 if (wi::cmpu (access_index, index) == 0)
7841 val = elt->value;
7842 else
7843 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7847 while (bufoff < size / BITS_PER_UNIT);
7848 *suboff += size;
7849 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7852 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7854 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7856 /* For the final reference to the entire accessed element
7857 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7858 may be null) in favor of the type of the element, and set
7859 SIZE to the size of the accessed element. */
7860 inner_offset = 0;
7861 type = TREE_TYPE (val);
7862 size = elt_sz * BITS_PER_UNIT;
7864 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7865 && TREE_CODE (val) == CONSTRUCTOR
7866 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7867 /* If this isn't the last element in the CTOR, is itself a CTOR,
7868 and does not cover the whole object we are requesting, give up
7869 since we're not set up for combining from multiple CTORs. */
7870 return NULL_TREE;
7872 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7873 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7874 suboff);
7877 /* Memory not explicitly mentioned in constructor is 0 (or
7878 the reference is out of range). */
7879 return type ? build_zero_cst (type) : NULL_TREE;
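/* A standalone analogue of the native-encode path above (illustration
   only; assumes 8-bit bytes, with endianness noted): reading a wider
   value from adjacent constant elements is the compile-time equivalent
   of this byte-level copy:

     #include <stdint.h>
     #include <string.h>

     static const uint8_t ctor[4] = { 1, 2, 3, 4 };

     uint32_t
     read_whole (void)
     {
       uint32_t v;
       memcpy (&v, ctor, sizeof v);  // native_encode + native_interpret
       return v;                     // 0x04030201 on little endian
     }
*/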
7882 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7883 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7884 is the expected type of the reference; otherwise the type of
7885 the referenced member is used instead. When SIZE is zero,
7886 attempt to fold a reference to the entire member which OFFSET
7887 refers to. Increment *SUBOFF by the bit offset
7888 of the accessed member. */
7890 static tree
7891 fold_nonarray_ctor_reference (tree type, tree ctor,
7892 unsigned HOST_WIDE_INT offset,
7893 unsigned HOST_WIDE_INT size,
7894 tree from_decl,
7895 unsigned HOST_WIDE_INT *suboff)
7897 unsigned HOST_WIDE_INT cnt;
7898 tree cfield, cval;
7900 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7901 cval)
7903 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7904 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7905 tree field_size = DECL_SIZE (cfield);
7907 if (!field_size)
7909 /* Determine the size of the flexible array member from
7910 the size of the initializer provided for it. */
7911 field_size = TYPE_SIZE (TREE_TYPE (cval));
7914 /* Variable-sized objects in static constructors make no sense,
7915 but field_size can be NULL for flexible array members. */
7916 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7917 && TREE_CODE (byte_offset) == INTEGER_CST
7918 && (field_size != NULL_TREE
7919 ? TREE_CODE (field_size) == INTEGER_CST
7920 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7922 /* Compute bit offset of the field. */
7923 offset_int bitoffset
7924 = (wi::to_offset (field_offset)
7925 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7926 /* Compute bit offset where the field ends. */
7927 offset_int bitoffset_end;
7928 if (field_size != NULL_TREE)
7929 bitoffset_end = bitoffset + wi::to_offset (field_size);
7930 else
7931 bitoffset_end = 0;
7933 /* Compute the bit offset of the end of the desired access.
7934 As a special case, if the size of the desired access is
7935 zero, assume the access is to the entire field (and let
7936 the caller make any necessary adjustments by storing
7937 the actual bounds of the field in FIELDBOUNDS). */
7938 offset_int access_end = offset_int (offset);
7939 if (size)
7940 access_end += size;
7941 else
7942 access_end = bitoffset_end;
7944 /* Is there any overlap between the desired access at
7945 [OFFSET, OFFSET+SIZE) and the offset of the field within
7946 the object at [BITOFFSET, BITOFFSET_END)? */
7947 if (wi::cmps (access_end, bitoffset) > 0
7948 && (field_size == NULL_TREE
7949 || wi::lts_p (offset, bitoffset_end)))
7951 *suboff += bitoffset.to_uhwi ();
7953 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7955 /* For the final reference to the entire accessed member
7956 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7957 be null) in favor of the type of the member, and set
7958 SIZE to the size of the accessed member. */
7959 offset = bitoffset.to_uhwi ();
7960 type = TREE_TYPE (cval);
7961 size = (bitoffset_end - bitoffset).to_uhwi ();
7964 /* We do have overlap. Now see if the field is large enough
7965 to cover the access. Give up for accesses that extend
7966 beyond the end of the object or that span multiple fields. */
7967 if (wi::cmps (access_end, bitoffset_end) > 0)
7968 return NULL_TREE;
7969 if (offset < bitoffset)
7970 return NULL_TREE;
7972 offset_int inner_offset = offset_int (offset) - bitoffset;
7973 return fold_ctor_reference (type, cval,
7974 inner_offset.to_uhwi (), size,
7975 from_decl, suboff);
7979 if (!type)
7980 return NULL_TREE;
7982 return build_zero_cst (type);
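/* Illustration (hypothetical type; assumes a target with 32-bit int
   and 16-bit short): given

     static const struct { int i; short s; } c = { 42, 7 };

   a read of c.s has OFFSET 32 and SIZE 16; it overlaps only the
   field range [32, 48) of `s', so the function recurses into the
   initializer of that field and returns 7.  */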
7985 /* CTOR is value initializing memory. Fold a reference of TYPE and
7986 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7987 is zero, attempt to fold a reference to the entire subobject
7988 which OFFSET refers to. This is used when folding accesses to
7989 string members of aggregates. When non-null, set *SUBOFF to
7990 the bit offset of the accessed subobject. */
7992 tree
7993 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7994 const poly_uint64 &poly_size, tree from_decl,
7995 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7997 tree ret;
7999 /* We found the field with exact match. */
8000 if (type
8001 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8002 && known_eq (poly_offset, 0U))
8003 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8005 /* The remaining optimizations need a constant size and offset. */
8006 unsigned HOST_WIDE_INT size, offset;
8007 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8008 return NULL_TREE;
8010 /* We are at the end of walk, see if we can view convert the
8011 result. */
8012 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8013 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8014 && !compare_tree_int (TYPE_SIZE (type), size)
8015 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
8017 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8018 if (ret)
8020 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8021 if (ret)
8022 STRIP_USELESS_TYPE_CONVERSION (ret);
8024 return ret;
8026 /* For constants and byte-aligned/sized reads try to go through
8027 native_encode/interpret. */
8028 if (CONSTANT_CLASS_P (ctor)
8029 && BITS_PER_UNIT == 8
8030 && offset % BITS_PER_UNIT == 0
8031 && offset / BITS_PER_UNIT <= INT_MAX
8032 && size % BITS_PER_UNIT == 0
8033 && size <= MAX_BITSIZE_MODE_ANY_MODE
8034 && can_native_interpret_type_p (type))
8036 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8037 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8038 offset / BITS_PER_UNIT);
8039 if (len > 0)
8040 return native_interpret_expr (type, buf, len);
8042 if (TREE_CODE (ctor) == CONSTRUCTOR)
8044 unsigned HOST_WIDE_INT dummy = 0;
8045 if (!suboff)
8046 suboff = &dummy;
8048 tree ret;
8049 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8050 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8051 ret = fold_array_ctor_reference (type, ctor, offset, size,
8052 from_decl, suboff);
8053 else
8054 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8055 from_decl, suboff);
8057 /* Fall back to native_encode_initializer. Needs to be done
8058 only in the outermost fold_ctor_reference call (because it itself
8059 recurses into CONSTRUCTORs) and doesn't update suboff. */
8060 if (ret == NULL_TREE
8061 && suboff == &dummy
8062 && BITS_PER_UNIT == 8
8063 && offset % BITS_PER_UNIT == 0
8064 && offset / BITS_PER_UNIT <= INT_MAX
8065 && size % BITS_PER_UNIT == 0
8066 && size <= MAX_BITSIZE_MODE_ANY_MODE
8067 && can_native_interpret_type_p (type))
8069 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8070 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8071 offset / BITS_PER_UNIT);
8072 if (len > 0)
8073 return native_interpret_expr (type, buf, len);
8076 return ret;
8079 return NULL_TREE;
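/* For instance (illustrative; assumes 32-bit float and int and 8-bit
   bytes): folding

     static const float f = 1.0f;
     ... VIEW_CONVERT_EXPR<int>(f) ...

   takes the view-convert/native-interpret path above and yields the
   constant 0x3f800000, the bit pattern of 1.0f.  */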
8082 /* Return the tree representing the element referenced by T if T is an
8083 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
8084 names using VALUEIZE. Return NULL_TREE otherwise. */
8086 tree
8087 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8089 tree ctor, idx, base;
8090 poly_int64 offset, size, max_size;
8091 tree tem;
8092 bool reverse;
8094 if (TREE_THIS_VOLATILE (t))
8095 return NULL_TREE;
8097 if (DECL_P (t))
8098 return get_symbol_constant_value (t);
8100 tem = fold_read_from_constant_string (t);
8101 if (tem)
8102 return tem;
8104 switch (TREE_CODE (t))
8106 case ARRAY_REF:
8107 case ARRAY_RANGE_REF:
8108 /* Constant indexes are handled well by get_base_constructor.
8109 Only special case variable offsets.
8110 FIXME: This code can't handle nested references with variable indexes
8111 (they will be handled only by iteration of ccp). Perhaps we can bring
8112 get_ref_base_and_extent here and make it use a valueize callback. */
8113 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8114 && valueize
8115 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8116 && poly_int_tree_p (idx))
8118 tree low_bound, unit_size;
8120 /* If the resulting bit-offset is constant, track it. */
8121 if ((low_bound = array_ref_low_bound (t),
8122 poly_int_tree_p (low_bound))
8123 && (unit_size = array_ref_element_size (t),
8124 tree_fits_uhwi_p (unit_size)))
8126 poly_offset_int woffset
8127 = wi::sext (wi::to_poly_offset (idx)
8128 - wi::to_poly_offset (low_bound),
8129 TYPE_PRECISION (sizetype));
8130 woffset *= tree_to_uhwi (unit_size);
8131 woffset *= BITS_PER_UNIT;
8132 if (woffset.to_shwi (&offset))
8134 base = TREE_OPERAND (t, 0);
8135 ctor = get_base_constructor (base, &offset, valueize);
8136 /* Empty constructor. Always fold to 0. */
8137 if (ctor == error_mark_node)
8138 return build_zero_cst (TREE_TYPE (t));
8139 /* Out of bound array access. Value is undefined,
8140 but don't fold. */
8141 if (maybe_lt (offset, 0))
8142 return NULL_TREE;
8143 /* We cannot determine ctor. */
8144 if (!ctor)
8145 return NULL_TREE;
8146 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8147 tree_to_uhwi (unit_size)
8148 * BITS_PER_UNIT,
8149 base);
8153 /* Fallthru. */
8155 case COMPONENT_REF:
8156 case BIT_FIELD_REF:
8157 case TARGET_MEM_REF:
8158 case MEM_REF:
8159 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8160 ctor = get_base_constructor (base, &offset, valueize);
8162 /* Empty constructor. Always fold to 0. */
8163 if (ctor == error_mark_node)
8164 return build_zero_cst (TREE_TYPE (t));
8165 /* We do not know precise address. */
8166 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8167 return NULL_TREE;
8168 /* We cannot determine ctor. */
8169 if (!ctor)
8170 return NULL_TREE;
8172 /* Out of bound array access. Value is undefined, but don't fold. */
8173 if (maybe_lt (offset, 0))
8174 return NULL_TREE;
8176 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8177 if (tem)
8178 return tem;
8180 /* For bit field reads try to read the representative and
8181 adjust. */
8182 if (TREE_CODE (t) == COMPONENT_REF
8183 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8184 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8186 HOST_WIDE_INT csize, coffset;
8187 tree field = TREE_OPERAND (t, 1);
8188 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8189 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8190 && size.is_constant (&csize)
8191 && offset.is_constant (&coffset)
8192 && (coffset % BITS_PER_UNIT != 0
8193 || csize % BITS_PER_UNIT != 0)
8194 && !reverse
8195 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8197 poly_int64 bitoffset;
8198 poly_uint64 field_offset, repr_offset;
8199 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8200 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8201 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8202 else
8203 bitoffset = 0;
8204 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8205 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8206 HOST_WIDE_INT bitoff;
8207 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8208 - TYPE_PRECISION (TREE_TYPE (field)));
8209 if (bitoffset.is_constant (&bitoff)
8210 && bitoff >= 0
8211 && bitoff <= diff)
8213 offset -= bitoff;
8214 size = tree_to_uhwi (DECL_SIZE (repr));
8216 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8217 size, base);
8218 if (tem && TREE_CODE (tem) == INTEGER_CST)
8220 if (!BYTES_BIG_ENDIAN)
8221 tem = wide_int_to_tree (TREE_TYPE (field),
8222 wi::lrshift (wi::to_wide (tem),
8223 bitoff));
8224 else
8225 tem = wide_int_to_tree (TREE_TYPE (field),
8226 wi::lrshift (wi::to_wide (tem),
8227 diff - bitoff));
8228 return tem;
8233 break;
8235 case REALPART_EXPR:
8236 case IMAGPART_EXPR:
8238 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8239 if (c && TREE_CODE (c) == COMPLEX_CST)
8240 return fold_build1_loc (EXPR_LOCATION (t),
8241 TREE_CODE (t), TREE_TYPE (t), c);
8242 break;
8245 default:
8246 break;
8249 return NULL_TREE;
8252 tree
8253 fold_const_aggregate_ref (tree t)
8255 return fold_const_aggregate_ref_1 (t, NULL);
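/* Worked example for the bit-field path above (illustration, assuming
   a little-endian target with a byte-sized representative):

     static const struct { unsigned a : 3; unsigned b : 5; } s = { 5, 9 };

   A read of s.b folds the representative first, giving
   5 | (9 << 3) == 0x4d, and then shifts right by the bit offset of
   `b' within the representative (3) to recover 9.  */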
8258 /* Lookup virtual method with index TOKEN in a virtual table V
8259 at OFFSET.
8260 If CAN_REFER is non-NULL, set it to false if the method
8261 is not referable or if the virtual table is ill-formed (such as rewritten
8262 by a non-C++-produced symbol); otherwise just return NULL_TREE in that case. */
8264 tree
8265 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8266 tree v,
8267 unsigned HOST_WIDE_INT offset,
8268 bool *can_refer)
8270 tree vtable = v, init, fn;
8271 unsigned HOST_WIDE_INT size;
8272 unsigned HOST_WIDE_INT elt_size, access_index;
8273 tree domain_type;
8275 if (can_refer)
8276 *can_refer = true;
8278 /* First of all, double-check that we have a virtual table. */
8279 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8281 /* Pass down that we lost track of the target. */
8282 if (can_refer)
8283 *can_refer = false;
8284 return NULL_TREE;
8287 init = ctor_for_folding (v);
8289 /* The virtual tables should always be born with constructors
8290 and we should always assume that they are available for
8291 folding. At the moment we do not stream them in all cases,
8292 but it should never happen that the ctor seems unreachable. */
8293 gcc_assert (init);
8294 if (init == error_mark_node)
8296 /* Pass down that we lost track of the target. */
8297 if (can_refer)
8298 *can_refer = false;
8299 return NULL_TREE;
8301 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8302 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8303 offset *= BITS_PER_UNIT;
8304 offset += token * size;
8306 /* Look up the value in the constructor, which is assumed to be an array.
8307 This is equivalent to
8308 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8309 offset, size, NULL);
8310 but in constant time. We expect that the frontend produced a simple
8311 array without indexed initializers. */
8313 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8314 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8315 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8316 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8318 access_index = offset / BITS_PER_UNIT / elt_size;
8319 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8321 /* The C++ FE can now produce indexed fields, and we check if the indexes
8322 match. */
8323 if (access_index < CONSTRUCTOR_NELTS (init))
8325 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8326 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8327 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8328 STRIP_NOPS (fn);
8330 else
8331 fn = NULL;
8333 /* For a type-inconsistent program we may end up looking up a virtual method
8334 in a virtual table that does not contain TOKEN entries. We may overrun
8335 the virtual table and pick up a constant or RTTI info pointer.
8336 In any case the call is undefined. */
8337 if (!fn
8338 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8339 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8340 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8341 else
8343 fn = TREE_OPERAND (fn, 0);
8345 /* When the cgraph node is missing and the function is not public, we cannot
8346 devirtualize. This can happen in WHOPR when the actual method
8347 ends up in another partition, because we found the devirtualization
8348 possibility too late. */
8349 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8351 if (can_refer)
8353 *can_refer = false;
8354 return fn;
8356 return NULL_TREE;
8360 /* Make sure we create a cgraph node for functions we'll reference.
8361 They can be non-existent if the reference comes from an entry
8362 of an external vtable for example. */
8363 cgraph_node::get_create (fn);
8365 return fn;
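/* Worked example (illustrative numbers): with 8-byte vtable slots
   (so size == 64 bits and elt_size == 8), TOKEN == 2 and OFFSET == 16
   give offset = 16 * 8 + 2 * 64 == 256 bits, hence
   access_index = 256 / 8 / 8 == 4.  */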
8368 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8369 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8370 KNOWN_BINFO carries the binfo describing the true type of
8371 OBJ_TYPE_REF_OBJECT(REF).
8372 If CAN_REFER is non-NULL, set it to false if the method
8373 is not referable or if the virtual table is ill-formed (such as rewritten
8374 by a non-C++-produced symbol); otherwise just return NULL_TREE in that case. */
8376 tree
8377 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8378 bool *can_refer)
8380 unsigned HOST_WIDE_INT offset;
8381 tree v;
8383 v = BINFO_VTABLE (known_binfo);
8384 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
8385 if (!v)
8386 return NULL_TREE;
8388 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8390 if (can_refer)
8391 *can_refer = false;
8392 return NULL_TREE;
8394 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8397 /* Given a pointer value T, return a simplified version of an
8398 indirection through T, or NULL_TREE if no simplification is
8399 possible. Note that the resulting type may be different from
8400 the type pointed to in the sense that it is still compatible
8401 from the langhooks point of view. */
8403 tree
8404 gimple_fold_indirect_ref (tree t)
8406 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8407 tree sub = t;
8408 tree subtype;
8410 STRIP_NOPS (sub);
8411 subtype = TREE_TYPE (sub);
8412 if (!POINTER_TYPE_P (subtype)
8413 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8414 return NULL_TREE;
8416 if (TREE_CODE (sub) == ADDR_EXPR)
8418 tree op = TREE_OPERAND (sub, 0);
8419 tree optype = TREE_TYPE (op);
8420 /* *&p => p */
8421 if (useless_type_conversion_p (type, optype))
8422 return op;
8424 /* *(foo *)&fooarray => fooarray[0] */
8425 if (TREE_CODE (optype) == ARRAY_TYPE
8426 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8427 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8429 tree type_domain = TYPE_DOMAIN (optype);
8430 tree min_val = size_zero_node;
8431 if (type_domain && TYPE_MIN_VALUE (type_domain))
8432 min_val = TYPE_MIN_VALUE (type_domain);
8433 if (TREE_CODE (min_val) == INTEGER_CST)
8434 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8436 /* *(foo *)&complexfoo => __real__ complexfoo */
8437 else if (TREE_CODE (optype) == COMPLEX_TYPE
8438 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8439 return fold_build1 (REALPART_EXPR, type, op);
8440 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8441 else if (TREE_CODE (optype) == VECTOR_TYPE
8442 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8444 tree part_width = TYPE_SIZE (type);
8445 tree index = bitsize_int (0);
8446 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8450 /* *(p + CST) -> ... */
8451 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8452 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8454 tree addr = TREE_OPERAND (sub, 0);
8455 tree off = TREE_OPERAND (sub, 1);
8456 tree addrtype;
8458 STRIP_NOPS (addr);
8459 addrtype = TREE_TYPE (addr);
8461 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8462 if (TREE_CODE (addr) == ADDR_EXPR
8463 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8464 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8465 && tree_fits_uhwi_p (off))
8467 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8468 tree part_width = TYPE_SIZE (type);
8469 unsigned HOST_WIDE_INT part_widthi
8470 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8471 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8472 tree index = bitsize_int (indexi);
8473 if (known_lt (offset / part_widthi,
8474 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8475 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8476 part_width, index);
8479 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8480 if (TREE_CODE (addr) == ADDR_EXPR
8481 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8482 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8484 tree size = TYPE_SIZE_UNIT (type);
8485 if (tree_int_cst_equal (size, off))
8486 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8489 /* *(p + CST) -> MEM_REF <p, CST>. */
8490 if (TREE_CODE (addr) != ADDR_EXPR
8491 || DECL_P (TREE_OPERAND (addr, 0)))
8492 return fold_build2 (MEM_REF, type,
8493 addr,
8494 wide_int_to_tree (ptype, wi::to_wide (off)));
8497 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8498 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8499 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8500 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8502 tree type_domain;
8503 tree min_val = size_zero_node;
8504 tree osub = sub;
8505 sub = gimple_fold_indirect_ref (sub);
8506 if (! sub)
8507 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8508 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8509 if (type_domain && TYPE_MIN_VALUE (type_domain))
8510 min_val = TYPE_MIN_VALUE (type_domain);
8511 if (TREE_CODE (min_val) == INTEGER_CST)
8512 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8515 return NULL_TREE;
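/* Source-level view of the folds above (illustration; hypothetical
   declarations):

     int a[4]; int *p; _Complex float cf;

     *&a[1]          -->  a[1]
     *(int *)&a      -->  a[0]
     *(p + 1)        -->  MEM_REF <p, 4>   (4-byte int)
     *(float *)&cf   -->  __real__ cf  */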
8518 /* Return true if CODE is an operation that when operating on signed
8519 integer types involves undefined behavior on overflow and the
8520 operation can be expressed with unsigned arithmetic. */
8522 bool
8523 arith_code_with_undefined_signed_overflow (tree_code code)
8525 switch (code)
8527 case ABS_EXPR:
8528 case PLUS_EXPR:
8529 case MINUS_EXPR:
8530 case MULT_EXPR:
8531 case NEGATE_EXPR:
8532 case POINTER_PLUS_EXPR:
8533 return true;
8534 default:
8535 return false;
8539 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8540 operation that can be transformed to unsigned arithmetic by converting
8541 its operand, carrying out the operation in the corresponding unsigned
8542 type and converting the result back to the original type.
8544 Returns a sequence of statements that replace STMT and also contain
8545 a modified form of STMT itself. */
8547 gimple_seq
8548 rewrite_to_defined_overflow (gimple *stmt)
8550 if (dump_file && (dump_flags & TDF_DETAILS))
8552 fprintf (dump_file, "rewriting stmt with undefined signed "
8553 "overflow ");
8554 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8557 tree lhs = gimple_assign_lhs (stmt);
8558 tree type = unsigned_type_for (TREE_TYPE (lhs));
8559 gimple_seq stmts = NULL;
8560 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8561 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8562 else
8563 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8565 tree op = gimple_op (stmt, i);
8566 op = gimple_convert (&stmts, type, op);
8567 gimple_set_op (stmt, i, op);
8569 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8570 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8571 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8572 gimple_set_modified (stmt, true);
8573 gimple_seq_add_stmt (&stmts, stmt);
8574 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8575 gimple_seq_add_stmt (&stmts, cvt);
8577 return stmts;
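/* The rewrite above, written out in source form (a sketch, assuming
   32-bit int):

     // before: signed addition, overflow is undefined
     int f (int a, int b) { return a + b; }

     // after: computes the same value, overflow is now well defined
     int g (int a, int b)
     {
       unsigned int ua = (unsigned int) a;
       unsigned int ub = (unsigned int) b;
       return (int) (ua + ub);
     }
*/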
8581 /* The valueization hook we use for the gimple_build API simplification.
8582 This makes us match fold_buildN behavior by only combining with
8583 statements in the sequence(s) we are currently building. */
8585 static tree
8586 gimple_build_valueize (tree op)
8588 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8589 return op;
8590 return NULL_TREE;
8593 /* Build the expression CODE OP0 of type TYPE with location LOC,
8594 simplifying it first if possible. Returns the built
8595 expression value and appends statements possibly defining it
8596 to SEQ. */
8598 tree
8599 gimple_build (gimple_seq *seq, location_t loc,
8600 enum tree_code code, tree type, tree op0)
8602 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8603 if (!res)
8605 res = create_tmp_reg_or_ssa_name (type);
8606 gimple *stmt;
8607 if (code == REALPART_EXPR
8608 || code == IMAGPART_EXPR
8609 || code == VIEW_CONVERT_EXPR)
8610 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8611 else
8612 stmt = gimple_build_assign (res, code, op0);
8613 gimple_set_location (stmt, loc);
8614 gimple_seq_add_stmt_without_update (seq, stmt);
8616 return res;
8619 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8620 simplifying it first if possible. Returns the built
8621 expression value and appends statements possibly defining it
8622 to SEQ. */
8624 tree
8625 gimple_build (gimple_seq *seq, location_t loc,
8626 enum tree_code code, tree type, tree op0, tree op1)
8628 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8629 if (!res)
8631 res = create_tmp_reg_or_ssa_name (type);
8632 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8633 gimple_set_location (stmt, loc);
8634 gimple_seq_add_stmt_without_update (seq, stmt);
8636 return res;
8639 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8640 simplifying it first if possible. Returns the built
8641 expression value and appends statements possibly defining it
8642 to SEQ. */
8644 tree
8645 gimple_build (gimple_seq *seq, location_t loc,
8646 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8648 tree res = gimple_simplify (code, type, op0, op1, op2,
8649 seq, gimple_build_valueize);
8650 if (!res)
8652 res = create_tmp_reg_or_ssa_name (type);
8653 gimple *stmt;
8654 if (code == BIT_FIELD_REF)
8655 stmt = gimple_build_assign (res, code,
8656 build3 (code, type, op0, op1, op2));
8657 else
8658 stmt = gimple_build_assign (res, code, op0, op1, op2);
8659 gimple_set_location (stmt, loc);
8660 gimple_seq_add_stmt_without_update (seq, stmt);
8662 return res;
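/* Typical usage of the gimple_build overloads above (a sketch, with
   hypothetical operands A and B of type TYPE): build (a + b) * a into
   a sequence, letting gimple_simplify fold on the fly:

     gimple_seq seq = NULL;
     tree t = gimple_build (&seq, loc, PLUS_EXPR, type, a, b);
     t = gimple_build (&seq, loc, MULT_EXPR, type, t, a);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
*/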
8665 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8666 void) with a location LOC. Returns the built expression value (or NULL_TREE
8667 if TYPE is void) and appends statements possibly defining it to SEQ. */
8669 tree
8670 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8672 tree res = NULL_TREE;
8673 gcall *stmt;
8674 if (internal_fn_p (fn))
8675 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8676 else
8678 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8679 stmt = gimple_build_call (decl, 0);
8681 if (!VOID_TYPE_P (type))
8683 res = create_tmp_reg_or_ssa_name (type);
8684 gimple_call_set_lhs (stmt, res);
8686 gimple_set_location (stmt, loc);
8687 gimple_seq_add_stmt_without_update (seq, stmt);
8688 return res;
8691 /* Build the call FN (ARG0) with a result of type TYPE
8692 (or no result if TYPE is void) with location LOC,
8693 simplifying it first if possible. Returns the built
8694 expression value (or NULL_TREE if TYPE is void) and appends
8695 statements possibly defining it to SEQ. */
8697 tree
8698 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8699 tree type, tree arg0)
8701 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8702 if (!res)
8704 gcall *stmt;
8705 if (internal_fn_p (fn))
8706 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8707 else
8709 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8710 stmt = gimple_build_call (decl, 1, arg0);
8712 if (!VOID_TYPE_P (type))
8714 res = create_tmp_reg_or_ssa_name (type);
8715 gimple_call_set_lhs (stmt, res);
8717 gimple_set_location (stmt, loc);
8718 gimple_seq_add_stmt_without_update (seq, stmt);
8720 return res;
8723 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8724 (or no result if TYPE is void) with location LOC,
8725 simplifying it first if possible. Returns the built
8726 expression value (or NULL_TREE if TYPE is void) and appends
8727 statements possibly defining it to SEQ. */
8729 tree
8730 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8731 tree type, tree arg0, tree arg1)
8733 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8734 if (!res)
8736 gcall *stmt;
8737 if (internal_fn_p (fn))
8738 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8739 else
8741 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8742 stmt = gimple_build_call (decl, 2, arg0, arg1);
8744 if (!VOID_TYPE_P (type))
8746 res = create_tmp_reg_or_ssa_name (type);
8747 gimple_call_set_lhs (stmt, res);
8749 gimple_set_location (stmt, loc);
8750 gimple_seq_add_stmt_without_update (seq, stmt);
8752 return res;
8755 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8756 (or no result if TYPE is void) with location LOC,
8757 simplifying it first if possible. Returns the built
8758 expression value (or NULL_TREE if TYPE is void) and appends
8759 statements possibly defining it to SEQ. */
8761 tree
8762 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8763 tree type, tree arg0, tree arg1, tree arg2)
8765 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8766 seq, gimple_build_valueize);
8767 if (!res)
8769 gcall *stmt;
8770 if (internal_fn_p (fn))
8771 stmt = gimple_build_call_internal (as_internal_fn (fn),
8772 3, arg0, arg1, arg2);
8773 else
8775 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8776 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8778 if (!VOID_TYPE_P (type))
8780 res = create_tmp_reg_or_ssa_name (type);
8781 gimple_call_set_lhs (stmt, res);
8783 gimple_set_location (stmt, loc);
8784 gimple_seq_add_stmt_without_update (seq, stmt);
8786 return res;
8789 /* Build the conversion (TYPE) OP with a result of type TYPE
8790 with location LOC if such conversion is necessary in GIMPLE,
8791 simplifying it first.
8792 Returns the built expression value and appends
8793 statements possibly defining it to SEQ. */
8795 tree
8796 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8798 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8799 return op;
8800 return gimple_build (seq, loc, NOP_EXPR, type, op);
8803 /* Build the conversion (ptrofftype) OP with a result of a type
8804 compatible with ptrofftype with location LOC if such conversion
8805 is necessary in GIMPLE, simplifying it first.
8806 Returns the built expression value and appends
8807 statements possibly defining it to SEQ. */
8809 tree
8810 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8812 if (ptrofftype_p (TREE_TYPE (op)))
8813 return op;
8814 return gimple_convert (seq, loc, sizetype, op);
8817 /* Build a vector of type TYPE in which each element has the value OP.
8818 Return a gimple value for the result, appending any new statements
8819 to SEQ. */
8821 tree
8822 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8823 tree op)
8825 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8826 && !CONSTANT_CLASS_P (op))
8827 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8829 tree res, vec = build_vector_from_val (type, op);
8830 if (is_gimple_val (vec))
8831 return vec;
8832 if (gimple_in_ssa_p (cfun))
8833 res = make_ssa_name (type);
8834 else
8835 res = create_tmp_reg (type);
8836 gimple *stmt = gimple_build_assign (res, vec);
8837 gimple_set_location (stmt, loc);
8838 gimple_seq_add_stmt_without_update (seq, stmt);
8839 return res;
8842 /* Build a vector from BUILDER, handling the case in which some elements
8843 are non-constant. Return a gimple value for the result, appending any
8844 new instructions to SEQ.
8846 BUILDER must not have a stepped encoding on entry. This is because
8847 the function is not geared up to handle the arithmetic that would
8848 be needed in the variable case, and any code building a vector that
8849 is known to be constant should use BUILDER->build () directly. */
8851 tree
8852 gimple_build_vector (gimple_seq *seq, location_t loc,
8853 tree_vector_builder *builder)
8855 gcc_assert (builder->nelts_per_pattern () <= 2);
8856 unsigned int encoded_nelts = builder->encoded_nelts ();
8857 for (unsigned int i = 0; i < encoded_nelts; ++i)
8858 if (!CONSTANT_CLASS_P ((*builder)[i]))
8860 tree type = builder->type ();
8861 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8862 vec<constructor_elt, va_gc> *v;
8863 vec_alloc (v, nelts);
8864 for (i = 0; i < nelts; ++i)
8865 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
8867 tree res;
8868 if (gimple_in_ssa_p (cfun))
8869 res = make_ssa_name (type);
8870 else
8871 res = create_tmp_reg (type);
8872 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8873 gimple_set_location (stmt, loc);
8874 gimple_seq_add_stmt_without_update (seq, stmt);
8875 return res;
8877 return builder->build ();
8880 /* Emit gimple statements into SEQ that take the value given in OLD_SIZE
8881 and generate a value guaranteed to be rounded upwards to ALIGN.
8883 Return the tree node representing this size; it is of TREE_TYPE TYPE. */
8885 tree
8886 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8887 tree old_size, unsigned HOST_WIDE_INT align)
8889 unsigned HOST_WIDE_INT tg_mask = align - 1;
8890 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8891 gcc_assert (INTEGRAL_TYPE_P (type));
8892 tree tree_mask = build_int_cst (type, tg_mask);
8893 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8894 tree_mask);
8896 tree mask = build_int_cst (type, -align);
8897 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
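/* The emitted sequence computes the classic power-of-two round-up
   (illustration; ALIGN must be a power of two):

     new_size = (old_size + align - 1) & ~(align - 1);

   e.g. for ALIGN == 8: 13 rounds up to 16 and 16 stays 16.  */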
8900 /* Return true if the result of assignment STMT is known to be non-negative.
8901 If the return value is based on the assumption that signed overflow is
8902 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8903 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8905 static bool
8906 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8907 int depth)
8909 enum tree_code code = gimple_assign_rhs_code (stmt);
8910 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
8911 switch (get_gimple_rhs_class (code))
8913 case GIMPLE_UNARY_RHS:
8914 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8915 type,
8916 gimple_assign_rhs1 (stmt),
8917 strict_overflow_p, depth);
8918 case GIMPLE_BINARY_RHS:
8919 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8920 type,
8921 gimple_assign_rhs1 (stmt),
8922 gimple_assign_rhs2 (stmt),
8923 strict_overflow_p, depth);
8924 case GIMPLE_TERNARY_RHS:
8925 return false;
8926 case GIMPLE_SINGLE_RHS:
8927 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8928 strict_overflow_p, depth);
8929 case GIMPLE_INVALID_RHS:
8930 break;
8932 gcc_unreachable ();
8935 /* Return true if the return value of call STMT is known to be non-negative.
8936 If the return value is based on the assumption that signed overflow is
8937 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8938 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8940 static bool
8941 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8942 int depth)
8944 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8945 gimple_call_arg (stmt, 0) : NULL_TREE;
8946 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8947 gimple_call_arg (stmt, 1) : NULL_TREE;
8948 tree lhs = gimple_call_lhs (stmt);
8949 return (lhs
8950 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
8951 gimple_call_combined_fn (stmt),
8952 arg0, arg1,
8953 strict_overflow_p, depth));
8956 /* Return true if the result of PHI STMT is known to be non-negative.
8957 If the return value is based on the assumption that signed overflow is
8958 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8959 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8961 static bool
8962 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8963 int depth)
8965 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8967 tree arg = gimple_phi_arg_def (stmt, i);
8968 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8969 return false;
8971 return true;
8974 /* Return true if STMT is known to compute a non-negative value.
8975 If the return value is based on the assumption that signed overflow is
8976 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8977 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8979 bool
8980 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8981 int depth)
8983 switch (gimple_code (stmt))
8985 case GIMPLE_ASSIGN:
8986 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8987 depth);
8988 case GIMPLE_CALL:
8989 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8990 depth);
8991 case GIMPLE_PHI:
8992 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8993 depth);
8994 default:
8995 return false;
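/* Example (illustrative SSA names): for

     _1 = ABS_EXPR <x_2>;

   the result is known non-negative only under the assumption that
   signed overflow is undefined (ABS of INT_MIN overflows), so
   *STRICT_OVERFLOW_P is set; for

     _3 = PHI <5, _1>

   the PHI is non-negative because every argument is.  */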
8999 /* Return true if the floating-point value computed by assignment STMT
9000 is known to have an integer value. We also allow +Inf, -Inf and NaN
9001 to be considered integer values. Return false for signaling NaN.
9003 DEPTH is the current nesting depth of the query. */
9005 static bool
9006 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9008 enum tree_code code = gimple_assign_rhs_code (stmt);
9009 switch (get_gimple_rhs_class (code))
9011 case GIMPLE_UNARY_RHS:
9012 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9013 gimple_assign_rhs1 (stmt), depth);
9014 case GIMPLE_BINARY_RHS:
9015 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9016 gimple_assign_rhs1 (stmt),
9017 gimple_assign_rhs2 (stmt), depth);
9018 case GIMPLE_TERNARY_RHS:
9019 return false;
9020 case GIMPLE_SINGLE_RHS:
9021 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9022 case GIMPLE_INVALID_RHS:
9023 break;
9025 gcc_unreachable ();
9028 /* Return true if the floating-point value computed by call STMT is known
9029 to have an integer value. We also allow +Inf, -Inf and NaN to be
9030 considered integer values. Return false for signaling NaN.
9032 DEPTH is the current nesting depth of the query. */
9034 static bool
9035 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9037 tree arg0 = (gimple_call_num_args (stmt) > 0
9038 ? gimple_call_arg (stmt, 0)
9039 : NULL_TREE);
9040 tree arg1 = (gimple_call_num_args (stmt) > 1
9041 ? gimple_call_arg (stmt, 1)
9042 : NULL_TREE);
9043 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9044 arg0, arg1, depth);
9047 /* Return true if the floating-point result of phi STMT is known to have
9048 an integer value. We also allow +Inf, -Inf and NaN to be considered
9049 integer values. Return false for signaling NaN.
9051 DEPTH is the current nesting depth of the query. */
9053 static bool
9054 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9056 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9058 tree arg = gimple_phi_arg_def (stmt, i);
9059 if (!integer_valued_real_single_p (arg, depth + 1))
9060 return false;
9062 return true;
9065 /* Return true if the floating-point value computed by STMT is known
9066 to have an integer value. We also allow +Inf, -Inf and NaN to be
9067 considered integer values. Return false for signaling NaN.
9069 DEPTH is the current nesting depth of the query. */
9071 bool
9072 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9074 switch (gimple_code (stmt))
9076 case GIMPLE_ASSIGN:
9077 return gimple_assign_integer_valued_real_p (stmt, depth);
9078 case GIMPLE_CALL:
9079 return gimple_call_integer_valued_real_p (stmt, depth);
9080 case GIMPLE_PHI:
9081 return gimple_phi_integer_valued_real_p (stmt, depth);
9082 default:
9083 return false;