/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to other compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we always can introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in the units where they are used, and when the other unit was compiled
     with LTO it is possible that the vtable was kept public while the
     function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
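
/* Illustrative use, as a sketch (the operand names are borrowed from a
   caller later in this file):

     srcmem = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), new_stmt);

   yields an SSA name when folding inside an SSA-form function but a plain
   temporary register when called during gimplification, so the same
   folding code works in both phases.  */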

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
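
/* Example, as a sketch (BUF is a hypothetical global): an initializer such
   as

     (char *) &buf + 4

   reaches here as a POINTER_PLUS_EXPR and the code above rewrites it into
   the equivalent

     &MEM[(char *) &buf + 4]

   i.e. an ADDR_EXPR that is_gimple_min_invariant accepts.  */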

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
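
/* Example: given

     static const int answer = 42;

   loads of ANSWER fold to 42 via the canonicalized constructor value,
   while a non-overridable 'const' variable with no initializer at all
   falls through to the zero-initializer rule above and folds to a zero
   constant of its type.  */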

/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
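
/* Example: a REALPART_EXPR or IMAGPART_EXPR of a COMPLEX_CST folds
   through fold_unary_loc, and a BIT_FIELD_REF of a VECTOR_CST (e.g.
   extracting one lane) folds through fold_ternary_loc; any result that
   is not a GIMPLE invariant is discarded above and NULL_TREE is
   returned instead.  */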

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
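
/* Example: fold-const.c may hand back a tree such as a[i + 1] as a
   "simplified" RHS.  Its root is a tcc_reference node other than
   BIT_FIELD_REF, so the function above rejects it and the caller keeps
   the original statement rather than emitting GIMPLE with an embedded
   index addition.  */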

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has a lhs, the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
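
/* Illustrative call, as a sketch with hypothetical operands: to rewrite
   the call at *GSI into memcpy (dest, src, len) in place, one could write

     update_gimple_call (gsi, builtin_decl_implicit (BUILT_IN_MEMCPY),
			 3, dest, src, len);

   The lhs, virtual operands, location and block of the old statement are
   carried over by finish_update_gimple_call.  */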

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
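
/* Example, as a sketch: folding a call whose result is known, such as

     n_5 = strlen ("abcd");

   can use replace_call_with_value (gsi, size_int (4)); the statement
   becomes n_5 = 4, and because the replacement does no store, the call's
   virtual definition is released above.  */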

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
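
/* Example: for a 64-bit size_t, PREC is 64 and the valid range computed
   above is [0, 0x7fffffffffffffff], i.e. [0, SSIZE_MAX] with
   SSIZE_MAX = 2^63 - 1.  If the range query proves SIZE is either zero
   or has its sign bit set (say, a negative ssize_t passed by mistake),
   intersecting with the valid range leaves only zero, so the copy can
   be elided.  */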

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores, inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias, optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 will no longer have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant; we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we have chosen an access type, express the other side
	 in terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

    set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
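
/* Example of the inline small-copy path above, as a sketch: for

     memcpy (&d, &s, 8);

   on a target where an 8-byte integer mode is available and the operands
   are sufficiently aligned (or unaligned access is cheap), the call is
   replaced by a load and a store of that mode, roughly

     tmp = MEM [(char *) &s];
     MEM [(char *) &d] = tmp;

   Overlapping operands, out-of-bounds sizes, and reverse storage order
   aggregates all bail out, leaving the call for later passes and
   diagnostics.  */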

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
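
/* Example: bcmp (p, q, n) becomes memcmp (p, q, n).  This is safe
   because bcmp's result is only meaningful compared against zero, and
   memcmp returns zero in exactly the same cases.  */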

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
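
/* Example, note the operand swap:

     bcopy (src, dest, len)  =>  memmove (dest, src, len)

   memmove rather than memcpy is required because bcopy was specified to
   handle overlapping buffers.  */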

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
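
/* Example:

     bzero (buf, n)  =>  memset (buf, 0, n)

   after which the fold_stmt call above gives gimple_fold_builtin_memset
   (below) a chance to reduce the memset further, e.g. to a single store
   when N is a small constant matching an integer mode.  */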

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to C.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
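
/* Example of the byte-splat above: for memset (&v, 0x41, 8) with an
   8-byte, sufficiently aligned V, CVAL is widened as

     0x41 -> 0x4141 -> 0x41414141 -> 0x4141414141414141

   and the call becomes a single 8-byte store of that constant.  The
   last step is written "(cval << 31) << 1" so the shift count never
   reaches the width of the type.  */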

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
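
/* Example: for

     char a[8];
     ... strlen (a) ...

   with SRK_LENRANGE the VAR_DECL/ARRAY_TYPE handling above produces
   minlen 0 (the stored string may be empty) and an upper bound of
   sizeof a - 1 == 7, reserving one byte for the terminating nul.  */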
1828 /* For an ARG referencing one or more strings, try to obtain the range
1829 of their lengths, or the size of the largest array ARG referes to if
1830 the range of lengths cannot be determined, and store all in *PDATA.
1831 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1832 the maximum constant value.
1833 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1834 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1835 length or if we are unable to determine the length, return false.
1836 VISITED is a bitmap of visited variables.
1837 RKIND determines the kind of value or range to obtain (see
1838 strlen_range_kind).
1839 Set PDATA->DECL if ARG refers to an unterminated constant array.
1840 On input, set ELTSIZE to 1 for normal single byte character strings,
1841 and either 2 or 4 for wide characer strings (the size of wchar_t).
1842 Return true if *PDATA was successfully populated and false otherwise. */
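/* As an illustrative sketch (the SSA names and string contents are
   placeholders), given GIMPLE along the lines of

     p_3 = PHI <&"ab"(2), &"wxyz"(3)>

   following the PHI arguments under SRK_LENRANGE yields
   PDATA->MINLEN == 2 and PDATA->MAXLEN == 4.  */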
1844 static bool
1845 get_range_strlen (tree arg, bitmap *visited,
1846 strlen_range_kind rkind,
1847 c_strlen_data *pdata, unsigned eltsize)
1850 if (TREE_CODE (arg) != SSA_NAME)
1851 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1853 /* If ARG is registered for SSA update we cannot look at its defining
1854 statement. */
1855 if (name_registered_for_update_p (arg))
1856 return false;
1858 /* If we were already here, break the infinite cycle. */
1859 if (!*visited)
1860 *visited = BITMAP_ALLOC (NULL);
1861 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1862 return true;
1864 tree var = arg;
1865 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1867 switch (gimple_code (def_stmt))
1869 case GIMPLE_ASSIGN:
1870 /* The RHS of the statement defining VAR must either have a
1871 constant length or come from another SSA_NAME with a constant
1872 length. */
1873 if (gimple_assign_single_p (def_stmt)
1874 || gimple_assign_unary_nop_p (def_stmt))
1876 tree rhs = gimple_assign_rhs1 (def_stmt);
1877 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1879 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1881 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1882 gimple_assign_rhs3 (def_stmt) };
1884 for (unsigned int i = 0; i < 2; i++)
1885 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1887 if (rkind != SRK_LENRANGE)
1888 return false;
1889 /* Set the upper bound to the maximum to prevent
1890 it from being adjusted in the next iteration but
1891 leave MINLEN and the more conservative MAXBOUND
1892 determined so far alone (or leave them null if
1893 they haven't been set yet). That MINLEN is
1894 in fact zero can be inferred from MAXLEN being
1895 unbounded, but the discovered minimum is used for
1896 diagnostics. */
1897 pdata->maxlen = build_all_ones_cst (size_type_node);
1899 return true;
1901 return false;
1903 case GIMPLE_PHI:
1904 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1905 must have a constant length. */
1906 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1908 tree arg = gimple_phi_arg (def_stmt, i)->def;
1910 /* If this PHI has itself as an argument, we cannot
1911 determine the string length of this argument. However,
1912 if we can find a constant string length for the other
1913 PHI args then we can still be sure that this is a
1914 constant string length. So be optimistic and just
1915 continue with the next argument. */
1916 if (arg == gimple_phi_result (def_stmt))
1917 continue;
1919 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1921 if (rkind != SRK_LENRANGE)
1922 return false;
1923 /* Set the upper bound to the maximum to prevent
1924 it from being adjusted in the next iteration but
1925 leave MINLEN and the more conservative MAXBOUND
1926 determined so far alone (or leave them null if
1927 they haven't been set yet). That MINLEN is
1928 in fact zero can be inferred from MAXLEN being
1929 unbounded, but the discovered minimum is used for
1930 diagnostics. */
1931 pdata->maxlen = build_all_ones_cst (size_type_node);
1934 return true;
1936 default:
1937 return false;
1941 /* Try to obtain the range of the lengths of the string(s) referenced
1942 by ARG, or the size of the largest array ARG refers to if the range
1943 of lengths cannot be determined, and store all in *PDATA which must
1944 be zero-initialized on input, except PDATA->MAXBOUND may be set to
1945 a non-null tree node other than INTEGER_CST to request that it be
1946 set to the length of the longest string in a PHI. ELTSIZE is
1947 the expected size of the string element in bytes: 1 for char and
1948 some power of 2 for wide characters.
1949 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1950 for optimization. Returning false means that a nonzero PDATA->MINLEN
1951 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1952 is -1 (in that case, the actual range is indeterminate, i.e.,
1953 [0, PTRDIFF_MAX - 2]). */
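/* A minimal usage sketch (the variable names are hypothetical):

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       use [lendata.minlen, lendata.maxlen] as bounds on the length;

   When false is returned the range is indeterminate as described
   above.  */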
1955 bool
1956 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1958 bitmap visited = NULL;
1959 tree maxbound = pdata->maxbound;
1961 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1963 /* On failure extend the length range to an impossible maximum
1964 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1965 members can stay unchanged regardless. */
1966 pdata->minlen = ssize_int (0);
1967 pdata->maxlen = build_all_ones_cst (size_type_node);
1969 else if (!pdata->minlen)
1970 pdata->minlen = ssize_int (0);
1972 /* If it's unchanged from its initial non-null value, set the conservative
1973 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1974 if (maxbound && pdata->maxbound == maxbound)
1975 pdata->maxbound = build_all_ones_cst (size_type_node);
1977 if (visited)
1978 BITMAP_FREE (visited);
1980 return !integer_all_onesp (pdata->maxlen);
1983 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1984 For ARG of pointer types, NONSTR indicates if the caller is prepared
1985 to handle unterminated strings. For integer ARG and when RKIND ==
1986 SRK_INT_VALUE, NONSTR must be null.
1988 If an unterminated array is discovered and our caller handles
1989 unterminated arrays, then bubble up the offending DECL and
1990 return the maximum size. Otherwise return NULL. */
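/* For example, for ARG pointing to the literal "hello" this returns
   the constant 5 under SRK_STRLEN; for an unterminated constant
   array it returns NULL_TREE unless NONSTR is non-null, in which
   case *NONSTR is set to the array's declaration.  */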
1992 static tree
1993 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1995 /* A non-null NONSTR is meaningless when determining the maximum
1996 value of an integer ARG. */
1997 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1998 /* ARG must have an integral type when RKIND says so. */
1999 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2001 bitmap visited = NULL;
2003 /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
2004 is unbounded. */
2005 c_strlen_data lendata = { };
2006 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
2007 lendata.maxlen = NULL_TREE;
2008 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2009 lendata.maxlen = NULL_TREE;
2011 if (visited)
2012 BITMAP_FREE (visited);
2014 if (nonstr)
2016 /* For callers prepared to handle unterminated arrays set
2017 *NONSTR to point to the declaration of the array and return
2018 the maximum length/size. */
2019 *nonstr = lendata.decl;
2020 return lendata.maxlen;
2023 /* Fail if the constant array isn't nul-terminated. */
2024 return lendata.decl ? NULL_TREE : lendata.maxlen;
2028 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2029 When the length of SRC is known, the call is replaced with an
2030 equivalent memcpy. Return false if no simplification can be made. */
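/* As an illustrative sketch (D is a placeholder destination), a call
   with a source of known length such as

     strcpy (d, "hello");

   is replaced by the equivalent

     memcpy (d, "hello", 6);

   where 6 is strlen ("hello") + 1.  */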
2032 static bool
2033 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2034 tree dest, tree src)
2036 gimple *stmt = gsi_stmt (*gsi);
2037 location_t loc = gimple_location (stmt);
2038 tree fn;
2040 /* If SRC and DEST are the same (and not volatile), return DEST. */
2041 if (operand_equal_p (src, dest, 0))
2043 /* Issue -Wrestrict unless the pointers are null (those do
2044 not point to objects and so do not indicate an overlap;
2045 such calls could be the result of sanitization and jump
2046 threading). */
2047 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2049 tree func = gimple_call_fndecl (stmt);
2051 warning_at (loc, OPT_Wrestrict,
2052 "%qD source argument is the same as destination",
2053 func);
2056 replace_call_with_value (gsi, dest);
2057 return true;
2060 if (optimize_function_for_size_p (cfun))
2061 return false;
2063 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2064 if (!fn)
2065 return false;
2067 /* Set to non-null if ARG refers to an unterminated array. */
2068 tree nonstr = NULL;
2069 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2071 if (nonstr)
2073 /* Avoid folding calls with unterminated arrays. */
2074 if (!gimple_no_warning_p (stmt))
2075 warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
2076 gimple_set_no_warning (stmt, true);
2077 return false;
2080 if (!len)
2081 return false;
2083 len = fold_convert_loc (loc, size_type_node, len);
2084 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2085 len = force_gimple_operand_gsi (gsi, len, true,
2086 NULL_TREE, true, GSI_SAME_STMT);
2087 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2088 replace_call_with_call_and_fold (gsi, repl);
2089 return true;
2092 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2093 Return false if no simplification can be made. */
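/* For illustration (D is a placeholder destination), a call whose
   bound covers the source including the terminating nul, e.g.

     strncpy (d, "hi", 3);

   becomes

     memcpy (d, "hi", 3);

   while a zero bound folds the call to D itself.  */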
2096 static bool
2097 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2098 tree dest, tree src, tree len)
2100 gimple *stmt = gsi_stmt (*gsi);
2101 location_t loc = gimple_location (stmt);
2102 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2104 /* If the LEN parameter is zero, return DEST. */
2105 if (integer_zerop (len))
2107 /* Avoid warning if the destination refers to an array/pointer
2108 decorated with attribute nonstring. */
2109 if (!nonstring)
2111 tree fndecl = gimple_call_fndecl (stmt);
2113 /* Warn about the lack of nul termination: the result is not
2114 a (nul-terminated) string. */
2115 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2116 if (slen && !integer_zerop (slen))
2117 warning_at (loc, OPT_Wstringop_truncation,
2118 "%G%qD destination unchanged after copying no bytes "
2119 "from a string of length %E",
2120 stmt, fndecl, slen);
2121 else
2122 warning_at (loc, OPT_Wstringop_truncation,
2123 "%G%qD destination unchanged after copying no bytes",
2124 stmt, fndecl);
2127 replace_call_with_value (gsi, dest);
2128 return true;
2131 /* We can't compare slen with len as constants below if len is not a
2132 constant. */
2133 if (TREE_CODE (len) != INTEGER_CST)
2134 return false;
2136 /* Now, we must be passed a constant src ptr parameter. */
2137 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2138 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2139 return false;
2141 /* The size of the source string including the terminating nul. */
2142 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2144 /* We do not support simplification of this case, though we do
2145 support it when expanding trees into RTL. */
2146 /* FIXME: generate a call to __builtin_memset. */
2147 if (tree_int_cst_lt (ssize, len))
2148 return false;
2150 /* Diagnose truncation that leaves the copy unterminated. */
2151 maybe_diag_stxncpy_trunc (*gsi, src, len);
2153 /* OK, transform into builtin memcpy. */
2154 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2155 if (!fn)
2156 return false;
2158 len = fold_convert_loc (loc, size_type_node, len);
2159 len = force_gimple_operand_gsi (gsi, len, true,
2160 NULL_TREE, true, GSI_SAME_STMT);
2161 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2162 replace_call_with_call_and_fold (gsi, repl);
2164 return true;
2167 /* Fold function call to builtin strchr or strrchr.
2168 If both arguments are constant, evaluate and fold the result,
2169 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2170 In general strlen is significantly faster than strchr
2171 due to being a simpler operation. */
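/* For example:

     strchr ("hello", 'l')   =>   "hello" + 2
     strchr (s, 0)           =>   s + strlen (s)

   where S is a placeholder for a non-constant argument.  */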
2172 static bool
2173 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2175 gimple *stmt = gsi_stmt (*gsi);
2176 tree str = gimple_call_arg (stmt, 0);
2177 tree c = gimple_call_arg (stmt, 1);
2178 location_t loc = gimple_location (stmt);
2179 const char *p;
2180 char ch;
2182 if (!gimple_call_lhs (stmt))
2183 return false;
2185 /* Avoid folding if the first argument is not a nul-terminated array.
2186 Defer warning until later. */
2187 if (!check_nul_terminated_array (NULL_TREE, str))
2188 return false;
2190 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2192 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2194 if (p1 == NULL)
2196 replace_call_with_value (gsi, integer_zero_node);
2197 return true;
2200 tree len = build_int_cst (size_type_node, p1 - p);
2201 gimple_seq stmts = NULL;
2202 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2203 POINTER_PLUS_EXPR, str, len);
2204 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2205 gsi_replace_with_seq_vops (gsi, stmts);
2206 return true;
2209 if (!integer_zerop (c))
2210 return false;
2212 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2213 if (is_strrchr && optimize_function_for_size_p (cfun))
2215 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2217 if (strchr_fn)
2219 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2220 replace_call_with_call_and_fold (gsi, repl);
2221 return true;
2224 return false;
2227 tree len;
2228 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2230 if (!strlen_fn)
2231 return false;
2233 /* Create newstr = strlen (str). */
2234 gimple_seq stmts = NULL;
2235 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2236 gimple_set_location (new_stmt, loc);
2237 len = create_tmp_reg_or_ssa_name (size_type_node);
2238 gimple_call_set_lhs (new_stmt, len);
2239 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2241 /* Create (str p+ strlen (str)). */
2242 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2243 POINTER_PLUS_EXPR, str, len);
2244 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2245 gsi_replace_with_seq_vops (gsi, stmts);
2246 /* gsi now points at the assignment to the lhs, get a
2247 stmt iterator to the strlen.
2248 ??? We can't use gsi_for_stmt as that doesn't work when the
2249 CFG isn't built yet. */
2250 gimple_stmt_iterator gsi2 = *gsi;
2251 gsi_prev (&gsi2);
2252 fold_stmt (&gsi2);
2253 return true;
2256 /* Fold function call to builtin strstr.
2257 If both arguments are constant, evaluate and fold the result,
2258 additionally fold strstr (x, "") into x and strstr (x, "c")
2259 into strchr (x, 'c'). */
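/* For example (X is a placeholder for a non-constant argument):

     strstr ("hello", "ll")   =>   "hello" + 2
     strstr (x, "")           =>   x
     strstr (x, "c")          =>   strchr (x, 'c')  */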
2260 static bool
2261 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2263 gimple *stmt = gsi_stmt (*gsi);
2264 if (!gimple_call_lhs (stmt))
2265 return false;
2267 tree haystack = gimple_call_arg (stmt, 0);
2268 tree needle = gimple_call_arg (stmt, 1);
2270 /* Avoid folding if either argument is not a nul-terminated array.
2271 Defer warning until later. */
2272 if (!check_nul_terminated_array (NULL_TREE, haystack)
2273 || !check_nul_terminated_array (NULL_TREE, needle))
2274 return false;
2276 const char *q = c_getstr (needle);
2277 if (q == NULL)
2278 return false;
2280 if (const char *p = c_getstr (haystack))
2282 const char *r = strstr (p, q);
2284 if (r == NULL)
2286 replace_call_with_value (gsi, integer_zero_node);
2287 return true;
2290 tree len = build_int_cst (size_type_node, r - p);
2291 gimple_seq stmts = NULL;
2292 gimple *new_stmt
2293 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2294 haystack, len);
2295 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2296 gsi_replace_with_seq_vops (gsi, stmts);
2297 return true;
2300 /* For strstr (x, "") return x. */
2301 if (q[0] == '\0')
2303 replace_call_with_value (gsi, haystack);
2304 return true;
2307 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2308 if (q[1] == '\0')
2310 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2311 if (strchr_fn)
2313 tree c = build_int_cst (integer_type_node, q[0]);
2314 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2315 replace_call_with_call_and_fold (gsi, repl);
2316 return true;
2320 return false;
2323 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2324 to the call.
2326 Return false if no simplification was possible, otherwise replace
2327 the call with a more efficient equivalent (possibly calls to
2328 other builtin functions) and return true. */
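/* As an illustrative sketch (D is a placeholder destination), when
   optimizing for speed a call with a source of known length such as

     strcat (d, "xy");

   is split into

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);

   where 3 is strlen ("xy") + 1.  */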
2341 static bool
2342 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2344 gimple *stmt = gsi_stmt (*gsi);
2345 location_t loc = gimple_location (stmt);
2347 const char *p = c_getstr (src);
2349 /* If the string length is zero, return the dst parameter. */
2350 if (p && *p == '\0')
2352 replace_call_with_value (gsi, dst);
2353 return true;
2356 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2357 return false;
2359 /* See if we can store by pieces into (dst + strlen(dst)). */
2360 tree newdst;
2361 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2362 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2364 if (!strlen_fn || !memcpy_fn)
2365 return false;
2367 /* If the length of the source string isn't computable don't
2368 split strcat into strlen and memcpy. */
2369 tree len = get_maxval_strlen (src, SRK_STRLEN);
2370 if (! len)
2371 return false;
2373 /* Create strlen (dst). */
2374 gimple_seq stmts = NULL, stmts2;
2375 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2376 gimple_set_location (repl, loc);
2377 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2378 gimple_call_set_lhs (repl, newdst);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2381 /* Create (dst p+ strlen (dst)). */
2382 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2383 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2384 gimple_seq_add_seq_without_update (&stmts, stmts2);
2386 len = fold_convert_loc (loc, size_type_node, len);
2387 len = size_binop_loc (loc, PLUS_EXPR, len,
2388 build_int_cst (size_type_node, 1));
2389 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2390 gimple_seq_add_seq_without_update (&stmts, stmts2);
2392 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2393 gimple_seq_add_stmt_without_update (&stmts, repl);
2394 if (gimple_call_lhs (stmt))
2396 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2397 gimple_seq_add_stmt_without_update (&stmts, repl);
2398 gsi_replace_with_seq_vops (gsi, stmts);
2399 /* gsi now points at the assignment to the lhs, get a
2400 stmt iterator to the memcpy call.
2401 ??? We can't use gsi_for_stmt as that doesn't work when the
2402 CFG isn't built yet. */
2403 gimple_stmt_iterator gsi2 = *gsi;
2404 gsi_prev (&gsi2);
2405 fold_stmt (&gsi2);
2407 else
2409 gsi_replace_with_seq_vops (gsi, stmts);
2410 fold_stmt (gsi);
2412 return true;
2415 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2416 are the arguments to the call. */
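/* For example, __strcat_chk (d, "", sz) folds to D, and when SIZE
   is (size_t)-1, meaning no object size is known, the call becomes
   plain strcat (d, src).  D, SRC and SZ are placeholders.  */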
2418 static bool
2419 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2421 gimple *stmt = gsi_stmt (*gsi);
2422 tree dest = gimple_call_arg (stmt, 0);
2423 tree src = gimple_call_arg (stmt, 1);
2424 tree size = gimple_call_arg (stmt, 2);
2425 tree fn;
2426 const char *p;
2429 p = c_getstr (src);
2430 /* If the SRC parameter is "", return DEST. */
2431 if (p && *p == '\0')
2433 replace_call_with_value (gsi, dest);
2434 return true;
2437 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2438 return false;
2440 /* If __builtin_strcat_chk is used, assume strcat is available. */
2441 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2442 if (!fn)
2443 return false;
2445 gimple *repl = gimple_build_call (fn, 2, dest, src);
2446 replace_call_with_call_and_fold (gsi, repl);
2447 return true;
2450 /* Simplify a call to the strncat builtin. */
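/* For example, with a bound known to exceed the source length,

     strncat (d, "ab", 5)   =>   strcat (d, "ab")

   while a zero bound folds the call to D (a placeholder).  */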
2452 static bool
2453 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2455 gimple *stmt = gsi_stmt (*gsi);
2456 tree dst = gimple_call_arg (stmt, 0);
2457 tree src = gimple_call_arg (stmt, 1);
2458 tree len = gimple_call_arg (stmt, 2);
2460 const char *p = c_getstr (src);
2462 /* If the requested length is zero, or the src parameter string
2463 length is zero, return the dst parameter. */
2464 if (integer_zerop (len) || (p && *p == '\0'))
2466 replace_call_with_value (gsi, dst);
2467 return true;
2470 if (TREE_CODE (len) != INTEGER_CST || !p)
2471 return false;
2473 unsigned srclen = strlen (p);
2475 int cmpsrc = compare_tree_int (len, srclen);
2477 /* Return early if the requested len is less than the string length.
2478 Warnings will be issued elsewhere later. */
2479 if (cmpsrc < 0)
2480 return false;
2482 unsigned HOST_WIDE_INT dstsize;
2484 bool nowarn = gimple_no_warning_p (stmt);
2486 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2488 int cmpdst = compare_tree_int (len, dstsize);
2490 if (cmpdst >= 0)
2492 tree fndecl = gimple_call_fndecl (stmt);
2494 /* Strncat copies (at most) LEN bytes and always appends
2495 the terminating NUL so the specified bound should never
2496 be equal to (or greater than) the size of the destination.
2497 If it is, the copy could overflow. */
2498 location_t loc = gimple_location (stmt);
2499 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2500 cmpdst == 0
2501 ? G_("%G%qD specified bound %E equals "
2502 "destination size")
2503 : G_("%G%qD specified bound %E exceeds "
2504 "destination size %wu"),
2505 stmt, fndecl, len, dstsize);
2506 if (nowarn)
2507 gimple_set_no_warning (stmt, true);
2511 if (!nowarn && cmpsrc == 0)
2513 tree fndecl = gimple_call_fndecl (stmt);
2514 location_t loc = gimple_location (stmt);
2516 /* To avoid possible overflow the specified bound should also
2517 not be equal to the length of the source, even when the size
2518 of the destination is unknown (it's not an uncommon mistake
2519 to specify as the bound to strncat the length of the source). */
2520 if (warning_at (loc, OPT_Wstringop_overflow_,
2521 "%G%qD specified bound %E equals source length",
2522 stmt, fndecl, len))
2523 gimple_set_no_warning (stmt, true);
2526 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2528 /* If the replacement _DECL isn't initialized, don't do the
2529 transformation. */
2530 if (!fn)
2531 return false;
2533 /* Otherwise, emit a call to strcat. */
2534 gcall *repl = gimple_build_call (fn, 2, dst, src);
2535 replace_call_with_call_and_fold (gsi, repl);
2536 return true;
2539 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2540 LEN, and SIZE. */
2542 static bool
2543 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2545 gimple *stmt = gsi_stmt (*gsi);
2546 tree dest = gimple_call_arg (stmt, 0);
2547 tree src = gimple_call_arg (stmt, 1);
2548 tree len = gimple_call_arg (stmt, 2);
2549 tree size = gimple_call_arg (stmt, 3);
2550 tree fn;
2551 const char *p;
2553 p = c_getstr (src);
2554 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2555 if ((p && *p == '\0')
2556 || integer_zerop (len))
2558 replace_call_with_value (gsi, dest);
2559 return true;
2562 if (! tree_fits_uhwi_p (size))
2563 return false;
2565 if (! integer_all_onesp (size))
2567 tree src_len = c_strlen (src, 1);
2568 if (src_len
2569 && tree_fits_uhwi_p (src_len)
2570 && tree_fits_uhwi_p (len)
2571 && ! tree_int_cst_lt (len, src_len))
2573 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2574 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2575 if (!fn)
2576 return false;
2578 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2579 replace_call_with_call_and_fold (gsi, repl);
2580 return true;
2582 return false;
2585 /* If __builtin_strncat_chk is used, assume strncat is available. */
2586 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2587 if (!fn)
2588 return false;
2590 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2591 replace_call_with_call_and_fold (gsi, repl);
2592 return true;
2595 /* Build and append gimple statements to STMTS that would load
2596 the first character of the memory location identified by STR.
2597 LOC is the location of the statement. */
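/* The statement appended is roughly

     var = MEM[(const unsigned char *)str];

   and VAR, a fresh register or SSA name, is returned.  */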
2599 static tree
2600 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2602 tree var;
2604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2605 tree cst_uchar_ptr_node
2606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2607 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2609 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2610 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2611 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2613 gimple_assign_set_lhs (stmt, var);
2614 gimple_seq_add_stmt_without_update (stmts, stmt);
2616 return var;
2619 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
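/* A few of the folds done here, for illustration (S, S1 and S2 are
   placeholders for non-constant arguments):

     strcmp ("abc", "abd")   =>   negative constant
     strncmp (s1, s2, 0)     =>   0
     strcmp (s, "")          =>   *(const unsigned char *) s
     strncmp (s1, s2, 1)     =>   difference of the first bytes  */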
2621 static bool
2622 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2624 gimple *stmt = gsi_stmt (*gsi);
2625 tree callee = gimple_call_fndecl (stmt);
2626 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2628 tree type = integer_type_node;
2629 tree str1 = gimple_call_arg (stmt, 0);
2630 tree str2 = gimple_call_arg (stmt, 1);
2631 tree lhs = gimple_call_lhs (stmt);
2633 tree bound_node = NULL_TREE;
2634 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2636 /* Handle strncmp and strncasecmp functions. */
2637 if (gimple_call_num_args (stmt) == 3)
2639 bound_node = gimple_call_arg (stmt, 2);
2640 if (tree_fits_uhwi_p (bound_node))
2641 bound = tree_to_uhwi (bound_node);
2644 /* If the BOUND parameter is zero, return zero. */
2645 if (bound == 0)
2647 replace_call_with_value (gsi, integer_zero_node);
2648 return true;
2651 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2652 if (operand_equal_p (str1, str2, 0))
2654 replace_call_with_value (gsi, integer_zero_node);
2655 return true;
2658 /* Initially set to the number of characters, including the terminating
2659 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2660 the array Sx is not terminated by a nul.
2661 For nul-terminated strings the lengths are then adjusted so that
2662 LENx == NULPOSx holds. */
2663 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2664 const char *p1 = getbyterep (str1, &len1);
2665 const char *p2 = getbyterep (str2, &len2);
2667 /* The position of the terminating nul character if one exists, otherwise
2668 a value greater than LENx. */
2669 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2671 if (p1)
2673 size_t n = strnlen (p1, len1);
2674 if (n < len1)
2675 len1 = nulpos1 = n;
2678 if (p2)
2680 size_t n = strnlen (p2, len2);
2681 if (n < len2)
2682 len2 = nulpos2 = n;
2685 /* For known strings, return an immediate value. */
2686 if (p1 && p2)
2688 int r = 0;
2689 bool known_result = false;
2691 switch (fcode)
2693 case BUILT_IN_STRCMP:
2694 case BUILT_IN_STRCMP_EQ:
2695 if (len1 != nulpos1 || len2 != nulpos2)
2696 break;
2698 r = strcmp (p1, p2);
2699 known_result = true;
2700 break;
2702 case BUILT_IN_STRNCMP:
2703 case BUILT_IN_STRNCMP_EQ:
2705 if (bound == HOST_WIDE_INT_M1U)
2706 break;
2708 /* Reduce the bound to be no more than the length
2709 of the shorter of the two strings, or the sizes
2710 of the unterminated arrays. */
2711 unsigned HOST_WIDE_INT n = bound;
2713 if (len1 == nulpos1 && len1 < n)
2714 n = len1 + 1;
2715 if (len2 == nulpos2 && len2 < n)
2716 n = len2 + 1;
2718 if (MIN (nulpos1, nulpos2) + 1 < n)
2719 break;
2721 r = strncmp (p1, p2, n);
2722 known_result = true;
2723 break;
2725 /* The only case we can handle is when the strings are equal (result 0),
2726 which is already handled by the operand_equal_p case above. */
2727 case BUILT_IN_STRCASECMP:
2728 break;
2729 case BUILT_IN_STRNCASECMP:
2731 if (bound == HOST_WIDE_INT_M1U)
2732 break;
2733 r = strncmp (p1, p2, bound);
2734 if (r == 0)
2735 known_result = true;
2736 break;
2738 default:
2739 gcc_unreachable ();
2742 if (known_result)
2744 replace_call_with_value (gsi, build_cmp_result (type, r));
2745 return true;
2749 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2750 || fcode == BUILT_IN_STRCMP
2751 || fcode == BUILT_IN_STRCMP_EQ
2752 || fcode == BUILT_IN_STRCASECMP;
2754 location_t loc = gimple_location (stmt);
2756 /* If the second arg is "", return *(const unsigned char*)arg1. */
2757 if (p2 && *p2 == '\0' && nonzero_bound)
2759 gimple_seq stmts = NULL;
2760 tree var = gimple_load_first_char (loc, str1, &stmts);
2761 if (lhs)
2763 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2764 gimple_seq_add_stmt_without_update (&stmts, stmt);
2767 gsi_replace_with_seq_vops (gsi, stmts);
2768 return true;
2771 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2772 if (p1 && *p1 == '\0' && nonzero_bound)
2774 gimple_seq stmts = NULL;
2775 tree var = gimple_load_first_char (loc, str2, &stmts);
2777 if (lhs)
2779 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2780 stmt = gimple_build_assign (c, NOP_EXPR, var);
2781 gimple_seq_add_stmt_without_update (&stmts, stmt);
2783 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2784 gimple_seq_add_stmt_without_update (&stmts, stmt);
2787 gsi_replace_with_seq_vops (gsi, stmts);
2788 return true;
2791 /* If BOUND is one, return an expression corresponding to
2792 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2793 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2795 gimple_seq stmts = NULL;
2796 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2797 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2799 if (lhs)
2801 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2802 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2803 gimple_seq_add_stmt_without_update (&stmts, convert1);
2805 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2806 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2807 gimple_seq_add_stmt_without_update (&stmts, convert2);
2809 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2810 gimple_seq_add_stmt_without_update (&stmts, stmt);
2813 gsi_replace_with_seq_vops (gsi, stmts);
2814 return true;
2817 /* If BOUND is greater than the length of one constant string,
2818 and the other argument is also a nul-terminated string, replace
2819 strncmp with strcmp. */
2820 if (fcode == BUILT_IN_STRNCMP
2821 && bound > 0 && bound < HOST_WIDE_INT_M1U
2822 && ((p2 && len2 < bound && len2 == nulpos2)
2823 || (p1 && len1 < bound && len1 == nulpos1)))
2825 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2826 if (!fn)
2827 return false;
2828 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2829 replace_call_with_call_and_fold (gsi, repl);
2830 return true;
2833 return false;
2836 /* Fold a call to memchr pointed to by the GSI iterator. */
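/* For example (S and C are placeholders for non-constant arguments):

     memchr ("abcd", 'c', 4)   =>   "abcd" + 2
     memchr (s, c, 0)          =>   NULL  */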
2838 static bool
2839 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2841 gimple *stmt = gsi_stmt (*gsi);
2842 tree lhs = gimple_call_lhs (stmt);
2843 tree arg1 = gimple_call_arg (stmt, 0);
2844 tree arg2 = gimple_call_arg (stmt, 1);
2845 tree len = gimple_call_arg (stmt, 2);
2847 /* If the LEN parameter is zero, return zero. */
2848 if (integer_zerop (len))
2850 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2851 return true;
2854 char c;
2855 if (TREE_CODE (arg2) != INTEGER_CST
2856 || !tree_fits_uhwi_p (len)
2857 || !target_char_cst_p (arg2, &c))
2858 return false;
2860 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2861 unsigned HOST_WIDE_INT string_length;
2862 const char *p1 = getbyterep (arg1, &string_length);
2864 if (p1)
2866 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2867 if (r == NULL)
2869 tree mem_size, offset_node;
2870 byte_representation (arg1, &offset_node, &mem_size, NULL);
2871 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2872 ? 0 : tree_to_uhwi (offset_node);
2873 /* MEM_SIZE is the size of the array the string literal
2874 is stored in. */
2875 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2876 gcc_checking_assert (string_length <= string_size);
2877 if (length <= string_size)
2879 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2880 return true;
2883 else
2885 unsigned HOST_WIDE_INT offset = r - p1;
2886 gimple_seq stmts = NULL;
2887 if (lhs != NULL_TREE)
2889 tree offset_cst = build_int_cst (sizetype, offset);
2890 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2891 arg1, offset_cst);
2892 gimple_seq_add_stmt_without_update (&stmts, stmt);
2894 else
2895 gimple_seq_add_stmt_without_update (&stmts,
2896 gimple_build_nop ());
2898 gsi_replace_with_seq_vops (gsi, stmts);
2899 return true;
2903 return false;
2906 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2907 to the call. UNLOCKED is true if this is actually a call to
2908 fputs_unlocked. Return false if no simplification was possible. */
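/* The folds done here, keyed off the known string length
   (F is a placeholder stream):

     fputs ("", f)     =>   call removed
     fputs ("x", f)    =>   fputc ('x', f)
     fputs ("xy", f)   =>   fwrite ("xy", 1, 2, f), when optimizing
                            for speed  */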
2913 static bool
2914 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2915 tree arg0, tree arg1,
2916 bool unlocked)
2918 gimple *stmt = gsi_stmt (*gsi);
2920 /* If we're using an unlocked function, assume the other unlocked
2921 functions exist explicitly. */
2922 tree const fn_fputc = (unlocked
2923 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2924 : builtin_decl_implicit (BUILT_IN_FPUTC));
2925 tree const fn_fwrite = (unlocked
2926 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2927 : builtin_decl_implicit (BUILT_IN_FWRITE));
2929 /* If the return value is used, don't do the transformation. */
2930 if (gimple_call_lhs (stmt))
2931 return false;
2933 /* Get the length of the string passed to fputs. If the length
2934 can't be determined, punt. */
2935 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2936 if (!len
2937 || TREE_CODE (len) != INTEGER_CST)
2938 return false;
2940 switch (compare_tree_int (len, 1))
2942 case -1: /* length is 0, delete the call entirely. */
2943 replace_call_with_value (gsi, integer_zero_node);
2944 return true;
2946 case 0: /* length is 1, call fputc. */
2948 const char *p = c_getstr (arg0);
2949 if (p != NULL)
2951 if (!fn_fputc)
2952 return false;
2954 gimple *repl = gimple_build_call (fn_fputc, 2,
2955 build_int_cst
2956 (integer_type_node, p[0]), arg1);
2957 replace_call_with_call_and_fold (gsi, repl);
2958 return true;
2961 /* FALLTHROUGH */
2962 case 1: /* length is greater than 1, call fwrite. */
2964 /* If optimizing for size keep fputs. */
2965 if (optimize_function_for_size_p (cfun))
2966 return false;
2967 /* New argument list transforming fputs(string, stream) to
2968 fwrite(string, 1, len, stream). */
2969 if (!fn_fwrite)
2970 return false;
2972 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2973 size_one_node, len, arg1);
2974 replace_call_with_call_and_fold (gsi, repl);
2975 return true;
2977 default:
2978 gcc_unreachable ();
2980 return false;
2983 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2984 DEST, SRC, LEN, and SIZE are the arguments to the call.
2985 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
2986 code of the builtin. */
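/* For example, a checked call whose length never exceeds the object
   size, such as

     __memcpy_chk (d, s, 16, 32)

   is folded to memcpy (d, s, 16); D and S are placeholders.  */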
2989 static bool
2990 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2991 tree dest, tree src, tree len, tree size,
2992 enum built_in_function fcode)
2994 gimple *stmt = gsi_stmt (*gsi);
2995 location_t loc = gimple_location (stmt);
2996 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2997 tree fn;
2999 /* If SRC and DEST are the same (and not volatile), return DEST
3000 (resp. DEST+LEN for __mempcpy_chk). */
3001 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3003 if (fcode != BUILT_IN_MEMPCPY_CHK)
3005 replace_call_with_value (gsi, dest);
3006 return true;
3008 else
3010 gimple_seq stmts = NULL;
3011 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3012 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3013 TREE_TYPE (dest), dest, len);
3014 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3015 replace_call_with_value (gsi, temp);
3016 return true;
3020 if (! tree_fits_uhwi_p (size))
3021 return false;
3023 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3024 if (! integer_all_onesp (size))
3026 if (! tree_fits_uhwi_p (len))
3028 /* If LEN is not constant, try MAXLEN too.
3029 For MAXLEN only allow optimizing into non-_ocs function
3030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3031 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3033 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3035 /* (void) __mempcpy_chk () can be optimized into
3036 (void) __memcpy_chk (). */
3037 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3038 if (!fn)
3039 return false;
3041 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3042 replace_call_with_call_and_fold (gsi, repl);
3043 return true;
3045 return false;
3048 else
3049 maxlen = len;
3051 if (tree_int_cst_lt (size, maxlen))
3052 return false;
3055 fn = NULL_TREE;
3056 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3057 mem{cpy,pcpy,move,set} is available. */
3058 switch (fcode)
3060 case BUILT_IN_MEMCPY_CHK:
3061 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3062 break;
3063 case BUILT_IN_MEMPCPY_CHK:
3064 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3065 break;
3066 case BUILT_IN_MEMMOVE_CHK:
3067 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3068 break;
3069 case BUILT_IN_MEMSET_CHK:
3070 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3071 break;
3072 default:
3073 break;
3076 if (!fn)
3077 return false;
3079 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3080 replace_call_with_call_and_fold (gsi, repl);
3081 return true;
3084 /* Fold a call to the __st[rp]cpy_chk builtin.
3085 DEST, SRC, and SIZE are the arguments to the call.
3086 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
3087 code of the builtin. */
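/* For example, with a source of known length smaller than SIZE,

     __strcpy_chk (d, "abc", 8)   =>   strcpy (d, "abc")

   since the runtime check can never fail; D is a placeholder.  */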
3090 static bool
3091 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3092 tree dest,
3093 tree src, tree size,
3094 enum built_in_function fcode)
3096 gimple *stmt = gsi_stmt (*gsi);
3097 location_t loc = gimple_location (stmt);
3098 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3099 tree len, fn;
3101 /* If SRC and DEST are the same (and not volatile), return DEST. */
3102 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3104 /* Issue -Wrestrict unless the pointers are null (those do
3105 not point to objects and so do not indicate an overlap;
3106 such calls could be the result of sanitization and jump
3107 threading). */
3108 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
3110 tree func = gimple_call_fndecl (stmt);
3112 warning_at (loc, OPT_Wrestrict,
3113 "%qD source argument is the same as destination",
3114 func);
3117 replace_call_with_value (gsi, dest);
3118 return true;
3121 if (! tree_fits_uhwi_p (size))
3122 return false;
3124 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3125 if (! integer_all_onesp (size))
3127 len = c_strlen (src, 1);
3128 if (! len || ! tree_fits_uhwi_p (len))
3130 /* If LEN is not constant, try MAXLEN too.
3131 For MAXLEN only allow optimizing into non-_ocs function
3132 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3133 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3135 if (fcode == BUILT_IN_STPCPY_CHK)
3137 if (! ignore)
3138 return false;
3140 /* If return value of __stpcpy_chk is ignored,
3141 optimize into __strcpy_chk. */
3142 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3143 if (!fn)
3144 return false;
3146 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3147 replace_call_with_call_and_fold (gsi, repl);
3148 return true;
3151 if (! len || TREE_SIDE_EFFECTS (len))
3152 return false;
3154 /* If c_strlen returned something, but not a constant,
3155 transform __strcpy_chk into __memcpy_chk. */
3156 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3157 if (!fn)
3158 return false;
3160 gimple_seq stmts = NULL;
3161 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3162 len = gimple_convert (&stmts, loc, size_type_node, len);
3163 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3164 build_int_cst (size_type_node, 1));
3165 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3166 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3167 replace_call_with_call_and_fold (gsi, repl);
3168 return true;
3171 else
3172 maxlen = len;
3174 if (! tree_int_cst_lt (maxlen, size))
3175 return false;
3178 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3179 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
3180 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3181 if (!fn)
3182 return false;
3184 gimple *repl = gimple_build_call (fn, 2, dest, src);
3185 replace_call_with_call_and_fold (gsi, repl);
3186 return true;
3189 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3190 are the arguments to the call. IGNORE is true if the return value
3191 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
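/* For example, when LEN is known not to exceed SIZE,

     __strncpy_chk (d, s, 8, 32)   =>   strncpy (d, s, 8)

   with D and S placeholders for the actual arguments.  */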
3194 static bool
3195 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3196 tree dest, tree src,
3197 tree len, tree size,
3198 enum built_in_function fcode)
3200 gimple *stmt = gsi_stmt (*gsi);
3201 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3202 tree fn;
3204 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3206 /* If return value of __stpncpy_chk is ignored,
3207 optimize into __strncpy_chk. */
3208 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3209 if (fn)
3211 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3212 replace_call_with_call_and_fold (gsi, repl);
3213 return true;
3217 if (! tree_fits_uhwi_p (size))
3218 return false;
3220 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3221 if (! integer_all_onesp (size))
3223 if (! tree_fits_uhwi_p (len))
3225 /* If LEN is not constant, try MAXLEN too.
3226 For MAXLEN only allow optimizing into non-_ocs function
3227 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3228 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3229 return false;
3231 else
3232 maxlen = len;
3234 if (tree_int_cst_lt (size, maxlen))
3235 return false;
3238 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3239 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3240 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3241 if (!fn)
3242 return false;
3244 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3245 replace_call_with_call_and_fold (gsi, repl);
3246 return true;
3249 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3250 Return false if no simplification can be made. */
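/* As an illustrative sketch (D is a placeholder destination), a call
   with a used result and a source of known length such as

     p = stpcpy (d, "ab");

   becomes

     memcpy (d, "ab", 3);
     p = d + 2;

   i.e. D plus strlen ("ab").  */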
3252 static bool
3253 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3255 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3256 location_t loc = gimple_location (stmt);
3257 tree dest = gimple_call_arg (stmt, 0);
3258 tree src = gimple_call_arg (stmt, 1);
3259 tree fn, lenp1;
3261 /* If the result is unused, replace stpcpy with strcpy. */
3262 if (gimple_call_lhs (stmt) == NULL_TREE)
3264 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3265 if (!fn)
3266 return false;
3267 gimple_call_set_fndecl (stmt, fn);
3268 fold_stmt (gsi);
3269 return true;
3272 /* Set to non-null if ARG refers to an unterminated array. */
3273 c_strlen_data data = { };
3274 /* The size of the unterminated array if SRC refers to one. */
3275 tree size;
3276 /* True if the size is exact/constant, false if it's the lower bound
3277 of a range. */
3278 bool exact;
3279 tree len = c_strlen (src, 1, &data, 1);
3280 if (!len
3281 || TREE_CODE (len) != INTEGER_CST)
3283 data.decl = unterminated_array (src, &size, &exact);
3284 if (!data.decl)
3285 return false;
3288 if (data.decl)
3290 /* Avoid folding calls with unterminated arrays. */
3291 if (!gimple_no_warning_p (stmt))
3292 warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
3293 exact);
3294 gimple_set_no_warning (stmt, true);
3295 return false;
3298 if (optimize_function_for_size_p (cfun)
3299 /* If length is zero it's small enough. */
3300 && !integer_zerop (len))
3301 return false;
3303 /* If the source has a known length replace stpcpy with memcpy. */
3304 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3305 if (!fn)
3306 return false;
3308 gimple_seq stmts = NULL;
3309 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3310 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3311 tem, build_int_cst (size_type_node, 1));
3312 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3313 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3314 gimple_move_vops (repl, stmt);
3315 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3316 /* Replace the result with dest + len. */
3317 stmts = NULL;
3318 tem = gimple_convert (&stmts, loc, sizetype, len);
3319 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3320 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3321 POINTER_PLUS_EXPR, dest, tem);
3322 gsi_replace (gsi, ret, false);
3323 /* Finally fold the memcpy call. */
3324 gimple_stmt_iterator gsi2 = *gsi;
3325 gsi_prev (&gsi2);
3326 fold_stmt (&gsi2);
3327 return true;
3330 /* Fold a call to __{,v}snprintf_chk. Return false if a normal call
3331 should be emitted rather than simplifying the call inline. FCODE
3332 is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. */
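/* For example, with FLAG equal to 0 and a bound that cannot exceed
   the object size,

     __snprintf_chk (d, 8, 0, 32, "%d", i)
       =>   snprintf (d, 8, "%d", i)

   where D and I are placeholders.  */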
3336 static bool
3337 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3338 enum built_in_function fcode)
3340 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3341 tree dest, size, len, fn, fmt, flag;
3342 const char *fmt_str;
3344 /* Verify the required arguments in the original call. */
3345 if (gimple_call_num_args (stmt) < 5)
3346 return false;
3348 dest = gimple_call_arg (stmt, 0);
3349 len = gimple_call_arg (stmt, 1);
3350 flag = gimple_call_arg (stmt, 2);
3351 size = gimple_call_arg (stmt, 3);
3352 fmt = gimple_call_arg (stmt, 4);
3354 if (! tree_fits_uhwi_p (size))
3355 return false;
3357 if (! integer_all_onesp (size))
3359 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3360 if (! tree_fits_uhwi_p (len))
3362 /* If LEN is not constant, try MAXLEN too.
3363 For MAXLEN only allow optimizing into non-_ocs function
3364 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3365 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3366 return false;
3368 else
3369 maxlen = len;
3371 if (tree_int_cst_lt (size, maxlen))
3372 return false;
3375 if (!init_target_chars ())
3376 return false;
3378 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3379 or if format doesn't contain % chars or is "%s". */
3380 if (! integer_zerop (flag))
3382 fmt_str = c_getstr (fmt);
3383 if (fmt_str == NULL)
3384 return false;
3385 if (strchr (fmt_str, target_percent) != NULL
3386 && strcmp (fmt_str, target_percent_s))
3387 return false;
3390 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3391 available. */
3392 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3393 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3394 if (!fn)
3395 return false;
3397 /* Replace the called function and the first 5 arguments by 3,
3398 retaining the trailing varargs. */
3399 gimple_call_set_fndecl (stmt, fn);
3400 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3401 gimple_call_set_arg (stmt, 0, dest);
3402 gimple_call_set_arg (stmt, 1, len);
3403 gimple_call_set_arg (stmt, 2, fmt);
3404 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3405 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3406 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3407 fold_stmt (gsi);
3408 return true;
3411 /* Fold a call to __{,v}sprintf_chk. Return false if a normal call
3412 should be emitted rather than simplifying the call inline.
3413 FCODE is either BUILT_IN_SPRINTF_CHK
3414 or BUILT_IN_VSPRINTF_CHK. */
3416 static bool
3417 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3418 enum built_in_function fcode)
3420 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3421 tree dest, size, len, fn, fmt, flag;
3422 const char *fmt_str;
3423 unsigned nargs = gimple_call_num_args (stmt);
3425 /* Verify the required arguments in the original call. */
3426 if (nargs < 4)
3427 return false;
3428 dest = gimple_call_arg (stmt, 0);
3429 flag = gimple_call_arg (stmt, 1);
3430 size = gimple_call_arg (stmt, 2);
3431 fmt = gimple_call_arg (stmt, 3);
3433 if (! tree_fits_uhwi_p (size))
3434 return false;
3436 len = NULL_TREE;
3438 if (!init_target_chars ())
3439 return false;
3441 /* Check whether the format is a literal string constant. */
3442 fmt_str = c_getstr (fmt);
3443 if (fmt_str != NULL)
3445 /* If the format doesn't contain % args or %%, we know the size. */
3446 if (strchr (fmt_str, target_percent) == 0)
3448 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3449 len = build_int_cstu (size_type_node, strlen (fmt_str));
3451 /* If the format is "%s" and first ... argument is a string literal,
3452 we know the size too. */
3453 else if (fcode == BUILT_IN_SPRINTF_CHK
3454 && strcmp (fmt_str, target_percent_s) == 0)
3456 tree arg;
3458 if (nargs == 5)
3460 arg = gimple_call_arg (stmt, 4);
3461 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3463 len = c_strlen (arg, 1);
3464 if (! len || ! tree_fits_uhwi_p (len))
3465 len = NULL_TREE;
3471 if (! integer_all_onesp (size))
3473 if (! len || ! tree_int_cst_lt (len, size))
3474 return false;
3477 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3478 or if format doesn't contain % chars or is "%s". */
3479 if (! integer_zerop (flag))
3481 if (fmt_str == NULL)
3482 return false;
3483 if (strchr (fmt_str, target_percent) != NULL
3484 && strcmp (fmt_str, target_percent_s))
3485 return false;
3488 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3489 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3490 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3491 if (!fn)
3492 return false;
3494 /* Replace the called function and the first 4 arguments by 2,
3495 retaining the trailing varargs. */
3496 gimple_call_set_fndecl (stmt, fn);
3497 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3498 gimple_call_set_arg (stmt, 0, dest);
3499 gimple_call_set_arg (stmt, 1, fmt);
3500 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3501 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3502 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3503 fold_stmt (gsi);
3504 return true;
3507 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3508 ORIG may be null if this is a 2-argument call. We don't attempt to
3509 simplify calls with more than 3 arguments.
3511 Return true if simplification was possible, otherwise false. */
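/* For example (D and S are placeholders):

     sprintf (d, "abc")     =>   strcpy (d, "abc")
     sprintf (d, "%s", s)   =>   strcpy (d, s)

   where in the second form the result, if used, requires the length
   of S to be known.  */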
3513 bool
3514 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3516 gimple *stmt = gsi_stmt (*gsi);
3518 /* Verify the required arguments in the original call. We deal with two
3519 types of sprintf() calls: 'sprintf (str, fmt)' and
3520 'sprintf (dest, "%s", orig)'. */
3521 if (gimple_call_num_args (stmt) > 3)
3522 return false;
3524 tree orig = NULL_TREE;
3525 if (gimple_call_num_args (stmt) == 3)
3526 orig = gimple_call_arg (stmt, 2);
3528 /* Check whether the format is a literal string constant. */
3529 tree fmt = gimple_call_arg (stmt, 1);
3530 const char *fmt_str = c_getstr (fmt);
3531 if (fmt_str == NULL)
3532 return false;
3534 tree dest = gimple_call_arg (stmt, 0);
3536 if (!init_target_chars ())
3537 return false;
3539 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3540 if (!fn)
3541 return false;
3543 /* If the format doesn't contain % args or %%, use strcpy. */
3544 if (strchr (fmt_str, target_percent) == NULL)
3546 /* Don't optimize sprintf (buf, "abc", ptr++). */
3547 if (orig)
3548 return false;
3550 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3551 'format' is known to contain no % formats. */
3552 gimple_seq stmts = NULL;
3553 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3555 /* Propagate the NO_WARNING bit to avoid issuing the same
3556 warning more than once. */
3557 if (gimple_no_warning_p (stmt))
3558 gimple_set_no_warning (repl, true);
3560 gimple_seq_add_stmt_without_update (&stmts, repl);
3561 if (tree lhs = gimple_call_lhs (stmt))
3563 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3564 strlen (fmt_str)));
3565 gimple_seq_add_stmt_without_update (&stmts, repl);
3566 gsi_replace_with_seq_vops (gsi, stmts);
3567 /* gsi now points at the assignment to the lhs, get a
3568 stmt iterator to the strcpy call.
3569 ??? We can't use gsi_for_stmt as that doesn't work when the
3570 CFG isn't built yet. */
3571 gimple_stmt_iterator gsi2 = *gsi;
3572 gsi_prev (&gsi2);
3573 fold_stmt (&gsi2);
3575 else
3577 gsi_replace_with_seq_vops (gsi, stmts);
3578 fold_stmt (gsi);
3580 return true;
3583 /* If the format is "%s", use strcpy if the result isn't used. */
3584 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3586 /* Don't crash on sprintf (str1, "%s"). */
3587 if (!orig)
3588 return false;
3590 /* Don't fold calls with source arguments of invalid (nonpointer)
3591 types. */
3592 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3593 return false;
3595 tree orig_len = NULL_TREE;
3596 if (gimple_call_lhs (stmt))
3598 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3599 if (!orig_len)
3600 return false;
3603 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3604 gimple_seq stmts = NULL;
3605 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3607 /* Propagate the NO_WARNING bit to avoid issuing the same
3608 warning more than once. */
3609 if (gimple_no_warning_p (stmt))
3610 gimple_set_no_warning (repl, true);
3612 gimple_seq_add_stmt_without_update (&stmts, repl);
3613 if (tree lhs = gimple_call_lhs (stmt))
3615 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3616 TREE_TYPE (orig_len)))
3617 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3618 repl = gimple_build_assign (lhs, orig_len);
3619 gimple_seq_add_stmt_without_update (&stmts, repl);
3620 gsi_replace_with_seq_vops (gsi, stmts);
3621 /* gsi now points at the assignment to the lhs, get a
3622 stmt iterator to the strcpy call.
3623 ??? We can't use gsi_for_stmt as that doesn't work when the
3624 CFG isn't built yet. */
3625 gimple_stmt_iterator gsi2 = *gsi;
3626 gsi_prev (&gsi2);
3627 fold_stmt (&gsi2);
3629 else
3631 gsi_replace_with_seq_vops (gsi, stmts);
3632 fold_stmt (gsi);
3634 return true;
3636 return false;
3639 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3640 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3641 attempt to simplify calls with more than 4 arguments.
3643 Return true if simplification was possible, otherwise false. */
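/* For example, when the bound is known to exceed the output length
   (D and S are placeholders):

     snprintf (d, 8, "abc")     =>   strcpy (d, "abc")
     snprintf (d, 8, "%s", s)   =>   strcpy (d, s)

   the latter only when strlen (s) is a constant smaller than 8.  */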
3645 bool
3646 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3648 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3649 tree dest = gimple_call_arg (stmt, 0);
3650 tree destsize = gimple_call_arg (stmt, 1);
3651 tree fmt = gimple_call_arg (stmt, 2);
3652 tree orig = NULL_TREE;
3653 const char *fmt_str = NULL;
3655 if (gimple_call_num_args (stmt) > 4)
3656 return false;
3658 if (gimple_call_num_args (stmt) == 4)
3659 orig = gimple_call_arg (stmt, 3);
3661 if (!tree_fits_uhwi_p (destsize))
3662 return false;
3663 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3665 /* Check whether the format is a literal string constant. */
3666 fmt_str = c_getstr (fmt);
3667 if (fmt_str == NULL)
3668 return false;
3670 if (!init_target_chars ())
3671 return false;
3673 /* If the format doesn't contain % args or %%, use strcpy. */
3674 if (strchr (fmt_str, target_percent) == NULL)
3676 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3677 if (!fn)
3678 return false;
3680 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3681 if (orig)
3682 return false;
3684 /* We could expand this as
3685 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3686 or to
3687 memcpy (str, fmt_with_nul_at_cstm1, cst);
3688 but in the former case that might increase code size
3689 and in the latter case grow .rodata section too much.
3690 So punt for now. */
3691 size_t len = strlen (fmt_str);
3692 if (len >= destlen)
3693 return false;
3695 gimple_seq stmts = NULL;
3696 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3697 gimple_seq_add_stmt_without_update (&stmts, repl);
3698 if (tree lhs = gimple_call_lhs (stmt))
3700 repl = gimple_build_assign (lhs,
3701 build_int_cst (TREE_TYPE (lhs), len));
3702 gimple_seq_add_stmt_without_update (&stmts, repl);
3703 gsi_replace_with_seq_vops (gsi, stmts);
3704 /* gsi now points at the assignment to the lhs, get a
3705 stmt iterator to the strcpy call.
3706 ??? We can't use gsi_for_stmt as that doesn't work when the
3707 CFG isn't built yet. */
3708 gimple_stmt_iterator gsi2 = *gsi;
3709 gsi_prev (&gsi2);
3710 fold_stmt (&gsi2);
3712 else
3714 gsi_replace_with_seq_vops (gsi, stmts);
3715 fold_stmt (gsi);
3717 return true;
3720 /* If the format is "%s" and the length of the source string is known to be smaller than DESTSIZE, use strcpy. */
3721 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3723 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3724 if (!fn)
3725 return false;
3727 /* Don't crash on snprintf (str1, cst, "%s"). */
3728 if (!orig)
3729 return false;
3731 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3732 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3733 return false;
3735 /* We could expand this as
3736 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3737 or to
3738 memcpy (str1, str2_with_nul_at_cstm1, cst);
3739 but in the former case that might increase code size
3740 and in the latter case grow .rodata section too much.
3741 So punt for now. */
3742 if (compare_tree_int (orig_len, destlen) >= 0)
3743 return false;
3745 /* Convert snprintf (str1, cst, "%s", str2) into
3746 strcpy (str1, str2) if strlen (str2) < cst. */
3747 gimple_seq stmts = NULL;
3748 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3749 gimple_seq_add_stmt_without_update (&stmts, repl);
3750 if (tree lhs = gimple_call_lhs (stmt))
3752 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3753 TREE_TYPE (orig_len)))
3754 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3755 repl = gimple_build_assign (lhs, orig_len);
3756 gimple_seq_add_stmt_without_update (&stmts, repl);
3757 gsi_replace_with_seq_vops (gsi, stmts);
3758 /* gsi now points at the assignment to the lhs, get a
3759 stmt iterator to the strcpy call.
3760 ??? We can't use gsi_for_stmt as that doesn't work when the
3761 CFG isn't built yet. */
3762 gimple_stmt_iterator gsi2 = *gsi;
3763 gsi_prev (&gsi2);
3764 fold_stmt (&gsi2);
3766 else
3768 gsi_replace_with_seq_vops (gsi, stmts);
3769 fold_stmt (gsi);
3771 return true;
3773 return false;
3776 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3777 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3778 more than 3 arguments, and ARG may be null in the 2-argument case.
3780 Return true if simplification was possible, otherwise false.
3781 FCODE is the BUILT_IN_* code of the function to be
3782 simplified. */
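/* For instance, when the result of the call is unused,

     fprintf (fp, "hello");    =>  fputs ("hello", fp);
     fprintf (fp, "%s", str);  =>  fputs (str, fp);
     fprintf (fp, "%c", c);    =>  fputc (c, fp);

   and a call with an empty format string is removed entirely.  */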
3784 static bool
3785 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3786 tree fp, tree fmt, tree arg,
3787 enum built_in_function fcode)
3789 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3790 tree fn_fputc, fn_fputs;
3791 const char *fmt_str = NULL;
3793 /* If the return value is used, don't do the transformation. */
3794 if (gimple_call_lhs (stmt) != NULL_TREE)
3795 return false;
3797 /* Check whether the format is a literal string constant. */
3798 fmt_str = c_getstr (fmt);
3799 if (fmt_str == NULL)
3800 return false;
3802 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3804 /* If we're using an unlocked function, assume the other
3805 unlocked functions exist explicitly. */
3806 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3807 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3809 else
3811 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3812 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3815 if (!init_target_chars ())
3816 return false;
3818 /* If the format doesn't contain % args or %%, use fputs (or drop the call entirely if the format is ""). */
3819 if (strchr (fmt_str, target_percent) == NULL)
3821 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3822 && arg)
3823 return false;
3825 /* If the format specifier was "", fprintf does nothing. */
3826 if (fmt_str[0] == '\0')
3828 replace_call_with_value (gsi, NULL_TREE);
3829 return true;
3832 /* When "string" doesn't contain %, replace all cases of
3833 fprintf (fp, string) with fputs (string, fp). The fputs
3834 builtin will take care of special cases like length == 1. */
3835 if (fn_fputs)
3837 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3838 replace_call_with_call_and_fold (gsi, repl);
3839 return true;
3843 /* The other optimizations can be done only on the non-va_list variants. */
3844 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3845 return false;
3847 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3848 else if (strcmp (fmt_str, target_percent_s) == 0)
3850 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3851 return false;
3852 if (fn_fputs)
3854 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3855 replace_call_with_call_and_fold (gsi, repl);
3856 return true;
3860 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3861 else if (strcmp (fmt_str, target_percent_c) == 0)
3863 if (!arg
3864 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3865 return false;
3866 if (fn_fputc)
3868 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3869 replace_call_with_call_and_fold (gsi, repl);
3870 return true;
3874 return false;
3877 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3878 FMT and ARG are the arguments to the call; we don't fold cases with
3879 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3881 Return true if simplification was possible, otherwise false.
3882 FCODE is the BUILT_IN_* code of the function to be
3883 simplified. */
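/* For instance, when the result of the call is unused,

     printf ("hello\n");    =>  puts ("hello");
     printf ("x");          =>  putchar ('x');
     printf ("%s\n", str);  =>  puts (str);
     printf ("%c", c);      =>  putchar (c);

   A plain "%s" format additionally requires the argument to be a known
   constant string, since without access to stdout a fputs call cannot
   be emitted here.  */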
3885 static bool
3886 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3887 tree arg, enum built_in_function fcode)
3889 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3890 tree fn_putchar, fn_puts, newarg;
3891 const char *fmt_str = NULL;
3893 /* If the return value is used, don't do the transformation. */
3894 if (gimple_call_lhs (stmt) != NULL_TREE)
3895 return false;
3897 /* Check whether the format is a literal string constant. */
3898 fmt_str = c_getstr (fmt);
3899 if (fmt_str == NULL)
3900 return false;
3902 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3904 /* If we're using an unlocked function, assume the other
3905 unlocked functions exist explicitly. */
3906 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3907 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3909 else
3911 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3912 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3915 if (!init_target_chars ())
3916 return false;
3918 if (strcmp (fmt_str, target_percent_s) == 0
3919 || strchr (fmt_str, target_percent) == NULL)
3921 const char *str;
3923 if (strcmp (fmt_str, target_percent_s) == 0)
3925 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3926 return false;
3928 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3929 return false;
3931 str = c_getstr (arg);
3932 if (str == NULL)
3933 return false;
3935 else
3937 /* The format specifier doesn't contain any '%' characters. */
3938 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3939 && arg)
3940 return false;
3941 str = fmt_str;
3944 /* If the string was "", printf does nothing. */
3945 if (str[0] == '\0')
3947 replace_call_with_value (gsi, NULL_TREE);
3948 return true;
3951 /* If the string has length of 1, call putchar. */
3952 if (str[1] == '\0')
3954 /* Given printf ("c") (where c is any single character),
3955 convert "c"[0] to an int and pass that to the replacement
3956 function. */
3957 newarg = build_int_cst (integer_type_node, str[0]);
3958 if (fn_putchar)
3960 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3961 replace_call_with_call_and_fold (gsi, repl);
3962 return true;
3965 else
3967 /* If the string was "string\n", call puts("string"). */
3968 size_t len = strlen (str);
3969 if ((unsigned char)str[len - 1] == target_newline
3970 && (size_t) (int) len == len
3971 && (int) len > 0)
3973 char *newstr;
3975 /* Create a NUL-terminated string that's one char shorter
3976 than the original, stripping off the trailing '\n'. */
3977 newstr = xstrdup (str);
3978 newstr[len - 1] = '\0';
3979 newarg = build_string_literal (len, newstr);
3980 free (newstr);
3981 if (fn_puts)
3983 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3984 replace_call_with_call_and_fold (gsi, repl);
3985 return true;
3988 else
3989 /* We'd like to arrange to call fputs(string,stdout) here,
3990 but we need stdout and don't have a way to get it yet. */
3991 return false;
3995 /* The other optimizations can be done only on the non-va_list variants. */
3996 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3997 return false;
3999 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4000 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
4002 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
4003 return false;
4004 if (fn_puts)
4006 gcall *repl = gimple_build_call (fn_puts, 1, arg);
4007 replace_call_with_call_and_fold (gsi, repl);
4008 return true;
4012 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4013 else if (strcmp (fmt_str, target_percent_c) == 0)
4015 if (!arg || ! useless_type_conversion_p (integer_type_node,
4016 TREE_TYPE (arg)))
4017 return false;
4018 if (fn_putchar)
4020 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
4021 replace_call_with_call_and_fold (gsi, repl);
4022 return true;
4026 return false;
4031 /* Fold a call to __builtin_strlen to a constant when the string length is known, and record the range of possible lengths otherwise. */
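/* E.g. n = strlen ("hello") is folded to n = 5, while for an argument
   pointing into a 32-byte array with unknown contents the call is left
   alone but a range such as [0, 31] can be recorded for its result.  */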
4033 static bool
4034 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
4036 gimple *stmt = gsi_stmt (*gsi);
4037 tree arg = gimple_call_arg (stmt, 0);
4039 wide_int minlen;
4040 wide_int maxlen;
4042 c_strlen_data lendata = { };
4043 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4044 && !lendata.decl
4045 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4046 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4048 /* The range of lengths refers to either a single constant
4049 string or to the longest and shortest constant string
4050 referenced by the argument of the strlen() call, or to
4051 the strings that can possibly be stored in the arrays
4052 the argument refers to. */
4053 minlen = wi::to_wide (lendata.minlen);
4054 maxlen = wi::to_wide (lendata.maxlen);
4056 else
4058 unsigned prec = TYPE_PRECISION (sizetype);
4060 minlen = wi::shwi (0, prec);
4061 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4064 if (minlen == maxlen)
4066 /* Fold the strlen call to a constant. */
4067 tree type = TREE_TYPE (lendata.minlen);
4068 tree len = force_gimple_operand_gsi (gsi,
4069 wide_int_to_tree (type, minlen),
4070 true, NULL, true, GSI_SAME_STMT);
4071 replace_call_with_value (gsi, len);
4072 return true;
4075 /* Set the strlen() result range to [MINLEN, MAXLEN]. */
4076 if (tree lhs = gimple_call_lhs (stmt))
4077 set_strlen_range (lhs, minlen, maxlen);
4079 return false;
4082 /* Fold a call to __builtin_acc_on_device. */
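/* The folding below expands acc_on_device (ARG) into the equivalent of

     _1 = ARG == VAL_HOST;
     _2 = ARG == VAL_DEV;
     result = _1 | _2;

   where VAL_HOST and VAL_DEV are the constants appropriate for the
   compiler (host or accelerator) that is currently running, so the
   result can later fold to a constant once ARG is known.  */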
4084 static bool
4085 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4088 /* Defer folding until expansion time, when we know whether we're in the host or an offload (accelerator) compiler. */
4088 if (symtab->state != EXPANSION)
4089 return false;
4091 unsigned val_host = GOMP_DEVICE_HOST;
4092 unsigned val_dev = GOMP_DEVICE_NONE;
4094 #ifdef ACCEL_COMPILER
4095 val_host = GOMP_DEVICE_NOT_HOST;
4096 val_dev = ACCEL_COMPILER_acc_device;
4097 #endif
4099 location_t loc = gimple_location (gsi_stmt (*gsi));
4101 tree host_eq = make_ssa_name (boolean_type_node);
4102 gimple *host_ass = gimple_build_assign
4103 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4104 gimple_set_location (host_ass, loc);
4105 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4107 tree dev_eq = make_ssa_name (boolean_type_node);
4108 gimple *dev_ass = gimple_build_assign
4109 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4110 gimple_set_location (dev_ass, loc);
4111 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4113 tree result = make_ssa_name (boolean_type_node);
4114 gimple *result_ass = gimple_build_assign
4115 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4116 gimple_set_location (result_ass, loc);
4117 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4119 replace_call_with_value (gsi, result);
4121 return true;
4124 /* Fold realloc (0, n) -> malloc (n). */
4126 static bool
4127 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4129 gimple *stmt = gsi_stmt (*gsi);
4130 tree arg = gimple_call_arg (stmt, 0);
4131 tree size = gimple_call_arg (stmt, 1);
4133 if (operand_equal_p (arg, null_pointer_node, 0))
4135 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4136 if (fn_malloc)
4138 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4139 replace_call_with_call_and_fold (gsi, repl);
4140 return true;
4143 return false;
4146 /* Number of bytes into which any type other than an aggregate or
4147 vector type should fit. */
4148 static constexpr size_t clear_padding_unit
4149 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4150 /* Buffer size on which __builtin_clear_padding folding code works. */
4151 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4153 /* Data passed through __builtin_clear_padding folding. */
4154 struct clear_padding_struct {
4155 location_t loc;
4156 /* False during __builtin_clear_padding folding, true during
4157 clear_type_padding_in_mask. In the latter case, instead of clearing
4158 the non-padding bits in the union_ptr array, clear the padding bits there. */
4159 bool clear_in_mask;
4160 tree base;
4161 tree alias_type;
4162 gimple_stmt_iterator *gsi;
4163 /* Alignment of buf->base + 0. */
4164 unsigned align;
4165 /* Offset from buf->base. Should always be a multiple of UNITS_PER_WORD. */
4166 HOST_WIDE_INT off;
4167 /* Number of padding bytes before buf->off that don't have padding clear
4168 code emitted yet. */
4169 HOST_WIDE_INT padding_bytes;
4170 /* The size of the whole object. Never emit code to touch
4171 buf->base + buf->sz or following bytes. */
4172 HOST_WIDE_INT sz;
4173 /* Number of bytes recorded in buf->buf. */
4174 size_t size;
4175 /* When inside a union, instead of emitting code we AND bits into
4176 the union_ptr array. */
4177 unsigned char *union_ptr;
4178 /* Set bits mean padding bits that need to be cleared by the builtin. */
4179 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4182 /* Emit code to clear padding requested in BUF->buf - set bits
4183 in there stand for padding that should be cleared. FULL is true
4184 if everything from the buffer should be flushed, otherwise
4185 it can leave up to 2 * clear_padding_unit bytes for further
4186 processing. */
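/* As a small worked example, if BUF->buf holds the bytes 00 ff ff 00
   (set bits mark padding bits to be cleared), the flush emits a single
   two-byte store of the form

     MEM <char[2]> [(char *) base + off + 1] = {};

   rather than two separate one-byte stores.  */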
4188 static void
4189 clear_padding_flush (clear_padding_struct *buf, bool full)
4191 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4192 if (!full && buf->size < 2 * clear_padding_unit)
4193 return;
4194 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4195 size_t end = buf->size;
4196 if (!full)
4197 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4198 * clear_padding_unit);
4199 size_t padding_bytes = buf->padding_bytes;
4200 if (buf->union_ptr)
4202 if (buf->clear_in_mask)
4204 /* During clear_type_padding_in_mask, clear the padding
4205 bits set in buf->buf in the buf->union_ptr mask. */
4206 for (size_t i = 0; i < end; i++)
4208 if (buf->buf[i] == (unsigned char) ~0)
4209 padding_bytes++;
4210 else
4212 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4213 0, padding_bytes);
4214 padding_bytes = 0;
4215 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4218 if (full)
4220 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4221 0, padding_bytes);
4222 buf->off = 0;
4223 buf->size = 0;
4224 buf->padding_bytes = 0;
4226 else
4228 memmove (buf->buf, buf->buf + end, buf->size - end);
4229 buf->off += end;
4230 buf->size -= end;
4231 buf->padding_bytes = padding_bytes;
4233 return;
4235 /* Inside of a union, instead of emitting any code,
4236 clear all bits in the union_ptr buffer that are clear
4237 in buf. Whole padding bytes don't clear anything. */
4238 for (size_t i = 0; i < end; i++)
4240 if (buf->buf[i] == (unsigned char) ~0)
4241 padding_bytes++;
4242 else
4244 padding_bytes = 0;
4245 buf->union_ptr[buf->off + i] &= buf->buf[i];
4248 if (full)
4250 buf->off = 0;
4251 buf->size = 0;
4252 buf->padding_bytes = 0;
4254 else
4256 memmove (buf->buf, buf->buf + end, buf->size - end);
4257 buf->off += end;
4258 buf->size -= end;
4259 buf->padding_bytes = padding_bytes;
4261 return;
4263 size_t wordsize = UNITS_PER_WORD;
4264 for (size_t i = 0; i < end; i += wordsize)
4266 size_t nonzero_first = wordsize;
4267 size_t nonzero_last = 0;
4268 size_t zero_first = wordsize;
4269 size_t zero_last = 0;
4270 bool all_ones = true, bytes_only = true;
4271 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4272 > (unsigned HOST_WIDE_INT) buf->sz)
4274 gcc_assert (wordsize > 1);
4275 wordsize /= 2;
4276 i -= wordsize;
4277 continue;
4279 for (size_t j = i; j < i + wordsize && j < end; j++)
4281 if (buf->buf[j])
4283 if (nonzero_first == wordsize)
4285 nonzero_first = j - i;
4286 nonzero_last = j - i;
4288 if (nonzero_last != j - i)
4289 all_ones = false;
4290 nonzero_last = j + 1 - i;
4292 else
4294 if (zero_first == wordsize)
4295 zero_first = j - i;
4296 zero_last = j + 1 - i;
4298 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4300 all_ones = false;
4301 bytes_only = false;
4304 size_t padding_end = i;
4305 if (padding_bytes)
4307 if (nonzero_first == 0
4308 && nonzero_last == wordsize
4309 && all_ones)
4311 /* All bits are padding and we had some padding
4312 before too. Just extend it. */
4313 padding_bytes += wordsize;
4314 continue;
4316 if (all_ones && nonzero_first == 0)
4318 padding_bytes += nonzero_last;
4319 padding_end += nonzero_last;
4320 nonzero_first = wordsize;
4321 nonzero_last = 0;
4323 else if (bytes_only && nonzero_first == 0)
4325 gcc_assert (zero_first && zero_first != wordsize);
4326 padding_bytes += zero_first;
4327 padding_end += zero_first;
4329 tree atype, src;
4330 if (padding_bytes == 1)
4332 atype = char_type_node;
4333 src = build_zero_cst (char_type_node);
4335 else
4337 atype = build_array_type_nelts (char_type_node, padding_bytes);
4338 src = build_constructor (atype, NULL);
4340 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4341 build_int_cst (buf->alias_type,
4342 buf->off + padding_end
4343 - padding_bytes));
4344 gimple *g = gimple_build_assign (dst, src);
4345 gimple_set_location (g, buf->loc);
4346 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4347 padding_bytes = 0;
4348 buf->padding_bytes = 0;
4350 if (nonzero_first == wordsize)
4351 /* All bits in a word are 0, there are no padding bits. */
4352 continue;
4353 if (all_ones && nonzero_last == wordsize)
4355 /* All bits between nonzero_first and end of word are padding
4356 bits, start counting padding_bytes. */
4357 padding_bytes = nonzero_last - nonzero_first;
4358 continue;
4360 if (bytes_only)
4362 /* If bitfields aren't involved in this word, prefer storing
4363 individual bytes or groups of them over performing a RMW
4364 operation on the whole word. */
4365 gcc_assert (i + zero_last <= end);
4366 for (size_t j = padding_end; j < i + zero_last; j++)
4368 if (buf->buf[j])
4370 size_t k;
4371 for (k = j; k < i + zero_last; k++)
4372 if (buf->buf[k] == 0)
4373 break;
4374 HOST_WIDE_INT off = buf->off + j;
4375 tree atype, src;
4376 if (k - j == 1)
4378 atype = char_type_node;
4379 src = build_zero_cst (char_type_node);
4381 else
4383 atype = build_array_type_nelts (char_type_node, k - j);
4384 src = build_constructor (atype, NULL);
4386 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4387 buf->base,
4388 build_int_cst (buf->alias_type, off));
4389 gimple *g = gimple_build_assign (dst, src);
4390 gimple_set_location (g, buf->loc);
4391 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4392 j = k;
4395 if (nonzero_last == wordsize)
4396 padding_bytes = nonzero_last - zero_last;
4397 continue;
4399 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4401 if (nonzero_last - nonzero_first <= eltsz
4402 && ((nonzero_first & ~(eltsz - 1))
4403 == ((nonzero_last - 1) & ~(eltsz - 1))))
4405 tree type;
4406 if (eltsz == 1)
4407 type = char_type_node;
4408 else
4409 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT, 0);
4411 size_t start = nonzero_first & ~(eltsz - 1);
4412 HOST_WIDE_INT off = buf->off + i + start;
4413 tree atype = type;
4414 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4415 atype = build_aligned_type (type, buf->align);
4416 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4417 build_int_cst (buf->alias_type, off));
4418 tree src;
4419 gimple *g;
4420 if (all_ones
4421 && nonzero_first == start
4422 && nonzero_last == start + eltsz)
4423 src = build_zero_cst (type);
4424 else
4426 src = make_ssa_name (type);
4427 g = gimple_build_assign (src, unshare_expr (dst));
4428 gimple_set_location (g, buf->loc);
4429 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4430 tree mask = native_interpret_expr (type,
4431 buf->buf + i + start,
4432 eltsz);
4433 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4434 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4435 tree src_masked = make_ssa_name (type);
4436 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4437 src, mask);
4438 gimple_set_location (g, buf->loc);
4439 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4440 src = src_masked;
4442 g = gimple_build_assign (dst, src);
4443 gimple_set_location (g, buf->loc);
4444 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4445 break;
4449 if (full)
4451 if (padding_bytes)
4453 tree atype, src;
4454 if (padding_bytes == 1)
4456 atype = char_type_node;
4457 src = build_zero_cst (char_type_node);
4459 else
4461 atype = build_array_type_nelts (char_type_node, padding_bytes);
4462 src = build_constructor (atype, NULL);
4464 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4465 build_int_cst (buf->alias_type,
4466 buf->off + end
4467 - padding_bytes));
4468 gimple *g = gimple_build_assign (dst, src);
4469 gimple_set_location (g, buf->loc);
4470 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4472 size_t end_rem = end % UNITS_PER_WORD;
4473 buf->off += end - end_rem;
4474 buf->size = end_rem;
4475 memset (buf->buf, 0, buf->size);
4476 buf->padding_bytes = 0;
4478 else
4480 memmove (buf->buf, buf->buf + end, buf->size - end);
4481 buf->off += end;
4482 buf->size -= end;
4483 buf->padding_bytes = padding_bytes;
4487 /* Append PADDING_BYTES padding bytes. */
4489 static void
4490 clear_padding_add_padding (clear_padding_struct *buf,
4491 HOST_WIDE_INT padding_bytes)
4493 if (padding_bytes == 0)
4494 return;
4495 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4496 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4497 clear_padding_flush (buf, false);
4498 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4499 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4501 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4502 padding_bytes -= clear_padding_buf_size - buf->size;
4503 buf->size = clear_padding_buf_size;
4504 clear_padding_flush (buf, false);
4505 gcc_assert (buf->padding_bytes);
4506 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4507 is guaranteed to be all ones. */
4508 padding_bytes += buf->size;
4509 buf->size = padding_bytes % UNITS_PER_WORD;
4510 memset (buf->buf, ~0, buf->size);
4511 buf->off += padding_bytes - buf->size;
4512 buf->padding_bytes += padding_bytes - buf->size;
4514 else
4516 memset (buf->buf + buf->size, ~0, padding_bytes);
4517 buf->size += padding_bytes;
4521 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4523 /* Clear padding bits of union type TYPE. */
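/* A byte is padding in a union only if it is padding in every member,
   so the padding mask of each member is computed into a scratch
   all-ones buffer and the results are ANDed together.  E.g. in

     union u { int i; char c[2]; };   // with a 4-byte int

   bytes 2-3 are padding of C but value bytes of I, so nothing in the
   union is treated as padding.  */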
4525 static void
4526 clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4528 clear_padding_struct *union_buf;
4529 HOST_WIDE_INT start_off = 0, next_off = 0;
4530 size_t start_size = 0;
4531 if (buf->union_ptr)
4533 start_off = buf->off + buf->size;
4534 next_off = start_off + sz;
4535 start_size = start_off % UNITS_PER_WORD;
4536 start_off -= start_size;
4537 clear_padding_flush (buf, true);
4538 union_buf = buf;
4540 else
4542 if (sz + buf->size > clear_padding_buf_size)
4543 clear_padding_flush (buf, false);
4544 union_buf = XALLOCA (clear_padding_struct);
4545 union_buf->loc = buf->loc;
4546 union_buf->clear_in_mask = buf->clear_in_mask;
4547 union_buf->base = NULL_TREE;
4548 union_buf->alias_type = NULL_TREE;
4549 union_buf->gsi = NULL;
4550 union_buf->align = 0;
4551 union_buf->off = 0;
4552 union_buf->padding_bytes = 0;
4553 union_buf->sz = sz;
4554 union_buf->size = 0;
4555 if (sz + buf->size <= clear_padding_buf_size)
4556 union_buf->union_ptr = buf->buf + buf->size;
4557 else
4558 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4559 memset (union_buf->union_ptr, ~0, sz);
4562 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4563 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4565 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4567 if (TREE_TYPE (field) == error_mark_node)
4568 continue;
4569 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4570 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4571 if (!buf->clear_in_mask)
4572 error_at (buf->loc, "flexible array member %qD does not have "
4573 "well defined padding bits for %qs",
4574 field, "__builtin_clear_padding");
4575 continue;
4577 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4578 gcc_assert (union_buf->size == 0);
4579 union_buf->off = start_off;
4580 union_buf->size = start_size;
4581 memset (union_buf->buf, ~0, start_size);
4582 clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4583 clear_padding_add_padding (union_buf, sz - fldsz);
4584 clear_padding_flush (union_buf, true);
4587 if (buf == union_buf)
4589 buf->off = next_off;
4590 buf->size = next_off % UNITS_PER_WORD;
4591 buf->off -= buf->size;
4592 memset (buf->buf, ~0, buf->size);
4594 else if (sz + buf->size <= clear_padding_buf_size)
4595 buf->size += sz;
4596 else
4598 unsigned char *union_ptr = union_buf->union_ptr;
4599 while (sz)
4601 clear_padding_flush (buf, false);
4602 HOST_WIDE_INT this_sz
4603 = MIN ((unsigned HOST_WIDE_INT) sz,
4604 clear_padding_buf_size - buf->size);
4605 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4606 buf->size += this_sz;
4607 union_ptr += this_sz;
4608 sz -= this_sz;
4610 XDELETE (union_buf->union_ptr);
4614 /* The only known floating point formats with padding bits are the
4615 IEEE extended ones. */
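/* E.g. the Intel 80-bit extended format occupies only 80 bits of its
   12- or 16-byte slot, and the m68k variant has 16 unused bits in the
   middle of its 96-bit representation; plain IEEE float and double
   have no padding bits at all.  */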
4617 static bool
4618 clear_padding_real_needs_padding_p (tree type)
4620 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4621 return (fmt->b == 2
4622 && fmt->signbit_ro == fmt->signbit_rw
4623 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4626 /* Return true if TYPE might contain any padding bits. */
4628 static bool
4629 clear_padding_type_may_have_padding_p (tree type)
4631 switch (TREE_CODE (type))
4633 case RECORD_TYPE:
4634 case UNION_TYPE:
4635 return true;
4636 case ARRAY_TYPE:
4637 case COMPLEX_TYPE:
4638 case VECTOR_TYPE:
4639 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4640 case REAL_TYPE:
4641 return clear_padding_real_needs_padding_p (type);
4642 default:
4643 return false;
4647 /* Emit a runtime loop:
4648 for (; buf.base != end; buf.base += sz)
4649 __builtin_clear_padding (buf.base); */
4651 static void
4652 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4654 tree l1 = create_artificial_label (buf->loc);
4655 tree l2 = create_artificial_label (buf->loc);
4656 tree l3 = create_artificial_label (buf->loc);
4657 gimple *g = gimple_build_goto (l2);
4658 gimple_set_location (g, buf->loc);
4659 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4660 g = gimple_build_label (l1);
4661 gimple_set_location (g, buf->loc);
4662 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4663 clear_padding_type (buf, type, buf->sz);
4664 clear_padding_flush (buf, true);
4665 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4666 size_int (buf->sz));
4667 gimple_set_location (g, buf->loc);
4668 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4669 g = gimple_build_label (l2);
4670 gimple_set_location (g, buf->loc);
4671 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4672 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4673 gimple_set_location (g, buf->loc);
4674 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4675 g = gimple_build_label (l3);
4676 gimple_set_location (g, buf->loc);
4677 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4680 /* Clear padding bits for TYPE. Called recursively from
4681 gimple_fold_builtin_clear_padding. */
4683 static void
4684 clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4686 switch (TREE_CODE (type))
4688 case RECORD_TYPE:
4689 HOST_WIDE_INT cur_pos;
4690 cur_pos = 0;
4691 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4692 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4694 tree ftype = TREE_TYPE (field);
4695 if (DECL_BIT_FIELD (field))
4697 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4698 if (fldsz == 0)
4699 continue;
4700 HOST_WIDE_INT pos = int_byte_position (field);
4701 HOST_WIDE_INT bpos
4702 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4703 bpos %= BITS_PER_UNIT;
4704 HOST_WIDE_INT end
4705 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4706 if (pos + end > cur_pos)
4708 clear_padding_add_padding (buf, pos + end - cur_pos);
4709 cur_pos = pos + end;
4711 gcc_assert (cur_pos > pos
4712 && ((unsigned HOST_WIDE_INT) buf->size
4713 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4714 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4715 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4716 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4717 " in %qs", "__builtin_clear_padding");
4718 else if (BYTES_BIG_ENDIAN)
4720 /* Big endian. */
4721 if (bpos + fldsz <= BITS_PER_UNIT)
4722 *p &= ~(((1 << fldsz) - 1)
4723 << (BITS_PER_UNIT - bpos - fldsz));
4724 else
4726 if (bpos)
4728 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4729 p++;
4730 fldsz -= BITS_PER_UNIT - bpos;
4732 memset (p, 0, fldsz / BITS_PER_UNIT);
4733 p += fldsz / BITS_PER_UNIT;
4734 fldsz %= BITS_PER_UNIT;
4735 if (fldsz)
4736 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4739 else
4741 /* Little endian. */
4742 if (bpos + fldsz <= BITS_PER_UNIT)
4743 *p &= ~(((1 << fldsz) - 1) << bpos);
4744 else
4746 if (bpos)
4748 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4749 p++;
4750 fldsz -= BITS_PER_UNIT - bpos;
4752 memset (p, 0, fldsz / BITS_PER_UNIT);
4753 p += fldsz / BITS_PER_UNIT;
4754 fldsz %= BITS_PER_UNIT;
4755 if (fldsz)
4756 *p &= ~((1 << fldsz) - 1);
4760 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4762 if (ftype == error_mark_node)
4763 continue;
4764 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4765 && !COMPLETE_TYPE_P (ftype));
4766 if (!buf->clear_in_mask)
4767 error_at (buf->loc, "flexible array member %qD does not "
4768 "have well defined padding bits for %qs",
4769 field, "__builtin_clear_padding");
4771 else if (is_empty_type (TREE_TYPE (field)))
4772 continue;
4773 else
4775 HOST_WIDE_INT pos = int_byte_position (field);
4776 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4777 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4778 clear_padding_add_padding (buf, pos - cur_pos);
4779 cur_pos = pos;
4780 clear_padding_type (buf, TREE_TYPE (field), fldsz);
4781 cur_pos += fldsz;
4784 gcc_assert (sz >= cur_pos);
4785 clear_padding_add_padding (buf, sz - cur_pos);
4786 break;
4787 case ARRAY_TYPE:
4788 HOST_WIDE_INT nelts, fldsz;
4789 fldsz = int_size_in_bytes (TREE_TYPE (type));
4790 if (fldsz == 0)
4791 break;
4792 nelts = sz / fldsz;
4793 if (nelts > 1
4794 && sz > 8 * UNITS_PER_WORD
4795 && buf->union_ptr == NULL
4796 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4798 /* For a sufficiently large array of more than one element,
4799 emit a runtime loop to keep the code size manageable. */
4800 tree base = buf->base;
4801 unsigned int prev_align = buf->align;
4802 HOST_WIDE_INT off = buf->off + buf->size;
4803 HOST_WIDE_INT prev_sz = buf->sz;
4804 clear_padding_flush (buf, true);
4805 tree elttype = TREE_TYPE (type);
4806 buf->base = create_tmp_var (build_pointer_type (elttype));
4807 tree end = make_ssa_name (TREE_TYPE (buf->base));
4808 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4809 base, size_int (off));
4810 gimple_set_location (g, buf->loc);
4811 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4812 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4813 size_int (sz));
4814 gimple_set_location (g, buf->loc);
4815 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4816 buf->sz = fldsz;
4817 buf->align = TYPE_ALIGN (elttype);
4818 buf->off = 0;
4819 buf->size = 0;
4820 clear_padding_emit_loop (buf, elttype, end);
4821 buf->base = base;
4822 buf->sz = prev_sz;
4823 buf->align = prev_align;
4824 buf->size = off % UNITS_PER_WORD;
4825 buf->off = off - buf->size;
4826 memset (buf->buf, 0, buf->size);
4827 break;
4829 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4830 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4831 break;
4832 case UNION_TYPE:
4833 clear_padding_union (buf, type, sz);
4834 break;
4835 case REAL_TYPE:
4836 gcc_assert ((size_t) sz <= clear_padding_unit);
4837 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4838 clear_padding_flush (buf, false);
4839 if (clear_padding_real_needs_padding_p (type))
4841 /* Use native_interpret_expr + native_encode_expr to figure out
4842 which bits are padding. */
4843 memset (buf->buf + buf->size, ~0, sz);
4844 tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
4845 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4846 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4847 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4848 for (size_t i = 0; i < (size_t) sz; i++)
4849 buf->buf[buf->size + i] ^= ~0;
4851 else
4852 memset (buf->buf + buf->size, 0, sz);
4853 buf->size += sz;
4854 break;
4855 case COMPLEX_TYPE:
4856 fldsz = int_size_in_bytes (TREE_TYPE (type));
4857 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4858 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4859 break;
4860 case VECTOR_TYPE:
4861 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4862 fldsz = int_size_in_bytes (TREE_TYPE (type));
4863 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4864 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4865 break;
4866 case NULLPTR_TYPE:
4867 gcc_assert ((size_t) sz <= clear_padding_unit);
4868 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4869 clear_padding_flush (buf, false);
4870 memset (buf->buf + buf->size, ~0, sz);
4871 buf->size += sz;
4872 break;
4873 default:
4874 gcc_assert ((size_t) sz <= clear_padding_unit);
4875 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4876 clear_padding_flush (buf, false);
4877 memset (buf->buf + buf->size, 0, sz);
4878 buf->size += sz;
4879 break;
4883 /* Clear padding bits of TYPE in MASK. */
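/* E.g. for

     struct S { char c; int i; };   // with a 4-byte, 4-byte-aligned int

   and MASK initially all ones, bytes 1-3 of MASK (the padding after C)
   are zeroed out while the bytes covering C and I are left alone.  */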
4885 void
4886 clear_type_padding_in_mask (tree type, unsigned char *mask)
4888 clear_padding_struct buf;
4889 buf.loc = UNKNOWN_LOCATION;
4890 buf.clear_in_mask = true;
4891 buf.base = NULL_TREE;
4892 buf.alias_type = NULL_TREE;
4893 buf.gsi = NULL;
4894 buf.align = 0;
4895 buf.off = 0;
4896 buf.padding_bytes = 0;
4897 buf.sz = int_size_in_bytes (type);
4898 buf.size = 0;
4899 buf.union_ptr = mask;
4900 clear_padding_type (&buf, type, buf.sz);
4901 clear_padding_flush (&buf, true);
4904 /* Fold __builtin_clear_padding builtin. */
4906 static bool
4907 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4909 gimple *stmt = gsi_stmt (*gsi);
4910 gcc_assert (gimple_call_num_args (stmt) == 2);
4911 tree ptr = gimple_call_arg (stmt, 0);
4912 tree typearg = gimple_call_arg (stmt, 1);
4913 tree type = TREE_TYPE (TREE_TYPE (typearg));
4914 location_t loc = gimple_location (stmt);
4915 clear_padding_struct buf;
4916 gimple_stmt_iterator gsiprev = *gsi;
4917 /* This call should be folded during the GIMPLE lowering pass. */
4918 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4919 gcc_assert (COMPLETE_TYPE_P (type));
4920 gsi_prev (&gsiprev);
4922 buf.loc = loc;
4923 buf.clear_in_mask = false;
4924 buf.base = ptr;
4925 buf.alias_type = NULL_TREE;
4926 buf.gsi = gsi;
4927 buf.align = get_pointer_alignment (ptr);
4928 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4929 buf.align = MAX (buf.align, talign);
4930 buf.off = 0;
4931 buf.padding_bytes = 0;
4932 buf.size = 0;
4933 buf.sz = int_size_in_bytes (type);
4934 buf.union_ptr = NULL;
4935 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4936 sorry_at (loc, "%s not supported for variable length aggregates",
4937 "__builtin_clear_padding");
4938 /* The implementation currently assumes 8-bit host and target
4939 chars, which is the case for all currently supported targets
4940 and hosts, and is required e.g. for the native_{encode,interpret}* APIs. */
4941 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4942 sorry_at (loc, "%s not supported on this target",
4943 "__builtin_clear_padding");
4944 else if (!clear_padding_type_may_have_padding_p (type))
4946 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4948 tree sz = TYPE_SIZE_UNIT (type);
4949 tree elttype = type;
4950 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4951 while (TREE_CODE (elttype) == ARRAY_TYPE
4952 && int_size_in_bytes (elttype) < 0)
4953 elttype = TREE_TYPE (elttype);
4954 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4955 gcc_assert (eltsz >= 0);
4956 if (eltsz)
4958 buf.base = create_tmp_var (build_pointer_type (elttype));
4959 tree end = make_ssa_name (TREE_TYPE (buf.base));
4960 gimple *g = gimple_build_assign (buf.base, ptr);
4961 gimple_set_location (g, loc);
4962 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4963 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4964 gimple_set_location (g, loc);
4965 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4966 buf.sz = eltsz;
4967 buf.align = TYPE_ALIGN (elttype);
4968 buf.alias_type = build_pointer_type (elttype);
4969 clear_padding_emit_loop (&buf, elttype, end);
4972 else
4974 if (!is_gimple_mem_ref_addr (buf.base))
4976 buf.base = make_ssa_name (TREE_TYPE (ptr));
4977 gimple *g = gimple_build_assign (buf.base, ptr);
4978 gimple_set_location (g, loc);
4979 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4981 buf.alias_type = build_pointer_type (type);
4982 clear_padding_type (&buf, type, buf.sz);
4983 clear_padding_flush (&buf, true);
4986 gimple_stmt_iterator gsiprev2 = *gsi;
4987 gsi_prev (&gsiprev2);
4988 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4989 gsi_replace (gsi, gimple_build_nop (), true);
4990 else
4992 gsi_remove (gsi, true);
4993 *gsi = gsiprev2;
4995 return true;
4998 /* Fold the non-target builtin at *GSI and return whether any simplification
4999 was made. */
5001 static bool
5002 gimple_fold_builtin (gimple_stmt_iterator *gsi)
5004 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
5005 tree callee = gimple_call_fndecl (stmt);
5007 /* Give up for always_inline inline builtins until they are
5008 inlined. */
5009 if (avoid_folding_inline_builtin (callee))
5010 return false;
5012 unsigned n = gimple_call_num_args (stmt);
5013 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5014 switch (fcode)
5016 case BUILT_IN_BCMP:
5017 return gimple_fold_builtin_bcmp (gsi);
5018 case BUILT_IN_BCOPY:
5019 return gimple_fold_builtin_bcopy (gsi);
5020 case BUILT_IN_BZERO:
5021 return gimple_fold_builtin_bzero (gsi);
5023 case BUILT_IN_MEMSET:
5024 return gimple_fold_builtin_memset (gsi,
5025 gimple_call_arg (stmt, 1),
5026 gimple_call_arg (stmt, 2));
5027 case BUILT_IN_MEMCPY:
5028 case BUILT_IN_MEMPCPY:
5029 case BUILT_IN_MEMMOVE:
5030 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5031 gimple_call_arg (stmt, 1), fcode);
5032 case BUILT_IN_SPRINTF_CHK:
5033 case BUILT_IN_VSPRINTF_CHK:
5034 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5035 case BUILT_IN_STRCAT_CHK:
5036 return gimple_fold_builtin_strcat_chk (gsi);
5037 case BUILT_IN_STRNCAT_CHK:
5038 return gimple_fold_builtin_strncat_chk (gsi);
5039 case BUILT_IN_STRLEN:
5040 return gimple_fold_builtin_strlen (gsi);
5041 case BUILT_IN_STRCPY:
5042 return gimple_fold_builtin_strcpy (gsi,
5043 gimple_call_arg (stmt, 0),
5044 gimple_call_arg (stmt, 1));
5045 case BUILT_IN_STRNCPY:
5046 return gimple_fold_builtin_strncpy (gsi,
5047 gimple_call_arg (stmt, 0),
5048 gimple_call_arg (stmt, 1),
5049 gimple_call_arg (stmt, 2));
5050 case BUILT_IN_STRCAT:
5051 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5052 gimple_call_arg (stmt, 1));
5053 case BUILT_IN_STRNCAT:
5054 return gimple_fold_builtin_strncat (gsi);
5055 case BUILT_IN_INDEX:
5056 case BUILT_IN_STRCHR:
5057 return gimple_fold_builtin_strchr (gsi, false);
5058 case BUILT_IN_RINDEX:
5059 case BUILT_IN_STRRCHR:
5060 return gimple_fold_builtin_strchr (gsi, true);
5061 case BUILT_IN_STRSTR:
5062 return gimple_fold_builtin_strstr (gsi);
5063 case BUILT_IN_STRCMP:
5064 case BUILT_IN_STRCMP_EQ:
5065 case BUILT_IN_STRCASECMP:
5066 case BUILT_IN_STRNCMP:
5067 case BUILT_IN_STRNCMP_EQ:
5068 case BUILT_IN_STRNCASECMP:
5069 return gimple_fold_builtin_string_compare (gsi);
5070 case BUILT_IN_MEMCHR:
5071 return gimple_fold_builtin_memchr (gsi);
5072 case BUILT_IN_FPUTS:
5073 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5074 gimple_call_arg (stmt, 1), false);
5075 case BUILT_IN_FPUTS_UNLOCKED:
5076 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5077 gimple_call_arg (stmt, 1), true);
5078 case BUILT_IN_MEMCPY_CHK:
5079 case BUILT_IN_MEMPCPY_CHK:
5080 case BUILT_IN_MEMMOVE_CHK:
5081 case BUILT_IN_MEMSET_CHK:
5082 return gimple_fold_builtin_memory_chk (gsi,
5083 gimple_call_arg (stmt, 0),
5084 gimple_call_arg (stmt, 1),
5085 gimple_call_arg (stmt, 2),
5086 gimple_call_arg (stmt, 3),
5087 fcode);
5088 case BUILT_IN_STPCPY:
5089 return gimple_fold_builtin_stpcpy (gsi);
5090 case BUILT_IN_STRCPY_CHK:
5091 case BUILT_IN_STPCPY_CHK:
5092 return gimple_fold_builtin_stxcpy_chk (gsi,
5093 gimple_call_arg (stmt, 0),
5094 gimple_call_arg (stmt, 1),
5095 gimple_call_arg (stmt, 2),
5096 fcode);
5097 case BUILT_IN_STRNCPY_CHK:
5098 case BUILT_IN_STPNCPY_CHK:
5099 return gimple_fold_builtin_stxncpy_chk (gsi,
5100 gimple_call_arg (stmt, 0),
5101 gimple_call_arg (stmt, 1),
5102 gimple_call_arg (stmt, 2),
5103 gimple_call_arg (stmt, 3),
5104 fcode);
5105 case BUILT_IN_SNPRINTF_CHK:
5106 case BUILT_IN_VSNPRINTF_CHK:
5107 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5109 case BUILT_IN_FPRINTF:
5110 case BUILT_IN_FPRINTF_UNLOCKED:
5111 case BUILT_IN_VFPRINTF:
5112 if (n == 2 || n == 3)
5113 return gimple_fold_builtin_fprintf (gsi,
5114 gimple_call_arg (stmt, 0),
5115 gimple_call_arg (stmt, 1),
5116 n == 3
5117 ? gimple_call_arg (stmt, 2)
5118 : NULL_TREE,
5119 fcode);
5120 break;
5121 case BUILT_IN_FPRINTF_CHK:
5122 case BUILT_IN_VFPRINTF_CHK:
5123 if (n == 3 || n == 4)
5124 return gimple_fold_builtin_fprintf (gsi,
5125 gimple_call_arg (stmt, 0),
5126 gimple_call_arg (stmt, 2),
5127 n == 4
5128 ? gimple_call_arg (stmt, 3)
5129 : NULL_TREE,
5130 fcode);
5131 break;
5132 case BUILT_IN_PRINTF:
5133 case BUILT_IN_PRINTF_UNLOCKED:
5134 case BUILT_IN_VPRINTF:
5135 if (n == 1 || n == 2)
5136 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5137 n == 2
5138 ? gimple_call_arg (stmt, 1)
5139 : NULL_TREE, fcode);
5140 break;
5141 case BUILT_IN_PRINTF_CHK:
5142 case BUILT_IN_VPRINTF_CHK:
5143 if (n == 2 || n == 3)
5144 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5145 n == 3
5146 ? gimple_call_arg (stmt, 2)
5147 : NULL_TREE, fcode);
5148 break;
5149 case BUILT_IN_ACC_ON_DEVICE:
5150 return gimple_fold_builtin_acc_on_device (gsi,
5151 gimple_call_arg (stmt, 0));
5152 case BUILT_IN_REALLOC:
5153 return gimple_fold_builtin_realloc (gsi);
5155 case BUILT_IN_CLEAR_PADDING:
5156 return gimple_fold_builtin_clear_padding (gsi);
5158 default:;
5161 /* Try the generic builtin folder. */
5162 bool ignore = (gimple_call_lhs (stmt) == NULL);
5163 tree result = fold_call_stmt (stmt, ignore);
5164 if (result)
5166 if (ignore)
5167 STRIP_NOPS (result);
5168 else
5169 result = fold_convert (gimple_call_return_type (stmt), result);
5170 gimplify_and_update_call_from_tree (gsi, result);
5171 return true;
5174 return false;
5177 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5178 function calls to constants, where possible. */
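/* E.g. a GOACC_DIM_SIZE call for an axis whose launch dimension is
   statically known to be 32 folds to the constant 32, and a
   GOACC_DIM_POS call for an axis of size 1 folds to 0, because the
   only possible position on such an axis is zero.  */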
5180 static tree
5181 fold_internal_goacc_dim (const gimple *call)
5183 int axis = oacc_get_ifn_dim_arg (call);
5184 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5185 tree result = NULL_TREE;
5186 tree type = TREE_TYPE (gimple_call_lhs (call));
5188 switch (gimple_call_internal_fn (call))
5190 case IFN_GOACC_DIM_POS:
5191 /* If the size is 1, we know the answer. */
5192 if (size == 1)
5193 result = build_int_cst (type, 0);
5194 break;
5195 case IFN_GOACC_DIM_SIZE:
5196 /* If the size is not dynamic, we know the answer. */
5197 if (size)
5198 result = build_int_cst (type, size);
5199 break;
5200 default:
5201 break;
5204 return result;
5207 /* Return true if STMT is an __atomic_compare_exchange_N call which is
5208 suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second
5209 argument is &var where var is only addressable because of such calls. */
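/* A typical candidate is

     int e = ...;
     bool r = __atomic_compare_exchange_n (p, &e, d, false,
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);

   where the local E is address-taken only because the builtin requires
   a pointer; after the conversion E can be kept in a register.  */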
5211 bool
5212 optimize_atomic_compare_exchange_p (gimple *stmt)
5214 if (gimple_call_num_args (stmt) != 6
5215 || !flag_inline_atomics
5216 || !optimize
5217 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5218 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5219 || !gimple_vdef (stmt)
5220 || !gimple_vuse (stmt))
5221 return false;
5223 tree fndecl = gimple_call_fndecl (stmt);
5224 switch (DECL_FUNCTION_CODE (fndecl))
5226 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5227 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5228 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5229 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5230 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5231 break;
5232 default:
5233 return false;
5236 tree expected = gimple_call_arg (stmt, 1);
5237 if (TREE_CODE (expected) != ADDR_EXPR
5238 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5239 return false;
5241 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5242 if (!is_gimple_reg_type (etype)
5243 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5244 || TREE_THIS_VOLATILE (etype)
5245 || VECTOR_TYPE_P (etype)
5246 || TREE_CODE (etype) == COMPLEX_TYPE
5247 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5248 might not preserve all the bits. See PR71716. */
5249 || SCALAR_FLOAT_TYPE_P (etype)
5250 || maybe_ne (TYPE_PRECISION (etype),
5251 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5252 return false;
5254 tree weak = gimple_call_arg (stmt, 3);
5255 if (!integer_zerop (weak) && !integer_onep (weak))
5256 return false;
5258 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5259 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5260 machine_mode mode = TYPE_MODE (itype);
5262 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5263 == CODE_FOR_nothing
5264 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5265 return false;
5267 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5268 return false;
5270 return true;
5273 /* Fold
5274 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5275 into
5276 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5277 i = IMAGPART_EXPR <t>;
5278 r = (_Bool) i;
5279 e = REALPART_EXPR <t>; */
5281 void
5282 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5284 gimple *stmt = gsi_stmt (*gsi);
5285 tree fndecl = gimple_call_fndecl (stmt);
5286 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5287 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5288 tree ctype = build_complex_type (itype);
5289 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5290 bool throws = false;
5291 edge e = NULL;
5292 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5293 expected);
5294 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5295 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5296 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5298 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5299 build1 (VIEW_CONVERT_EXPR, itype,
5300 gimple_assign_lhs (g)));
5301 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5303 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5304 + int_size_in_bytes (itype);
5305 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5306 gimple_call_arg (stmt, 0),
5307 gimple_assign_lhs (g),
5308 gimple_call_arg (stmt, 2),
5309 build_int_cst (integer_type_node, flag),
5310 gimple_call_arg (stmt, 4),
5311 gimple_call_arg (stmt, 5));
5312 tree lhs = make_ssa_name (ctype);
5313 gimple_call_set_lhs (g, lhs);
5314 gimple_move_vops (g, stmt);
5315 tree oldlhs = gimple_call_lhs (stmt);
5316 if (stmt_can_throw_internal (cfun, stmt))
5318 throws = true;
5319 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5321 gimple_call_set_nothrow (as_a <gcall *> (g),
5322 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5323 gimple_call_set_lhs (stmt, NULL_TREE);
5324 gsi_replace (gsi, g, true);
5325 if (oldlhs)
5327 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5328 build1 (IMAGPART_EXPR, itype, lhs));
5329 if (throws)
5331 gsi_insert_on_edge_immediate (e, g);
5332 *gsi = gsi_for_stmt (g);
5334 else
5335 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5336 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5337 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5339 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5340 build1 (REALPART_EXPR, itype, lhs));
5341 if (throws && oldlhs == NULL_TREE)
5343 gsi_insert_on_edge_immediate (e, g);
5344 *gsi = gsi_for_stmt (g);
5346 else
5347 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5348 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5350 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5351 VIEW_CONVERT_EXPR,
5352 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5353 gimple_assign_lhs (g)));
5354 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5356 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5357 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5358 *gsi = gsiret;
5361 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
5362 signed precision, doesn't fit into TYPE. The test for overflow is
5363 performed regardless of -fwrapv, and even for unsigned types. */
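/* E.g. with an 8-bit unsigned TYPE, 200 + 100 is 300 in infinite
   precision and needs 9 value bits, so it overflows; likewise
   100 - 200 is -100, which is negative and therefore overflows an
   unsigned type, while 200 - 100 fits fine.  */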
5365 bool
5366 arith_overflowed_p (enum tree_code code, const_tree type,
5367 const_tree arg0, const_tree arg1)
5369 widest2_int warg0 = widest2_int_cst (arg0);
5370 widest2_int warg1 = widest2_int_cst (arg1);
5371 widest2_int wres;
5372 switch (code)
5374 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5375 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5376 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5377 default: gcc_unreachable ();
5379 signop sign = TYPE_SIGN (type);
5380 if (sign == UNSIGNED && wi::neg_p (wres))
5381 return true;
5382 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5385 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5386 for the memory it references, otherwise return null. VECTYPE is the
5387 type of the memory vector. */
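/* E.g. an IFN_MASK_LOAD whose mask argument is a constant all-ones
   vector is just an ordinary (possibly less aligned) vector load, so
   it can be represented as a plain MEM_REF of the vector type with the
   alignment taken from the call's second argument.  */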
5389 static tree
5390 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5392 tree ptr = gimple_call_arg (call, 0);
5393 tree alias_align = gimple_call_arg (call, 1);
5394 tree mask = gimple_call_arg (call, 2);
5395 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5396 return NULL_TREE;
5398 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5399 if (TYPE_ALIGN (vectype) != align)
5400 vectype = build_aligned_type (vectype, align);
5401 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5402 return fold_build2 (MEM_REF, vectype, ptr, offset);
5405 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5407 static bool
5408 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5410 tree lhs = gimple_call_lhs (call);
5411 if (!lhs)
5412 return false;
5414 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5416 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5417 gimple_set_location (new_stmt, gimple_location (call));
5418 gimple_move_vops (new_stmt, call);
5419 gsi_replace (gsi, new_stmt, false);
5420 return true;
5422 return false;
5425 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5427 static bool
5428 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5430 tree rhs = gimple_call_arg (call, 3);
5431 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5433 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5434 gimple_set_location (new_stmt, gimple_location (call));
5435 gimple_move_vops (new_stmt, call);
5436 gsi_replace (gsi, new_stmt, false);
5437 return true;
5439 return false;
5442 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5443 The statement may be replaced by another statement, e.g., if the call
5444 simplifies to a constant value. Return true if any changes were made.
5445 It is assumed that the operands have been previously folded. */
5447 static bool
5448 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5450 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5451 tree callee;
5452 bool changed = false;
5454 /* Check for virtual calls that became direct calls. */
5455 callee = gimple_call_fn (stmt);
5456 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5458 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5460 if (dump_file && virtual_method_call_p (callee)
5461 && !possible_polymorphic_call_target_p
5462 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5463 (OBJ_TYPE_REF_EXPR (callee)))))
5465 fprintf (dump_file,
5466 "Type inheritance inconsistent devirtualization of ");
5467 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5468 fprintf (dump_file, " to ");
5469 print_generic_expr (dump_file, callee, TDF_SLIM);
5470 fprintf (dump_file, "\n");
5473 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5474 changed = true;
5476 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5478 bool final;
5479 vec <cgraph_node *>targets
5480 = possible_polymorphic_call_targets (callee, stmt, &final);
5481 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5483 tree lhs = gimple_call_lhs (stmt);
5484 if (dump_enabled_p ())
5486 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5487 "folding virtual function call to %s\n",
5488 targets.length () == 1
5489 ? targets[0]->name ()
5490 : "__builtin_unreachable");
5492 if (targets.length () == 1)
5494 tree fndecl = targets[0]->decl;
5495 gimple_call_set_fndecl (stmt, fndecl);
5496 changed = true;
5497 /* If changing the call to __cxa_pure_virtual
5498 or similar noreturn function, adjust gimple_call_fntype
5499 too. */
5500 if (gimple_call_noreturn_p (stmt)
5501 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5502 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5503 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5504 == void_type_node))
5505 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5506 /* If the call becomes noreturn, remove the lhs. */
5507 if (lhs
5508 && gimple_call_noreturn_p (stmt)
5509 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5510 || should_remove_lhs_p (lhs)))
5512 if (TREE_CODE (lhs) == SSA_NAME)
5514 tree var = create_tmp_var (TREE_TYPE (lhs));
5515 tree def = get_or_create_ssa_default_def (cfun, var);
5516 gimple *new_stmt = gimple_build_assign (lhs, def);
5517 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5519 gimple_call_set_lhs (stmt, NULL_TREE);
5521 maybe_remove_unused_call_args (cfun, stmt);
5523 else
5525 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5526 gimple *new_stmt = gimple_build_call (fndecl, 0);
5527 gimple_set_location (new_stmt, gimple_location (stmt));
5528 /* If the call had an SSA name as its lhs, morph that into
5529 an uninitialized value. */
5530 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5532 tree var = create_tmp_var (TREE_TYPE (lhs));
5533 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5534 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5535 set_ssa_default_def (cfun, var, lhs);
5537 gimple_move_vops (new_stmt, stmt);
5538 gsi_replace (gsi, new_stmt, false);
5539 return true;
5545 /* Check for indirect calls that became direct calls, and then
5546 no longer require a static chain. */
5547 if (gimple_call_chain (stmt))
5549 tree fn = gimple_call_fndecl (stmt);
5550 if (fn && !DECL_STATIC_CHAIN (fn))
5552 gimple_call_set_chain (stmt, NULL);
5553 changed = true;
5557 if (inplace)
5558 return changed;
5560 /* Check for builtins that CCP can handle using information not
5561 available in the generic fold routines. */
5562 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5564 if (gimple_fold_builtin (gsi))
5565 changed = true;
5567 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5569 changed |= targetm.gimple_fold_builtin (gsi);
5571 else if (gimple_call_internal_p (stmt))
5573 enum tree_code subcode = ERROR_MARK;
5574 tree result = NULL_TREE;
5575 bool cplx_result = false;
5576 tree overflow = NULL_TREE;
5577 switch (gimple_call_internal_fn (stmt))
5579 case IFN_BUILTIN_EXPECT:
5580 result = fold_builtin_expect (gimple_location (stmt),
5581 gimple_call_arg (stmt, 0),
5582 gimple_call_arg (stmt, 1),
5583 gimple_call_arg (stmt, 2),
5584 NULL_TREE);
5585 break;
5586 case IFN_UBSAN_OBJECT_SIZE:
5588 tree offset = gimple_call_arg (stmt, 1);
5589 tree objsize = gimple_call_arg (stmt, 2);
5590 if (integer_all_onesp (objsize)
5591 || (TREE_CODE (offset) == INTEGER_CST
5592 && TREE_CODE (objsize) == INTEGER_CST
5593 && tree_int_cst_le (offset, objsize)))
5595 replace_call_with_value (gsi, NULL_TREE);
5596 return true;
5599 break;
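          /* Illustrative example (hypothetical values): for char buf[16],
             a check IFN_UBSAN_OBJECT_SIZE (ptr, 4, 16, kind) has constant
             offset 4 <= object size 16, so it can never be diagnosed and
             the whole call is replaced by a no-op above.  */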
5600 case IFN_UBSAN_PTR:
5601 if (integer_zerop (gimple_call_arg (stmt, 1)))
5603 replace_call_with_value (gsi, NULL_TREE);
5604 return true;
5606 break;
5607 case IFN_UBSAN_BOUNDS:
5609 tree index = gimple_call_arg (stmt, 1);
5610 tree bound = gimple_call_arg (stmt, 2);
5611 if (TREE_CODE (index) == INTEGER_CST
5612 && TREE_CODE (bound) == INTEGER_CST)
5614 index = fold_convert (TREE_TYPE (bound), index);
5615 if (TREE_CODE (index) == INTEGER_CST
5616 && tree_int_cst_le (index, bound))
5618 replace_call_with_value (gsi, NULL_TREE);
5619 return true;
5623 break;
5624 case IFN_GOACC_DIM_SIZE:
5625 case IFN_GOACC_DIM_POS:
5626 result = fold_internal_goacc_dim (stmt);
5627 break;
5628 case IFN_UBSAN_CHECK_ADD:
5629 subcode = PLUS_EXPR;
5630 break;
5631 case IFN_UBSAN_CHECK_SUB:
5632 subcode = MINUS_EXPR;
5633 break;
5634 case IFN_UBSAN_CHECK_MUL:
5635 subcode = MULT_EXPR;
5636 break;
5637 case IFN_ADD_OVERFLOW:
5638 subcode = PLUS_EXPR;
5639 cplx_result = true;
5640 break;
5641 case IFN_SUB_OVERFLOW:
5642 subcode = MINUS_EXPR;
5643 cplx_result = true;
5644 break;
5645 case IFN_MUL_OVERFLOW:
5646 subcode = MULT_EXPR;
5647 cplx_result = true;
5648 break;
5649 case IFN_MASK_LOAD:
5650 changed |= gimple_fold_mask_load (gsi, stmt);
5651 break;
5652 case IFN_MASK_STORE:
5653 changed |= gimple_fold_mask_store (gsi, stmt);
5654 break;
5655 default:
5656 break;
5658 if (subcode != ERROR_MARK)
5660 tree arg0 = gimple_call_arg (stmt, 0);
5661 tree arg1 = gimple_call_arg (stmt, 1);
5662 tree type = TREE_TYPE (arg0);
5663 if (cplx_result)
5665 tree lhs = gimple_call_lhs (stmt);
5666 if (lhs == NULL_TREE)
5667 type = NULL_TREE;
5668 else
5669 type = TREE_TYPE (TREE_TYPE (lhs));
5671 if (type == NULL_TREE)
5673 /* x = y + 0; x = y - 0; x = y * 0; */
5674 else if (integer_zerop (arg1))
5675 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5676 /* x = 0 + y; x = 0 * y; */
5677 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5678 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5679 /* x = y - y; */
5680 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5681 result = integer_zero_node;
5682 /* x = y * 1; x = 1 * y; */
5683 else if (subcode == MULT_EXPR && integer_onep (arg1))
5684 result = arg0;
5685 else if (subcode == MULT_EXPR && integer_onep (arg0))
5686 result = arg1;
5687 else if (TREE_CODE (arg0) == INTEGER_CST
5688 && TREE_CODE (arg1) == INTEGER_CST)
5690 if (cplx_result)
5691 result = int_const_binop (subcode, fold_convert (type, arg0),
5692 fold_convert (type, arg1));
5693 else
5694 result = int_const_binop (subcode, arg0, arg1);
5695 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5697 if (cplx_result)
5698 overflow = build_one_cst (type);
5699 else
5700 result = NULL_TREE;
5703 if (result)
5705 if (result == integer_zero_node)
5706 result = build_zero_cst (type);
5707 else if (cplx_result && TREE_TYPE (result) != type)
5709 if (TREE_CODE (result) == INTEGER_CST)
5711 if (arith_overflowed_p (PLUS_EXPR, type, result,
5712 integer_zero_node))
5713 overflow = build_one_cst (type);
5715 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5716 && TYPE_UNSIGNED (type))
5717 || (TYPE_PRECISION (type)
5718 < (TYPE_PRECISION (TREE_TYPE (result))
5719 + (TYPE_UNSIGNED (TREE_TYPE (result))
5720 && !TYPE_UNSIGNED (type)))))
5721 result = NULL_TREE;
5722 if (result)
5723 result = fold_convert (type, result);
5728 if (result)
5730 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5731 result = drop_tree_overflow (result);
5732 if (cplx_result)
5734 if (overflow == NULL_TREE)
5735 overflow = build_zero_cst (TREE_TYPE (result));
5736 tree ctype = build_complex_type (TREE_TYPE (result));
5737 if (TREE_CODE (result) == INTEGER_CST
5738 && TREE_CODE (overflow) == INTEGER_CST)
5739 result = build_complex (ctype, result, overflow);
5740 else
5741 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5742 ctype, result, overflow);
5744 gimplify_and_update_call_from_tree (gsi, result);
5745 changed = true;
5749 return changed;
5753 /* Return true if NAME has a use on STMT. */
5755 static bool
5756 has_use_on_stmt (tree name, gimple *stmt)
5758 imm_use_iterator iter;
5759 use_operand_p use_p;
5760 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5761 if (USE_STMT (use_p) == stmt)
5762 return true;
5763 return false;
5766 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5767 gimple_simplify.
5769 Replaces *GSI with the simplification result in RES_OP
5770 and the associated statements in *SEQ. Does the replacement
5771 according to INPLACE and returns true if the operation succeeded. */
5773 static bool
5774 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5775 gimple_match_op *res_op,
5776 gimple_seq *seq, bool inplace)
5778 gimple *stmt = gsi_stmt (*gsi);
5779 tree *ops = res_op->ops;
5780 unsigned int num_ops = res_op->num_ops;
5782 /* Play safe and do not allow abnormals to be mentioned in
5783 newly created statements. See also maybe_push_res_to_seq.
5784 As an exception allow such uses if there was a use of the
5785 same SSA name on the old stmt. */
5786 for (unsigned int i = 0; i < num_ops; ++i)
5787 if (TREE_CODE (ops[i]) == SSA_NAME
5788 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5789 && !has_use_on_stmt (ops[i], stmt))
5790 return false;
5792 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5793 for (unsigned int i = 0; i < 2; ++i)
5794 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5795 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5796 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5797 return false;
5799 /* Don't insert new statements when INPLACE is true, even if we could
5800 reuse STMT for the final statement. */
5801 if (inplace && !gimple_seq_empty_p (*seq))
5802 return false;
5804 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5806 gcc_assert (res_op->code.is_tree_code ());
5807 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
5808 /* GIMPLE_CONDs condition may not throw. */
5809 && (!flag_exceptions
5810 || !cfun->can_throw_non_call_exceptions
5811 || !operation_could_trap_p (res_op->code,
5812 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5813 false, NULL_TREE)))
5814 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5815 else if (res_op->code == SSA_NAME)
5816 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5817 build_zero_cst (TREE_TYPE (ops[0])));
5818 else if (res_op->code == INTEGER_CST)
5820 if (integer_zerop (ops[0]))
5821 gimple_cond_make_false (cond_stmt);
5822 else
5823 gimple_cond_make_true (cond_stmt);
5825 else if (!inplace)
5827 tree res = maybe_push_res_to_seq (res_op, seq);
5828 if (!res)
5829 return false;
5830 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5831 build_zero_cst (TREE_TYPE (res)));
5833 else
5834 return false;
5835 if (dump_file && (dump_flags & TDF_DETAILS))
5837 fprintf (dump_file, "gimple_simplified to ");
5838 if (!gimple_seq_empty_p (*seq))
5839 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5840 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5841 0, TDF_SLIM);
5843 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5844 return true;
5846 else if (is_gimple_assign (stmt)
5847 && res_op->code.is_tree_code ())
5849 if (!inplace
5850 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
5852 maybe_build_generic_op (res_op);
5853 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5854 res_op->op_or_null (0),
5855 res_op->op_or_null (1),
5856 res_op->op_or_null (2));
5857 if (dump_file && (dump_flags & TDF_DETAILS))
5859 fprintf (dump_file, "gimple_simplified to ");
5860 if (!gimple_seq_empty_p (*seq))
5861 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5862 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5863 0, TDF_SLIM);
5865 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5866 return true;
5869 else if (res_op->code.is_fn_code ()
5870 && gimple_call_combined_fn (stmt) == res_op->code)
5872 gcc_assert (num_ops == gimple_call_num_args (stmt));
5873 for (unsigned int i = 0; i < num_ops; ++i)
5874 gimple_call_set_arg (stmt, i, ops[i]);
5875 if (dump_file && (dump_flags & TDF_DETAILS))
5877 fprintf (dump_file, "gimple_simplified to ");
5878 if (!gimple_seq_empty_p (*seq))
5879 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5880 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5882 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5883 return true;
5885 else if (!inplace)
5887 if (gimple_has_lhs (stmt))
5889 tree lhs = gimple_get_lhs (stmt);
5890 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5891 return false;
5892 if (dump_file && (dump_flags & TDF_DETAILS))
5894 fprintf (dump_file, "gimple_simplified to ");
5895 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5897 gsi_replace_with_seq_vops (gsi, *seq);
5898 return true;
5900 else
5901 gcc_unreachable ();
5904 return false;
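/* For instance (an illustrative sketch): if gimple_simplify reduces the
   condition of a GIMPLE_COND like if (x_1 < x_1) to the constant 0, the
   code above rewrites the existing statement via gimple_cond_make_false
   instead of allocating a replacement statement.  */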
5907 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
5909 static bool
5910 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5912 bool res = false;
5913 tree *orig_t = t;
5915 if (TREE_CODE (*t) == ADDR_EXPR)
5916 t = &TREE_OPERAND (*t, 0);
5918 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5919 generic vector extension. The actual vector referenced is
5920 view-converted to an array type for this purpose. If the index
5921 is constant, the canonical representation in the middle-end is a
5922 BIT_FIELD_REF, so rewrite the former to the latter here. */
5923 if (TREE_CODE (*t) == ARRAY_REF
5924 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5925 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5926 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5928 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5929 if (VECTOR_TYPE_P (vtype))
5931 tree low = array_ref_low_bound (*t);
5932 if (TREE_CODE (low) == INTEGER_CST)
5934 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5936 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5937 wi::to_widest (low));
5938 idx = wi::mul (idx, wi::to_widest
5939 (TYPE_SIZE (TREE_TYPE (*t))));
5940 widest_int ext
5941 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5942 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5944 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5945 TREE_TYPE (*t),
5946 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5947 TYPE_SIZE (TREE_TYPE (*t)),
5948 wide_int_to_tree (bitsizetype, idx));
5949 res = true;
5956 while (handled_component_p (*t))
5957 t = &TREE_OPERAND (*t, 0);
5959 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
5960 of invariant addresses into an SSA name MEM_REF address. */
5961 if (TREE_CODE (*t) == MEM_REF
5962 || TREE_CODE (*t) == TARGET_MEM_REF)
5964 tree addr = TREE_OPERAND (*t, 0);
5965 if (TREE_CODE (addr) == ADDR_EXPR
5966 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5967 || handled_component_p (TREE_OPERAND (addr, 0))))
5969 tree base;
5970 poly_int64 coffset;
5971 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5972 &coffset);
5973 if (!base)
5975 if (is_debug)
5976 return false;
5977 gcc_unreachable ();
5980 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5981 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5982 TREE_OPERAND (*t, 1),
5983 size_int (coffset));
5984 res = true;
5986 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5987 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5990 /* Canonicalize back MEM_REFs to plain reference trees if the object
5991 accessed is a decl that has the same access semantics as the MEM_REF. */
5992 if (TREE_CODE (*t) == MEM_REF
5993 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5994 && integer_zerop (TREE_OPERAND (*t, 1))
5995 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5997 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5998 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5999 if (/* Same volatile qualification. */
6000 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6001 /* Same TBAA behavior with -fstrict-aliasing. */
6002 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6003 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6004 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6005 /* Same alignment. */
6006 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6007 /* We have to look out here to not drop a required conversion
6008 from the rhs to the lhs if *t appears on the lhs or vice-versa
6009 if it appears on the rhs. Thus require strict type
6010 compatibility. */
6011 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6013 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6014 res = true;
6018 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6019 && TREE_CODE (*t) == MEM_REF
6020 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6022 tree base;
6023 poly_int64 coffset;
6024 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6025 &coffset);
6026 if (base)
6028 gcc_assert (TREE_CODE (base) == MEM_REF);
6029 poly_int64 moffset;
6030 if (mem_ref_offset (base).to_shwi (&moffset))
6032 coffset += moffset;
6033 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6035 coffset += moffset;
6036 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6037 return true;
6043 /* Canonicalize TARGET_MEM_REF in particular with respect to
6044 the indexes becoming constant. */
6045 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6047 tree tem = maybe_fold_tmr (*t);
6048 if (tem)
6050 *t = tem;
6051 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6052 recompute_tree_invariant_for_addr_expr (*orig_t);
6053 res = true;
6057 return res;
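/* Illustrative example of the canonicalizations above: after an address
   like &a has been propagated into a dereference, MEM[(int *)&a, 0] is
   turned back into the plain reference 'a' when volatility, TBAA,
   alignment and type compatibility allow it, and MEM[&a.b, 4] is rebased
   through get_addr_base_and_unit_offset to a MEM_REF off &a with the
   component offset folded into the constant offset operand.  */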
6060 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6061 distinguishes both cases. */
6063 static bool
6064 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6066 bool changed = false;
6067 gimple *stmt = gsi_stmt (*gsi);
6068 bool nowarning = gimple_no_warning_p (stmt);
6069 unsigned i;
6070 fold_defer_overflow_warnings ();
6072 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6073 after propagation.
6074 ??? This shouldn't be done in generic folding but in the
6075 propagation helpers which also know whether an address was
6076 propagated.
6077 Also canonicalize operand order. */
6078 switch (gimple_code (stmt))
6080 case GIMPLE_ASSIGN:
6081 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6083 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6084 if ((REFERENCE_CLASS_P (*rhs)
6085 || TREE_CODE (*rhs) == ADDR_EXPR)
6086 && maybe_canonicalize_mem_ref_addr (rhs))
6087 changed = true;
6088 tree *lhs = gimple_assign_lhs_ptr (stmt);
6089 if (REFERENCE_CLASS_P (*lhs)
6090 && maybe_canonicalize_mem_ref_addr (lhs))
6091 changed = true;
6093 else
6095 /* Canonicalize operand order. */
6096 enum tree_code code = gimple_assign_rhs_code (stmt);
6097 if (TREE_CODE_CLASS (code) == tcc_comparison
6098 || commutative_tree_code (code)
6099 || commutative_ternary_tree_code (code))
6101 tree rhs1 = gimple_assign_rhs1 (stmt);
6102 tree rhs2 = gimple_assign_rhs2 (stmt);
6103 if (tree_swap_operands_p (rhs1, rhs2))
6105 gimple_assign_set_rhs1 (stmt, rhs2);
6106 gimple_assign_set_rhs2 (stmt, rhs1);
6107 if (TREE_CODE_CLASS (code) == tcc_comparison)
6108 gimple_assign_set_rhs_code (stmt,
6109 swap_tree_comparison (code));
6110 changed = true;
6114 break;
6115 case GIMPLE_CALL:
6117 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6119 tree *arg = gimple_call_arg_ptr (stmt, i);
6120 if (REFERENCE_CLASS_P (*arg)
6121 && maybe_canonicalize_mem_ref_addr (arg))
6122 changed = true;
6124 tree *lhs = gimple_call_lhs_ptr (stmt);
6125 if (*lhs
6126 && REFERENCE_CLASS_P (*lhs)
6127 && maybe_canonicalize_mem_ref_addr (lhs))
6128 changed = true;
6129 break;
6131 case GIMPLE_ASM:
6133 gasm *asm_stmt = as_a <gasm *> (stmt);
6134 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6136 tree link = gimple_asm_output_op (asm_stmt, i);
6137 tree op = TREE_VALUE (link);
6138 if (REFERENCE_CLASS_P (op)
6139 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6140 changed = true;
6142 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6144 tree link = gimple_asm_input_op (asm_stmt, i);
6145 tree op = TREE_VALUE (link);
6146 if ((REFERENCE_CLASS_P (op)
6147 || TREE_CODE (op) == ADDR_EXPR)
6148 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6149 changed = true;
6152 break;
6153 case GIMPLE_DEBUG:
6154 if (gimple_debug_bind_p (stmt))
6156 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6157 if (*val
6158 && (REFERENCE_CLASS_P (*val)
6159 || TREE_CODE (*val) == ADDR_EXPR)
6160 && maybe_canonicalize_mem_ref_addr (val, true))
6161 changed = true;
6163 break;
6164 case GIMPLE_COND:
6166 /* Canonicalize operand order. */
6167 tree lhs = gimple_cond_lhs (stmt);
6168 tree rhs = gimple_cond_rhs (stmt);
6169 if (tree_swap_operands_p (lhs, rhs))
6171 gcond *gc = as_a <gcond *> (stmt);
6172 gimple_cond_set_lhs (gc, rhs);
6173 gimple_cond_set_rhs (gc, lhs);
6174 gimple_cond_set_code (gc,
6175 swap_tree_comparison (gimple_cond_code (gc)));
6176 changed = true;
6179 default:;
6182 /* Dispatch to pattern-based folding. */
6183 if (!inplace
6184 || is_gimple_assign (stmt)
6185 || gimple_code (stmt) == GIMPLE_COND)
6187 gimple_seq seq = NULL;
6188 gimple_match_op res_op;
6189 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6190 valueize, valueize))
6192 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6193 changed = true;
6194 else
6195 gimple_seq_discard (seq);
6199 stmt = gsi_stmt (*gsi);
6201 /* Fold the main computation performed by the statement. */
6202 switch (gimple_code (stmt))
6204 case GIMPLE_ASSIGN:
6206 /* Try to canonicalize for boolean-typed X the comparisons
6207 X == 0, X == 1, X != 0, and X != 1. */
6208 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6209 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6211 tree lhs = gimple_assign_lhs (stmt);
6212 tree op1 = gimple_assign_rhs1 (stmt);
6213 tree op2 = gimple_assign_rhs2 (stmt);
6214 tree type = TREE_TYPE (op1);
6216 /* Check whether the comparison operands are of the same boolean
6217 type as the result type.
6218 Check that the second operand is an integer constant with value
6219 one or zero. */
6220 if (TREE_CODE (op2) == INTEGER_CST
6221 && (integer_zerop (op2) || integer_onep (op2))
6222 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6224 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6225 bool is_logical_not = false;
6227 /* X == 0 and X != 1 is a logical-not of X;
6228 X == 1 and X != 0 is X. */
6229 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6230 || (cmp_code == NE_EXPR && integer_onep (op2)))
6231 is_logical_not = true;
6233 if (is_logical_not == false)
6234 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6235 /* Only for one-bit precision typed X is the transformation
6236 !X -> ~X valid. */
6237 else if (TYPE_PRECISION (type) == 1)
6238 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6239 /* Otherwise we use !X -> X ^ 1. */
6240 else
6241 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6242 build_int_cst (type, 1));
6243 changed = true;
6244 break;
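          /* Illustrative examples of the canonicalization above for a
             boolean-typed X:
               Y = X != 0  becomes  Y = X
               Y = X == 0  becomes  Y = ~X      (one-bit precision)
               Y = X == 0  becomes  Y = X ^ 1   (wider precision).  */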
6248 unsigned old_num_ops = gimple_num_ops (stmt);
6249 tree lhs = gimple_assign_lhs (stmt);
6250 tree new_rhs = fold_gimple_assign (gsi);
6251 if (new_rhs
6252 && !useless_type_conversion_p (TREE_TYPE (lhs),
6253 TREE_TYPE (new_rhs)))
6254 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6255 if (new_rhs
6256 && (!inplace
6257 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6259 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6260 changed = true;
6262 break;
6265 case GIMPLE_CALL:
6266 changed |= gimple_fold_call (gsi, inplace);
6267 break;
6269 case GIMPLE_DEBUG:
6270 if (gimple_debug_bind_p (stmt))
6272 tree val = gimple_debug_bind_get_value (stmt);
6273 if (val
6274 && REFERENCE_CLASS_P (val))
6276 tree tem = maybe_fold_reference (val);
6277 if (tem)
6279 gimple_debug_bind_set_value (stmt, tem);
6280 changed = true;
6283 else if (val
6284 && TREE_CODE (val) == ADDR_EXPR)
6286 tree ref = TREE_OPERAND (val, 0);
6287 tree tem = maybe_fold_reference (ref);
6288 if (tem)
6290 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6291 gimple_debug_bind_set_value (stmt, tem);
6292 changed = true;
6296 break;
6298 case GIMPLE_RETURN:
6300 greturn *ret_stmt = as_a<greturn *> (stmt);
6301 tree ret = gimple_return_retval(ret_stmt);
6303 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6305 tree val = valueize (ret);
6306 if (val && val != ret
6307 && may_propagate_copy (ret, val))
6309 gimple_return_set_retval (ret_stmt, val);
6310 changed = true;
6314 break;
6316 default:;
6319 stmt = gsi_stmt (*gsi);
6321 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6322 return changed;
6325 /* Valueization callback that ends up not following SSA edges. */
6327 tree
6328 no_follow_ssa_edges (tree)
6330 return NULL_TREE;
6333 /* Valueization callback that ends up following single-use SSA edges only. */
6335 tree
6336 follow_single_use_edges (tree val)
6338 if (TREE_CODE (val) == SSA_NAME
6339 && !has_single_use (val))
6340 return NULL_TREE;
6341 return val;
6344 /* Valueization callback that follows all SSA edges. */
6346 tree
6347 follow_all_ssa_edges (tree val)
6349 return val;
6352 /* Fold the statement pointed to by GSI. In some cases, this function may
6353 replace the whole statement with a new one. Returns true iff folding
6354 makes any changes.
6355 The statement pointed to by GSI should be in valid gimple form but may
6356 be in an unfolded state resulting from, for example, constant propagation,
6357 which can produce *&x = 0. */
6359 bool
6360 fold_stmt (gimple_stmt_iterator *gsi)
6362 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6365 bool
6366 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6368 return fold_stmt_1 (gsi, false, valueize);
6371 /* Perform the minimal folding on statement *GSI. Only operations like
6372 *&x created by constant propagation are handled. The statement cannot
6373 be replaced with a new one. Return true if the statement was
6374 changed, false otherwise.
6375 The statement *GSI should be in valid gimple form but may
6376 be in an unfolded state resulting from, for example, constant propagation,
6377 which can produce *&x = 0. */
6379 bool
6380 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6382 gimple *stmt = gsi_stmt (*gsi);
6383 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6384 gcc_assert (gsi_stmt (*gsi) == stmt);
6385 return changed;
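/* A typical caller (hypothetical sketch using the standard iterator API)
   folds every statement of a basic block in place:

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
          !gsi_end_p (gsi); gsi_next (&gsi))
       fold_stmt_inplace (&gsi);

   Because the statement is never replaced, the iterator stays valid
   without any adjustment by the caller.  */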
6388 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6389 if EXPR is null or we don't know how.
6390 If non-null, the result always has boolean type. */
6392 static tree
6393 canonicalize_bool (tree expr, bool invert)
6395 if (!expr)
6396 return NULL_TREE;
6397 else if (invert)
6399 if (integer_nonzerop (expr))
6400 return boolean_false_node;
6401 else if (integer_zerop (expr))
6402 return boolean_true_node;
6403 else if (TREE_CODE (expr) == SSA_NAME)
6404 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6405 build_int_cst (TREE_TYPE (expr), 0));
6406 else if (COMPARISON_CLASS_P (expr))
6407 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6408 boolean_type_node,
6409 TREE_OPERAND (expr, 0),
6410 TREE_OPERAND (expr, 1));
6411 else
6412 return NULL_TREE;
6414 else
6416 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6417 return expr;
6418 if (integer_nonzerop (expr))
6419 return boolean_true_node;
6420 else if (integer_zerop (expr))
6421 return boolean_false_node;
6422 else if (TREE_CODE (expr) == SSA_NAME)
6423 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6424 build_int_cst (TREE_TYPE (expr), 0));
6425 else if (COMPARISON_CLASS_P (expr))
6426 return fold_build2 (TREE_CODE (expr),
6427 boolean_type_node,
6428 TREE_OPERAND (expr, 0),
6429 TREE_OPERAND (expr, 1));
6430 else
6431 return NULL_TREE;
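/* Illustrative examples (integer operands assumed, so comparison
   inversion is exact):
     canonicalize_bool (1, false)      -> boolean_true_node
     canonicalize_bool (x_1, true)     -> x_1 == 0
     canonicalize_bool (a < b, true)   -> a >= b.  */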
6435 /* Check to see if a boolean expression EXPR is logically equivalent to the
6436 comparison (OP1 CODE OP2). Check for various identities involving
6437 SSA_NAMEs. */
6439 static bool
6440 same_bool_comparison_p (const_tree expr, enum tree_code code,
6441 const_tree op1, const_tree op2)
6443 gimple *s;
6445 /* The obvious case. */
6446 if (TREE_CODE (expr) == code
6447 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6448 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6449 return true;
6451 /* Check for comparing (name, name != 0) and the case where expr
6452 is an SSA_NAME with a definition matching the comparison. */
6453 if (TREE_CODE (expr) == SSA_NAME
6454 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6456 if (operand_equal_p (expr, op1, 0))
6457 return ((code == NE_EXPR && integer_zerop (op2))
6458 || (code == EQ_EXPR && integer_nonzerop (op2)));
6459 s = SSA_NAME_DEF_STMT (expr);
6460 if (is_gimple_assign (s)
6461 && gimple_assign_rhs_code (s) == code
6462 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6463 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6464 return true;
6467 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6468 of name is a comparison, recurse. */
6469 if (TREE_CODE (op1) == SSA_NAME
6470 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6472 s = SSA_NAME_DEF_STMT (op1);
6473 if (is_gimple_assign (s)
6474 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6476 enum tree_code c = gimple_assign_rhs_code (s);
6477 if ((c == NE_EXPR && integer_zerop (op2))
6478 || (c == EQ_EXPR && integer_nonzerop (op2)))
6479 return same_bool_comparison_p (expr, c,
6480 gimple_assign_rhs1 (s),
6481 gimple_assign_rhs2 (s));
6482 if ((c == EQ_EXPR && integer_zerop (op2))
6483 || (c == NE_EXPR && integer_nonzerop (op2)))
6484 return same_bool_comparison_p (expr,
6485 invert_tree_comparison (c, false),
6486 gimple_assign_rhs1 (s),
6487 gimple_assign_rhs2 (s));
6490 return false;
6493 /* Check to see if two boolean expressions OP1 and OP2 are logically
6494 equivalent. */
6496 static bool
6497 same_bool_result_p (const_tree op1, const_tree op2)
6499 /* Simple cases first. */
6500 if (operand_equal_p (op1, op2, 0))
6501 return true;
6503 /* Check the cases where at least one of the operands is a comparison.
6504 These are a bit smarter than operand_equal_p in that they apply some
6505 identities on SSA_NAMEs. */
6506 if (COMPARISON_CLASS_P (op2)
6507 && same_bool_comparison_p (op1, TREE_CODE (op2),
6508 TREE_OPERAND (op2, 0),
6509 TREE_OPERAND (op2, 1)))
6510 return true;
6511 if (COMPARISON_CLASS_P (op1)
6512 && same_bool_comparison_p (op2, TREE_CODE (op1),
6513 TREE_OPERAND (op1, 0),
6514 TREE_OPERAND (op1, 1)))
6515 return true;
6517 /* Default case. */
6518 return false;
6521 /* Forward declarations for some mutually recursive functions. */
6523 static tree
6524 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6525 enum tree_code code2, tree op2a, tree op2b);
6526 static tree
6527 and_var_with_comparison (tree type, tree var, bool invert,
6528 enum tree_code code2, tree op2a, tree op2b);
6529 static tree
6530 and_var_with_comparison_1 (tree type, gimple *stmt,
6531 enum tree_code code2, tree op2a, tree op2b);
6532 static tree
6533 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6534 enum tree_code code2, tree op2a, tree op2b);
6535 static tree
6536 or_var_with_comparison (tree, tree var, bool invert,
6537 enum tree_code code2, tree op2a, tree op2b);
6538 static tree
6539 or_var_with_comparison_1 (tree, gimple *stmt,
6540 enum tree_code code2, tree op2a, tree op2b);
6542 /* Helper function for and_comparisons_1: try to simplify the AND of the
6543 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6544 If INVERT is true, invert the value of VAR before doing the AND.
6545 Return NULL_TREE if we can't simplify this to a single expression. */
6547 static tree
6548 and_var_with_comparison (tree type, tree var, bool invert,
6549 enum tree_code code2, tree op2a, tree op2b)
6551 tree t;
6552 gimple *stmt = SSA_NAME_DEF_STMT (var);
6554 /* We can only deal with variables whose definitions are assignments. */
6555 if (!is_gimple_assign (stmt))
6556 return NULL_TREE;
6558 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6559 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6560 Then we only have to consider the simpler non-inverted cases. */
6561 if (invert)
6562 t = or_var_with_comparison_1 (type, stmt,
6563 invert_tree_comparison (code2, false),
6564 op2a, op2b);
6565 else
6566 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6567 return canonicalize_bool (t, invert);
6570 /* Try to simplify the AND of the ssa variable defined by the assignment
6571 STMT with the comparison specified by (OP2A CODE2 OP2B).
6572 Return NULL_TREE if we can't simplify this to a single expression. */
6574 static tree
6575 and_var_with_comparison_1 (tree type, gimple *stmt,
6576 enum tree_code code2, tree op2a, tree op2b)
6578 tree var = gimple_assign_lhs (stmt);
6579 tree true_test_var = NULL_TREE;
6580 tree false_test_var = NULL_TREE;
6581 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6583 /* Check for identities like (var AND (var == 0)) => false. */
6584 if (TREE_CODE (op2a) == SSA_NAME
6585 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6587 if ((code2 == NE_EXPR && integer_zerop (op2b))
6588 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6590 true_test_var = op2a;
6591 if (var == true_test_var)
6592 return var;
6594 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6595 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6597 false_test_var = op2a;
6598 if (var == false_test_var)
6599 return boolean_false_node;
6603 /* If the definition is a comparison, recurse on it. */
6604 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6606 tree t = and_comparisons_1 (type, innercode,
6607 gimple_assign_rhs1 (stmt),
6608 gimple_assign_rhs2 (stmt),
6609 code2,
6610 op2a,
6611 op2b);
6612 if (t)
6613 return t;
6616 /* If the definition is an AND or OR expression, we may be able to
6617 simplify by reassociating. */
6618 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6619 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6621 tree inner1 = gimple_assign_rhs1 (stmt);
6622 tree inner2 = gimple_assign_rhs2 (stmt);
6623 gimple *s;
6624 tree t;
6625 tree partial = NULL_TREE;
6626 bool is_and = (innercode == BIT_AND_EXPR);
6628 /* Check for boolean identities that don't require recursive examination
6629 of inner1/inner2:
6630 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6631 inner1 AND (inner1 OR inner2) => inner1
6632 !inner1 AND (inner1 AND inner2) => false
6633 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6634 Likewise for similar cases involving inner2. */
6635 if (inner1 == true_test_var)
6636 return (is_and ? var : inner1);
6637 else if (inner2 == true_test_var)
6638 return (is_and ? var : inner2);
6639 else if (inner1 == false_test_var)
6640 return (is_and
6641 ? boolean_false_node
6642 : and_var_with_comparison (type, inner2, false, code2, op2a,
6643 op2b));
6644 else if (inner2 == false_test_var)
6645 return (is_and
6646 ? boolean_false_node
6647 : and_var_with_comparison (type, inner1, false, code2, op2a,
6648 op2b));
6650 /* Next, redistribute/reassociate the AND across the inner tests.
6651 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6652 if (TREE_CODE (inner1) == SSA_NAME
6653 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6654 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6655 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6656 gimple_assign_rhs1 (s),
6657 gimple_assign_rhs2 (s),
6658 code2, op2a, op2b)))
6660 /* Handle the AND case, where we are reassociating:
6661 (inner1 AND inner2) AND (op2a code2 op2b)
6662 => (t AND inner2)
6663 If the partial result t is a constant, we win. Otherwise
6664 continue on to try reassociating with the other inner test. */
6665 if (is_and)
6667 if (integer_onep (t))
6668 return inner2;
6669 else if (integer_zerop (t))
6670 return boolean_false_node;
6673 /* Handle the OR case, where we are redistributing:
6674 (inner1 OR inner2) AND (op2a code2 op2b)
6675 => (t OR (inner2 AND (op2a code2 op2b))) */
6676 else if (integer_onep (t))
6677 return boolean_true_node;
6679 /* Save partial result for later. */
6680 partial = t;
6683 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6684 if (TREE_CODE (inner2) == SSA_NAME
6685 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6686 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6687 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6688 gimple_assign_rhs1 (s),
6689 gimple_assign_rhs2 (s),
6690 code2, op2a, op2b)))
6692 /* Handle the AND case, where we are reassociating:
6693 (inner1 AND inner2) AND (op2a code2 op2b)
6694 => (inner1 AND t) */
6695 if (is_and)
6697 if (integer_onep (t))
6698 return inner1;
6699 else if (integer_zerop (t))
6700 return boolean_false_node;
6701 /* If both are the same, we can apply the identity
6702 (x AND x) == x. */
6703 else if (partial && same_bool_result_p (t, partial))
6704 return t;
6707 /* Handle the OR case, where we are redistributing:
6708 (inner1 OR inner2) AND (op2a code2 op2b)
6709 => (t OR (inner1 AND (op2a code2 op2b)))
6710 => (t OR partial) */
6711 else
6713 if (integer_onep (t))
6714 return boolean_true_node;
6715 else if (partial)
6717 /* We already got a simplification for the other
6718 operand to the redistributed OR expression. The
6719 interesting case is when at least one is false.
6720 Or, if both are the same, we can apply the identity
6721 (x OR x) == x. */
6722 if (integer_zerop (partial))
6723 return t;
6724 else if (integer_zerop (t))
6725 return partial;
6726 else if (same_bool_result_p (t, partial))
6727 return t;
6732 return NULL_TREE;
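/* Illustrative reassociation example for the code above: with boolean
   var = inner1 & inner2 and inner1 = (x < y), ANDing var with (x >= y)
   folds the first partial result inner1 & (x >= y) to false, so in the
   AND case the whole expression collapses to boolean_false_node.  */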
6735 /* Try to simplify the AND of two comparisons defined by
6736 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6737 If this can be done without constructing an intermediate value,
6738 return the resulting tree; otherwise NULL_TREE is returned.
6739 This function is deliberately asymmetric as it recurses on SSA_DEFs
6740 in the first comparison but not the second. */
6742 static tree
6743 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6744 enum tree_code code2, tree op2a, tree op2b)
6746 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6748 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6749 if (operand_equal_p (op1a, op2a, 0)
6750 && operand_equal_p (op1b, op2b, 0))
6752 /* Result will be either NULL_TREE, or a combined comparison. */
6753 tree t = combine_comparisons (UNKNOWN_LOCATION,
6754 TRUTH_ANDIF_EXPR, code1, code2,
6755 truth_type, op1a, op1b);
6756 if (t)
6757 return t;
6760 /* Likewise the swapped case of the above. */
6761 if (operand_equal_p (op1a, op2b, 0)
6762 && operand_equal_p (op1b, op2a, 0))
6764 /* Result will be either NULL_TREE, or a combined comparison. */
6765 tree t = combine_comparisons (UNKNOWN_LOCATION,
6766 TRUTH_ANDIF_EXPR, code1,
6767 swap_tree_comparison (code2),
6768 truth_type, op1a, op1b);
6769 if (t)
6770 return t;
6773 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6774 NAME's definition is a truth value. See if there are any simplifications
6775 that can be done against the NAME's definition. */
6776 if (TREE_CODE (op1a) == SSA_NAME
6777 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6778 && (integer_zerop (op1b) || integer_onep (op1b)))
6780 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6781 || (code1 == NE_EXPR && integer_onep (op1b)));
6782 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6783 switch (gimple_code (stmt))
6785 case GIMPLE_ASSIGN:
6786 /* Try to simplify by copy-propagating the definition. */
6787 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6788 op2b);
6790 case GIMPLE_PHI:
6791 /* If every argument to the PHI produces the same result when
6792 ANDed with the second comparison, we win.
6793 Do not do this unless the type is bool since we need a bool
6794 result here anyway. */
6795 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6797 tree result = NULL_TREE;
6798 unsigned i;
6799 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6801 tree arg = gimple_phi_arg_def (stmt, i);
6803 /* If this PHI has itself as an argument, ignore it.
6804 If all the other args produce the same result,
6805 we're still OK. */
6806 if (arg == gimple_phi_result (stmt))
6807 continue;
6808 else if (TREE_CODE (arg) == INTEGER_CST)
6810 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6812 if (!result)
6813 result = boolean_false_node;
6814 else if (!integer_zerop (result))
6815 return NULL_TREE;
6817 else if (!result)
6818 result = fold_build2 (code2, boolean_type_node,
6819 op2a, op2b);
6820 else if (!same_bool_comparison_p (result,
6821 code2, op2a, op2b))
6822 return NULL_TREE;
6824 else if (TREE_CODE (arg) == SSA_NAME
6825 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6827 tree temp;
6828 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6829 /* In simple cases we can look through PHI nodes,
6830 but we have to be careful with loops.
6831 See PR49073. */
6832 if (! dom_info_available_p (CDI_DOMINATORS)
6833 || gimple_bb (def_stmt) == gimple_bb (stmt)
6834 || dominated_by_p (CDI_DOMINATORS,
6835 gimple_bb (def_stmt),
6836 gimple_bb (stmt)))
6837 return NULL_TREE;
6838 temp = and_var_with_comparison (type, arg, invert, code2,
6839 op2a, op2b);
6840 if (!temp)
6841 return NULL_TREE;
6842 else if (!result)
6843 result = temp;
6844 else if (!same_bool_result_p (result, temp))
6845 return NULL_TREE;
6847 else
6848 return NULL_TREE;
6850 return result;
6853 default:
6854 break;
6857 return NULL_TREE;
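/* Same-operand examples handled through combine_comparisons above
   (illustrative, integer x and y):
     (x < y) AND (x == y) -> false
     (x < y) AND (x <= y) -> x < y
     (x < y) AND (y > x)  -> x < y   (the swapped-operand case).  */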
6860 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
6861 try to simplify the AND/OR of the ssa variable VAR with the comparison
6862 specified by (OP2A CODE2 OP2B) using match.pd. Return NULL_TREE if we can't
6863 simplify this to a single expression. To lower the cost of building
6864 SSA names / gimple stmts significantly, we allocate them on the stack.
6865 This makes the code a bit ugly. */
6867 static tree
6868 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6869 enum tree_code code1,
6870 tree op1a, tree op1b,
6871 enum tree_code code2, tree op2a,
6872 tree op2b)
6874 /* Allocate gimple stmt1 on the stack. */
6875 gassign *stmt1
6876 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6877 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6878 gimple_assign_set_rhs_code (stmt1, code1);
6879 gimple_assign_set_rhs1 (stmt1, op1a);
6880 gimple_assign_set_rhs2 (stmt1, op1b);
6882 /* Allocate gimple stmt2 on the stack. */
6883 gassign *stmt2
6884 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6885 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6886 gimple_assign_set_rhs_code (stmt2, code2);
6887 gimple_assign_set_rhs1 (stmt2, op2a);
6888 gimple_assign_set_rhs2 (stmt2, op2b);
6890 /* Allocate SSA names(lhs1) on the stack. */
6891 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6892 memset (lhs1, 0, sizeof (tree_ssa_name));
6893 TREE_SET_CODE (lhs1, SSA_NAME);
6894 TREE_TYPE (lhs1) = type;
6895 init_ssa_name_imm_use (lhs1);
6897 /* Allocate SSA names(lhs2) on the stack. */
6898 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6899 memset (lhs2, 0, sizeof (tree_ssa_name));
6900 TREE_SET_CODE (lhs2, SSA_NAME);
6901 TREE_TYPE (lhs2) = type;
6902 init_ssa_name_imm_use (lhs2);
6904 gimple_assign_set_lhs (stmt1, lhs1);
6905 gimple_assign_set_lhs (stmt2, lhs2);
6907 gimple_match_op op (gimple_match_cond::UNCOND, code,
6908 type, gimple_assign_lhs (stmt1),
6909 gimple_assign_lhs (stmt2));
6910 if (op.resimplify (NULL, follow_all_ssa_edges))
6912 if (gimple_simplified_result_is_gimple_val (&op))
6914 tree res = op.ops[0];
6915 if (res == lhs1)
6916 return build2 (code1, type, op1a, op1b);
6917 else if (res == lhs2)
6918 return build2 (code2, type, op2a, op2b);
6919 else
6920 return res;
6922 else if (op.code.is_tree_code ()
6923 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6925 tree op0 = op.ops[0];
6926 tree op1 = op.ops[1];
6927 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6928 return NULL_TREE; /* not simple */
6930 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6934 return NULL_TREE;
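/* Note on the implementation above: the XALLOCAVEC/XALLOCA buffers mean
   the temporary statements and SSA names never enter the IL or the SSA
   name table; they live only long enough for the match.pd machinery to
   look through them, keeping the common failing probe cheap.  */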
6937 /* Try to simplify the AND of two comparisons, specified by
6938 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6939 If this can be simplified to a single expression (without requiring
6940 introducing more SSA variables to hold intermediate values),
6941 return the resulting tree. Otherwise return NULL_TREE.
6942 If the result expression is non-null, it has boolean type. */
6944 tree
6945 maybe_fold_and_comparisons (tree type,
6946 enum tree_code code1, tree op1a, tree op1b,
6947 enum tree_code code2, tree op2a, tree op2b)
6949 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6950 return t;
6952 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6953 return t;
6955 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6956 op1a, op1b, code2, op2a,
6957 op2b))
6958 return t;
6960 return NULL_TREE;
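/* Illustrative use (integer SSA names x and y):
     maybe_fold_and_comparisons (boolean_type_node,
                                 LT_EXPR, x, y, EQ_EXPR, x, y)
   returns boolean_false_node, as x < y and x == y cannot both hold.  */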
6963 /* Helper function for or_comparisons_1: try to simplify the OR of the
6964 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6965 If INVERT is true, invert the value of VAR before doing the OR.
6966 Return NULL_TREE if we can't simplify this to a single expression. */
6968 static tree
6969 or_var_with_comparison (tree type, tree var, bool invert,
6970 enum tree_code code2, tree op2a, tree op2b)
6972 tree t;
6973 gimple *stmt = SSA_NAME_DEF_STMT (var);
6975 /* We can only deal with variables whose definitions are assignments. */
6976 if (!is_gimple_assign (stmt))
6977 return NULL_TREE;
6979 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6980 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6981 Then we only have to consider the simpler non-inverted cases. */
6982 if (invert)
6983 t = and_var_with_comparison_1 (type, stmt,
6984 invert_tree_comparison (code2, false),
6985 op2a, op2b);
6986 else
6987 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6988 return canonicalize_bool (t, invert);
6991 /* Try to simplify the OR of the ssa variable defined by the assignment
6992 STMT with the comparison specified by (OP2A CODE2 OP2B).
6993 Return NULL_TREE if we can't simplify this to a single expression. */
6995 static tree
6996 or_var_with_comparison_1 (tree type, gimple *stmt,
6997 enum tree_code code2, tree op2a, tree op2b)
6999 tree var = gimple_assign_lhs (stmt);
7000 tree true_test_var = NULL_TREE;
7001 tree false_test_var = NULL_TREE;
7002 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7004 /* Check for identities like (var OR (var != 0)) => true. */
7005 if (TREE_CODE (op2a) == SSA_NAME
7006 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7008 if ((code2 == NE_EXPR && integer_zerop (op2b))
7009 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7011 true_test_var = op2a;
7012 if (var == true_test_var)
7013 return var;
7015 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7016 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7018 false_test_var = op2a;
7019 if (var == false_test_var)
7020 return boolean_true_node;
7024 /* If the definition is a comparison, recurse on it. */
7025 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7027 tree t = or_comparisons_1 (type, innercode,
7028 gimple_assign_rhs1 (stmt),
7029 gimple_assign_rhs2 (stmt),
7030 code2,
7031 op2a,
7032 op2b);
7033 if (t)
7034 return t;
7037 /* If the definition is an AND or OR expression, we may be able to
7038 simplify by reassociating. */
7039 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7040 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7042 tree inner1 = gimple_assign_rhs1 (stmt);
7043 tree inner2 = gimple_assign_rhs2 (stmt);
7044 gimple *s;
7045 tree t;
7046 tree partial = NULL_TREE;
7047 bool is_or = (innercode == BIT_IOR_EXPR);
7049 /* Check for boolean identities that don't require recursive examination
7050 of inner1/inner2:
7051 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7052 inner1 OR (inner1 AND inner2) => inner1
7053 !inner1 OR (inner1 OR inner2) => true
7054 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 */
7056 if (inner1 == true_test_var)
7057 return (is_or ? var : inner1);
7058 else if (inner2 == true_test_var)
7059 return (is_or ? var : inner2);
7060 else if (inner1 == false_test_var)
7061 return (is_or
7062 ? boolean_true_node
7063 : or_var_with_comparison (type, inner2, false, code2, op2a,
7064 op2b));
7065 else if (inner2 == false_test_var)
7066 return (is_or
7067 ? boolean_true_node
7068 : or_var_with_comparison (type, inner1, false, code2, op2a,
7069 op2b));
7071 /* Next, redistribute/reassociate the OR across the inner tests.
7072 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7073 if (TREE_CODE (inner1) == SSA_NAME
7074 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7075 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7076 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7077 gimple_assign_rhs1 (s),
7078 gimple_assign_rhs2 (s),
7079 code2, op2a, op2b)))
7081 /* Handle the OR case, where we are reassociating:
7082 (inner1 OR inner2) OR (op2a code2 op2b)
7083 => (t OR inner2)
7084 If the partial result t is a constant, we win. Otherwise
7085 continue on to try reassociating with the other inner test. */
7086 if (is_or)
7088 if (integer_onep (t))
7089 return boolean_true_node;
7090 else if (integer_zerop (t))
7091 return inner2;
7094 /* Handle the AND case, where we are redistributing:
7095 (inner1 AND inner2) OR (op2a code2 op2b)
7096 => (t AND (inner2 OR (op2a code op2b))) */
7097 else if (integer_zerop (t))
7098 return boolean_false_node;
7100 /* Save partial result for later. */
7101 partial = t;
7104 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7105 if (TREE_CODE (inner2) == SSA_NAME
7106 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7107 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7108 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7109 gimple_assign_rhs1 (s),
7110 gimple_assign_rhs2 (s),
7111 code2, op2a, op2b)))
7113 /* Handle the OR case, where we are reassociating:
7114 (inner1 OR inner2) OR (op2a code2 op2b)
7115 => (inner1 OR t)
7116 => (t OR partial) */
7117 if (is_or)
7119 if (integer_zerop (t))
7120 return inner1;
7121 else if (integer_onep (t))
7122 return boolean_true_node;
7123 /* If both are the same, we can apply the identity
7124 (x OR x) == x. */
7125 else if (partial && same_bool_result_p (t, partial))
7126 return t;
7129 /* Handle the AND case, where we are redistributing:
7130 (inner1 AND inner2) OR (op2a code2 op2b)
7131 => (t AND (inner1 OR (op2a code2 op2b)))
7132 => (t AND partial) */
7133 else
7135 if (integer_zerop (t))
7136 return boolean_false_node;
7137 else if (partial)
7139 /* We already got a simplification for the other
7140 operand to the redistributed AND expression. The
7141 interesting case is when at least one is true.
7142 Or, if both are the same, we can apply the identity
7143 (x AND x) == x. */
7144 if (integer_onep (partial))
7145 return t;
7146 else if (integer_onep (t))
7147 return partial;
7148 else if (same_bool_result_p (t, partial))
7149 return t;
7154 return NULL_TREE;
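/* Dual of the AND reassociation example (illustrative): with boolean
   var = inner1 | inner2 and inner1 = (x < y), ORing var with (x >= y)
   folds inner1 | (x >= y) to true for integer operands, so the OR case
   yields boolean_true_node immediately.  */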
7157 /* Try to simplify the OR of two comparisons defined by
7158 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7159 If this can be done without constructing an intermediate value,
7160 return the resulting tree; otherwise NULL_TREE is returned.
7161 This function is deliberately asymmetric as it recurses on SSA_DEFs
7162 in the first comparison but not the second. */
7164 static tree
7165 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7166 enum tree_code code2, tree op2a, tree op2b)
7168 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7170 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7171 if (operand_equal_p (op1a, op2a, 0)
7172 && operand_equal_p (op1b, op2b, 0))
7174 /* Result will be either NULL_TREE, or a combined comparison. */
7175 tree t = combine_comparisons (UNKNOWN_LOCATION,
7176 TRUTH_ORIF_EXPR, code1, code2,
7177 truth_type, op1a, op1b);
7178 if (t)
7179 return t;
7182 /* Likewise the swapped case of the above. */
7183 if (operand_equal_p (op1a, op2b, 0)
7184 && operand_equal_p (op1b, op2a, 0))
7186 /* Result will be either NULL_TREE, or a combined comparison. */
7187 tree t = combine_comparisons (UNKNOWN_LOCATION,
7188 TRUTH_ORIF_EXPR, code1,
7189 swap_tree_comparison (code2),
7190 truth_type, op1a, op1b);
7191 if (t)
7192 return t;
7195 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7196 NAME's definition is a truth value. See if there are any simplifications
7197 that can be done against the NAME's definition. */
7198 if (TREE_CODE (op1a) == SSA_NAME
7199 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7200 && (integer_zerop (op1b) || integer_onep (op1b)))
7202 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7203 || (code1 == NE_EXPR && integer_onep (op1b)));
7204 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7205 switch (gimple_code (stmt))
7207 case GIMPLE_ASSIGN:
7208 /* Try to simplify by copy-propagating the definition. */
7209 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7210 op2b);
7212 case GIMPLE_PHI:
7213 /* If every argument to the PHI produces the same result when
7214 ORed with the second comparison, we win.
7215 Do not do this unless the type is bool since we need a bool
7216 result here anyway. */
7217 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7219 tree result = NULL_TREE;
7220 unsigned i;
7221 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7223 tree arg = gimple_phi_arg_def (stmt, i);
7225 /* If this PHI has itself as an argument, ignore it.
7226 If all the other args produce the same result,
7227 we're still OK. */
7228 if (arg == gimple_phi_result (stmt))
7229 continue;
7230 else if (TREE_CODE (arg) == INTEGER_CST)
7232 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7234 if (!result)
7235 result = boolean_true_node;
7236 else if (!integer_onep (result))
7237 return NULL_TREE;
7239 else if (!result)
7240 result = fold_build2 (code2, boolean_type_node,
7241 op2a, op2b);
7242 else if (!same_bool_comparison_p (result,
7243 code2, op2a, op2b))
7244 return NULL_TREE;
7246 else if (TREE_CODE (arg) == SSA_NAME
7247 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7249 tree temp;
7250 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7251 /* In simple cases we can look through PHI nodes,
7252 but we have to be careful with loops.
7253 See PR49073. */
7254 if (! dom_info_available_p (CDI_DOMINATORS)
7255 || gimple_bb (def_stmt) == gimple_bb (stmt)
7256 || dominated_by_p (CDI_DOMINATORS,
7257 gimple_bb (def_stmt),
7258 gimple_bb (stmt)))
7259 return NULL_TREE;
7260 temp = or_var_with_comparison (type, arg, invert, code2,
7261 op2a, op2b);
7262 if (!temp)
7263 return NULL_TREE;
7264 else if (!result)
7265 result = temp;
7266 else if (!same_bool_result_p (result, temp))
7267 return NULL_TREE;
7269 else
7270 return NULL_TREE;
7272 return result;
7275 default:
7276 break;
7279 return NULL_TREE;
7282 /* Try to simplify the OR of two comparisons, specified by
7283 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7284 If this can be simplified to a single expression (without requiring
7285 introducing more SSA variables to hold intermediate values),
7286 return the resulting tree. Otherwise return NULL_TREE.
7287 If the result expression is non-null, it has boolean type. */
7289 tree
7290 maybe_fold_or_comparisons (tree type,
7291 enum tree_code code1, tree op1a, tree op1b,
7292 enum tree_code code2, tree op2a, tree op2b)
7294 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7295 return t;
7297 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7298 return t;
7300 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7301 op1a, op1b, code2, op2a,
7302 op2b))
7303 return t;
7305 return NULL_TREE;
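/* Illustrative use (integer SSA names x and y):
     maybe_fold_or_comparisons (boolean_type_node,
                                LT_EXPR, x, y, EQ_EXPR, x, y)
   returns the combined comparison x <= y.  */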
7308 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7310 Either NULL_TREE, a simplified but non-constant expression, or a
7311 constant is returned.
7313 ??? This should go into a gimple-fold-inline.h file to be eventually
7314 privatized with the single valueize function used in the various TUs
7315 to avoid the indirect function call overhead. */
7317 tree
7318 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7319 tree (*gvalueize) (tree))
7321 gimple_match_op res_op;
7322 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7323 edges if there are intermediate VARYING defs. For this reason
7324 do not follow SSA edges here even though SCCVN can technically
7325 just deal fine with that. */
7326 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7328 tree res = NULL_TREE;
7329 if (gimple_simplified_result_is_gimple_val (&res_op))
7330 res = res_op.ops[0];
7331 else if (mprts_hook)
7332 res = mprts_hook (&res_op);
7333 if (res)
7335 if (dump_file && dump_flags & TDF_DETAILS)
7337 fprintf (dump_file, "Match-and-simplified ");
7338 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7339 fprintf (dump_file, " to ");
7340 print_generic_expr (dump_file, res);
7341 fprintf (dump_file, "\n");
7343 return res;
7347 location_t loc = gimple_location (stmt);
7348 switch (gimple_code (stmt))
7350 case GIMPLE_ASSIGN:
7352 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7354 switch (get_gimple_rhs_class (subcode))
7356 case GIMPLE_SINGLE_RHS:
7358 tree rhs = gimple_assign_rhs1 (stmt);
7359 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7361 if (TREE_CODE (rhs) == SSA_NAME)
7363 /* If the RHS is an SSA_NAME, return its known constant value,
7364 if any. */
7365 return (*valueize) (rhs);
7367 /* Handle propagating invariant addresses into address
7368 operations. */
7369 else if (TREE_CODE (rhs) == ADDR_EXPR
7370 && !is_gimple_min_invariant (rhs))
7372 poly_int64 offset = 0;
7373 tree base;
7374 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7375 &offset,
7376 valueize);
7377 if (base
7378 && (CONSTANT_CLASS_P (base)
7379 || decl_address_invariant_p (base)))
7380 return build_invariant_address (TREE_TYPE (rhs),
7381 base, offset);
7383 else if (TREE_CODE (rhs) == CONSTRUCTOR
7384 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7385 && known_eq (CONSTRUCTOR_NELTS (rhs),
7386 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7388 unsigned i, nelts;
7389 tree val;
7391 nelts = CONSTRUCTOR_NELTS (rhs);
7392 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7393 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7395 val = (*valueize) (val);
7396 if (TREE_CODE (val) == INTEGER_CST
7397 || TREE_CODE (val) == REAL_CST
7398 || TREE_CODE (val) == FIXED_CST)
7399 vec.quick_push (val);
7400 else
7401 return NULL_TREE;
7404 return vec.build ();
7406 if (subcode == OBJ_TYPE_REF)
7408 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7409 /* If callee is constant, we can fold away the wrapper. */
7410 if (is_gimple_min_invariant (val))
7411 return val;
7414 if (kind == tcc_reference)
7416 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7417 || TREE_CODE (rhs) == REALPART_EXPR
7418 || TREE_CODE (rhs) == IMAGPART_EXPR)
7419 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7421 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7422 return fold_unary_loc (EXPR_LOCATION (rhs),
7423 TREE_CODE (rhs),
7424 TREE_TYPE (rhs), val);
7426 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7427 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7429 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7430 return fold_ternary_loc (EXPR_LOCATION (rhs),
7431 TREE_CODE (rhs),
7432 TREE_TYPE (rhs), val,
7433 TREE_OPERAND (rhs, 1),
7434 TREE_OPERAND (rhs, 2));
7436 else if (TREE_CODE (rhs) == MEM_REF
7437 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7439 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7440 if (TREE_CODE (val) == ADDR_EXPR
7441 && is_gimple_min_invariant (val))
7443 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7444 unshare_expr (val),
7445 TREE_OPERAND (rhs, 1));
7446 if (tem)
7447 rhs = tem;
7450 return fold_const_aggregate_ref_1 (rhs, valueize);
7452 else if (kind == tcc_declaration)
7453 return get_symbol_constant_value (rhs);
7454 return rhs;
7457 case GIMPLE_UNARY_RHS:
7458 return NULL_TREE;
7460 case GIMPLE_BINARY_RHS:
7461 /* Translate &x + CST into an invariant form suitable for
7462 further propagation. */
7463 if (subcode == POINTER_PLUS_EXPR)
7465 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7466 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7467 if (TREE_CODE (op0) == ADDR_EXPR
7468 && TREE_CODE (op1) == INTEGER_CST)
7470 tree off = fold_convert (ptr_type_node, op1);
7471 return build1_loc
7472 (loc, ADDR_EXPR, TREE_TYPE (op0),
7473 fold_build2 (MEM_REF,
7474 TREE_TYPE (TREE_TYPE (op0)),
7475 unshare_expr (op0), off));
7478 /* Canonicalize bool != 0 and bool == 0 appearing after
7479 valueization. While gimple_simplify handles this
7480 it can get confused by the ~X == 1 -> X == 0 transform
7481 which we can't reduce to an SSA name or a constant
7482 (and we have no way to tell gimple_simplify to not
7483 consider those transforms in the first place). */
7484 else if (subcode == EQ_EXPR
7485 || subcode == NE_EXPR)
7487 tree lhs = gimple_assign_lhs (stmt);
7488 tree op0 = gimple_assign_rhs1 (stmt);
7489 if (useless_type_conversion_p (TREE_TYPE (lhs),
7490 TREE_TYPE (op0)))
7492 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7493 op0 = (*valueize) (op0);
7494 if (TREE_CODE (op0) == INTEGER_CST)
7495 std::swap (op0, op1);
7496 if (TREE_CODE (op1) == INTEGER_CST
7497 && ((subcode == NE_EXPR && integer_zerop (op1))
7498 || (subcode == EQ_EXPR && integer_onep (op1))))
7499 return op0;
7502 return NULL_TREE;
7504 case GIMPLE_TERNARY_RHS:
7506 /* Handle ternary operators that can appear in GIMPLE form. */
7507 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7508 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7509 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7510 return fold_ternary_loc (loc, subcode,
7511 gimple_expr_type (stmt), op0, op1, op2);
7514 default:
7515 gcc_unreachable ();
7519 case GIMPLE_CALL:
7521 tree fn;
7522 gcall *call_stmt = as_a <gcall *> (stmt);
7524 if (gimple_call_internal_p (stmt))
7526 enum tree_code subcode = ERROR_MARK;
7527 switch (gimple_call_internal_fn (stmt))
7529 case IFN_UBSAN_CHECK_ADD:
7530 subcode = PLUS_EXPR;
7531 break;
7532 case IFN_UBSAN_CHECK_SUB:
7533 subcode = MINUS_EXPR;
7534 break;
7535 case IFN_UBSAN_CHECK_MUL:
7536 subcode = MULT_EXPR;
7537 break;
7538 case IFN_BUILTIN_EXPECT:
7540 tree arg0 = gimple_call_arg (stmt, 0);
7541 tree op0 = (*valueize) (arg0);
7542 if (TREE_CODE (op0) == INTEGER_CST)
7543 return op0;
7544 return NULL_TREE;
7546 default:
7547 return NULL_TREE;
7549 tree arg0 = gimple_call_arg (stmt, 0);
7550 tree arg1 = gimple_call_arg (stmt, 1);
7551 tree op0 = (*valueize) (arg0);
7552 tree op1 = (*valueize) (arg1);
7554 if (TREE_CODE (op0) != INTEGER_CST
7555 || TREE_CODE (op1) != INTEGER_CST)
7557 switch (subcode)
7559 case MULT_EXPR:
7560 /* x * 0 = 0 * x = 0 without overflow. */
7561 if (integer_zerop (op0) || integer_zerop (op1))
7562 return build_zero_cst (TREE_TYPE (arg0));
7563 break;
7564 case MINUS_EXPR:
7565 /* y - y = 0 without overflow. */
7566 if (operand_equal_p (op0, op1, 0))
7567 return build_zero_cst (TREE_TYPE (arg0));
7568 break;
7569 default:
7570 break;
7573 tree res
7574 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7575 if (res
7576 && TREE_CODE (res) == INTEGER_CST
7577 && !TREE_OVERFLOW (res))
7578 return res;
7579 return NULL_TREE;
7582 fn = (*valueize) (gimple_call_fn (stmt));
7583 if (TREE_CODE (fn) == ADDR_EXPR
7584 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7585 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7586 && gimple_builtin_call_types_compatible_p (stmt,
7587 TREE_OPERAND (fn, 0)))
7589 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7590 tree retval;
7591 unsigned i;
7592 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7593 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7594 retval = fold_builtin_call_array (loc,
7595 gimple_call_return_type (call_stmt),
7596 fn, gimple_call_num_args (stmt), args);
7597 if (retval)
7599 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7600 STRIP_NOPS (retval);
7601 retval = fold_convert (gimple_call_return_type (call_stmt),
7602 retval);
7604 return retval;
7606 return NULL_TREE;
7609 default:
7610 return NULL_TREE;
7614 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7615 Returns NULL_TREE if folding to a constant is not possible, otherwise
7616 returns a constant according to is_gimple_min_invariant. */
7618 tree
7619 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7621 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7622 if (res && is_gimple_min_invariant (res))
7623 return res;
7624 return NULL_TREE;
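/* A minimal usage sketch (hypothetical helper, kept out of the build):
   with a valueize callback that simply returns its argument, only
   statements whose operands are already constant can fold.  */
#if 0
static tree
valueize_identity (tree t)
{
  return t;
}

/* For "x_1 = 2 * 3;" this yields the INTEGER_CST 6; for anything
   depending on a non-constant SSA name it yields NULL_TREE.  */
tree cst = gimple_fold_stmt_to_constant (stmt, valueize_identity);
#endif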
7628 /* The following set of functions is supposed to fold references using
7629 their constant initializers. */
7631 /* See if we can find the constructor defining the value of BASE.
7632 When the constructor is found at a constant offset (such as when
7633 BASE is array[40] and we know the constructor of array), then
7634 BIT_OFFSET is adjusted accordingly.
7636 As a special case, return error_mark_node when constructor
7637 is not explicitly available, but it is known to be zero
7638 such as 'static const int a;'. */
7639 static tree
7640 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7641 tree (*valueize)(tree))
7643 poly_int64 bit_offset2, size, max_size;
7644 bool reverse;
7646 if (TREE_CODE (base) == MEM_REF)
7648 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7649 if (!boff.to_shwi (bit_offset))
7650 return NULL_TREE;
7652 if (valueize
7653 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7654 base = valueize (TREE_OPERAND (base, 0));
7655 if (!base || TREE_CODE (base) != ADDR_EXPR)
7656 return NULL_TREE;
7657 base = TREE_OPERAND (base, 0);
7659 else if (valueize
7660 && TREE_CODE (base) == SSA_NAME)
7661 base = valueize (base);
7663 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7664 DECL_INITIAL. If BASE is a nested reference into another
7665 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7666 the inner reference. */
7667 switch (TREE_CODE (base))
7669 case VAR_DECL:
7670 case CONST_DECL:
7672 tree init = ctor_for_folding (base);
7674 /* Our semantics are the exact opposite of ctor_for_folding's:
7675 NULL means unknown, while error_mark_node means 0. */
7676 if (init == error_mark_node)
7677 return NULL_TREE;
7678 if (!init)
7679 return error_mark_node;
7680 return init;
7683 case VIEW_CONVERT_EXPR:
7684 return get_base_constructor (TREE_OPERAND (base, 0),
7685 bit_offset, valueize);
7687 case ARRAY_REF:
7688 case COMPONENT_REF:
7689 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7690 &reverse);
7691 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7692 return NULL_TREE;
7693 *bit_offset += bit_offset2;
7694 return get_base_constructor (base, bit_offset, valueize);
7696 case CONSTRUCTOR:
7697 return base;
7699 default:
7700 if (CONSTANT_CLASS_P (base))
7701 return base;
7703 return NULL_TREE;
7707 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7708 to the memory at bit OFFSET. When non-null, TYPE is the expected
7709 type of the reference; otherwise the type of the referenced element
7710 is used instead. When SIZE is zero, attempt to fold a reference to
7711 the entire element which OFFSET refers to. Increment *SUBOFF by
7712 the bit offset of the accessed element. */
7714 static tree
7715 fold_array_ctor_reference (tree type, tree ctor,
7716 unsigned HOST_WIDE_INT offset,
7717 unsigned HOST_WIDE_INT size,
7718 tree from_decl,
7719 unsigned HOST_WIDE_INT *suboff)
7721 offset_int low_bound;
7722 offset_int elt_size;
7723 offset_int access_index;
7724 tree domain_type = NULL_TREE;
7725 HOST_WIDE_INT inner_offset;
7727 /* Compute low bound and elt size. */
7728 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7729 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7730 if (domain_type && TYPE_MIN_VALUE (domain_type))
7732 /* Static constructors for variably sized objects make no sense. */
7733 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7734 return NULL_TREE;
7735 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7737 else
7738 low_bound = 0;
7739 /* Static constructors for variably sized objects make no sense. */
7740 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7741 return NULL_TREE;
7742 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7744 /* When TYPE is non-null, verify that it specifies a constant-sized
7745 access of a multiple of the array element size. Avoid division
7746 by zero below when ELT_SIZE is zero, such as with the result of
7747 an initializer for a zero-length array or an empty struct. */
7748 if (elt_size == 0
7749 || (type
7750 && (!TYPE_SIZE_UNIT (type)
7751 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7752 return NULL_TREE;
7754 /* Compute the array index we look for. */
7755 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7756 elt_size);
7757 access_index += low_bound;
7759 /* And offset within the access. */
7760 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7762 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7763 if (size > elt_sz * BITS_PER_UNIT)
7765 /* native_encode_expr constraints. */
7766 if (size > MAX_BITSIZE_MODE_ANY_MODE
7767 || size % BITS_PER_UNIT != 0
7768 || inner_offset % BITS_PER_UNIT != 0
7769 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7770 return NULL_TREE;
7772 unsigned ctor_idx;
7773 tree val = get_array_ctor_element_at_index (ctor, access_index,
7774 &ctor_idx);
7775 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7776 return build_zero_cst (type);
7778 /* native-encode adjacent ctor elements. */
7779 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7780 unsigned bufoff = 0;
7781 offset_int index = 0;
7782 offset_int max_index = access_index;
7783 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7784 if (!val)
7785 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7786 else if (!CONSTANT_CLASS_P (val))
7787 return NULL_TREE;
7788 if (!elt->index)
7790 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7792 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7793 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7795 else
7796 index = max_index = wi::to_offset (elt->index);
7797 index = wi::umax (index, access_index);
7800 if (bufoff + elt_sz > sizeof (buf))
7801 elt_sz = sizeof (buf) - bufoff;
7802 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7803 inner_offset / BITS_PER_UNIT);
7804 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7805 return NULL_TREE;
7806 inner_offset = 0;
7807 bufoff += len;
7809 access_index += 1;
7810 if (wi::cmpu (access_index, index) == 0)
7811 val = elt->value;
7812 else if (wi::cmpu (access_index, max_index) > 0)
7814 ctor_idx++;
7815 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7817 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7818 ++max_index;
7820 else
7822 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7823 index = 0;
7824 max_index = access_index;
7825 if (!elt->index)
7827 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7829 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7830 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7832 else
7833 index = max_index = wi::to_offset (elt->index);
7834 index = wi::umax (index, access_index);
7835 if (wi::cmpu (access_index, index) == 0)
7836 val = elt->value;
7837 else
7838 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7842 while (bufoff < size / BITS_PER_UNIT);
7843 *suboff += size;
7844 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7847 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7849 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7851 /* For the final reference to the entire accessed element
7852 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7853 may be null) in favor of the type of the element, and set
7854 SIZE to the size of the accessed element. */
7855 inner_offset = 0;
7856 type = TREE_TYPE (val);
7857 size = elt_sz * BITS_PER_UNIT;
7859 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7860 && TREE_CODE (val) == CONSTRUCTOR
7861 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7862 /* If this isn't the last element in the CTOR, is itself a CTOR,
7863 and does not cover the whole object we are requesting, give up
7864 since we're not set up for combining from multiple CTORs. */
7865 return NULL_TREE;
7867 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7868 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7869 suboff);
7872 /* Memory not explicitly mentioned in constructor is 0 (or
7873 the reference is out of range). */
7874 return type ? build_zero_cst (type) : NULL_TREE;
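/* The index arithmetic above in a standalone sketch (kept out of the
   build; assumes BITS_PER_UNIT == 8 and a zero low bound): a bit
   OFFSET of 72 into an array of 4-byte elements lands in element 2,
   at bit 8 within that element.  */
#if 0
static void
array_ctor_index_example (void)
{
  unsigned long offset = 72;		/* bit offset of the access */
  unsigned long elt_size = 4;		/* element size in bytes */
  unsigned long access_index = offset / 8 / elt_size;	 /* 72/8/4 == 2 */
  unsigned long inner_offset = offset % (elt_size * 8); /* 72%32 == 8 */
}
#endif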
7877 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7878 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7879 is the expected type of the reference; otherwise the type of
7880 the referenced member is used instead. When SIZE is zero,
7881 attempt to fold a reference to the entire member which OFFSET
7882 refers to. Increment *SUBOFF by the bit offset
7883 of the accessed member. */
7885 static tree
7886 fold_nonarray_ctor_reference (tree type, tree ctor,
7887 unsigned HOST_WIDE_INT offset,
7888 unsigned HOST_WIDE_INT size,
7889 tree from_decl,
7890 unsigned HOST_WIDE_INT *suboff)
7892 unsigned HOST_WIDE_INT cnt;
7893 tree cfield, cval;
7895 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7896 cval)
7898 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7899 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7900 tree field_size = DECL_SIZE (cfield);
7902 if (!field_size)
7904 /* Determine the size of the flexible array member from
7905 the size of the initializer provided for it. */
7906 field_size = TYPE_SIZE (TREE_TYPE (cval));
7909 /* Variable sized objects in static constructors make no sense,
7910 but field_size can be NULL for flexible array members. */
7911 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7912 && TREE_CODE (byte_offset) == INTEGER_CST
7913 && (field_size != NULL_TREE
7914 ? TREE_CODE (field_size) == INTEGER_CST
7915 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7917 /* Compute bit offset of the field. */
7918 offset_int bitoffset
7919 = (wi::to_offset (field_offset)
7920 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7921 /* Compute bit offset where the field ends. */
7922 offset_int bitoffset_end;
7923 if (field_size != NULL_TREE)
7924 bitoffset_end = bitoffset + wi::to_offset (field_size);
7925 else
7926 bitoffset_end = 0;
7928 /* Compute the bit offset of the end of the desired access.
7929 As a special case, if the size of the desired access is
7930 zero, assume the access is to the entire field (and let
7931 the caller make any necessary adjustments by storing
7932 the actual bounds of the field in FIELDBOUNDS). */
7933 offset_int access_end = offset_int (offset);
7934 if (size)
7935 access_end += size;
7936 else
7937 access_end = bitoffset_end;
7939 /* Is there any overlap between the desired access at
7940 [OFFSET, OFFSET+SIZE) and the offset of the field within
7941 the object at [BITOFFSET, BITOFFSET_END)? */
7942 if (wi::cmps (access_end, bitoffset) > 0
7943 && (field_size == NULL_TREE
7944 || wi::lts_p (offset, bitoffset_end)))
7946 *suboff += bitoffset.to_uhwi ();
7948 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7950 /* For the final reference to the entire accessed member
7951 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7952 be null) in favor of the type of the member, and set
7953 SIZE to the size of the accessed member. */
7954 offset = bitoffset.to_uhwi ();
7955 type = TREE_TYPE (cval);
7956 size = (bitoffset_end - bitoffset).to_uhwi ();
7959 /* We do have overlap. Now see if the field is large enough
7960 to cover the access. Give up for accesses that extend
7961 beyond the end of the object or that span multiple fields. */
7962 if (wi::cmps (access_end, bitoffset_end) > 0)
7963 return NULL_TREE;
7964 if (offset < bitoffset)
7965 return NULL_TREE;
7967 offset_int inner_offset = offset_int (offset) - bitoffset;
7968 return fold_ctor_reference (type, cval,
7969 inner_offset.to_uhwi (), size,
7970 from_decl, suboff);
7974 if (!type)
7975 return NULL_TREE;
7977 return build_zero_cst (type);
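/* The overlap test above in a standalone sketch (kept out of the
   build): the access [OFFSET, OFFSET+SIZE) overlaps the field
   [BITOFFSET, BITOFFSET_END) iff the access ends after the field
   starts and begins before the field ends.  */
#if 0
static bool
access_overlaps_field_p (long offset, long size,
			 long bitoffset, long bitoffset_end)
{
  return offset + size > bitoffset && offset < bitoffset_end;
}
#endif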
7980 /* CTOR is the value initializing the memory.
7981 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7982 is zero, attempt to fold a reference to the entire subobject
7983 which OFFSET refers to. This is used when folding accesses to
7984 string members of aggregates. When non-null, set *SUBOFF to
7985 the bit offset of the accessed subobject. */
7987 tree
7988 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7989 const poly_uint64 &poly_size, tree from_decl,
7990 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7992 tree ret;
7994 /* We found the field with an exact match. */
7995 if (type
7996 && useless_type_conversion_p (type, TREE_TYPE (ctor))
7997 && known_eq (poly_offset, 0U))
7998 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8000 /* The remaining optimizations need a constant size and offset. */
8001 unsigned HOST_WIDE_INT size, offset;
8002 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8003 return NULL_TREE;
8005 /* We are at the end of the walk; see if we can view-convert the
8006 result. */
8007 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8008 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8009 && !compare_tree_int (TYPE_SIZE (type), size)
8010 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
8012 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8013 if (ret)
8015 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8016 if (ret)
8017 STRIP_USELESS_TYPE_CONVERSION (ret);
8019 return ret;
8021 /* For constants and byte-aligned/sized reads try to go through
8022 native_encode/interpret. */
8023 if (CONSTANT_CLASS_P (ctor)
8024 && BITS_PER_UNIT == 8
8025 && offset % BITS_PER_UNIT == 0
8026 && offset / BITS_PER_UNIT <= INT_MAX
8027 && size % BITS_PER_UNIT == 0
8028 && size <= MAX_BITSIZE_MODE_ANY_MODE
8029 && can_native_interpret_type_p (type))
8031 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8032 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8033 offset / BITS_PER_UNIT);
8034 if (len > 0)
8035 return native_interpret_expr (type, buf, len);
8037 if (TREE_CODE (ctor) == CONSTRUCTOR)
8039 unsigned HOST_WIDE_INT dummy = 0;
8040 if (!suboff)
8041 suboff = &dummy;
8043 tree ret;
8044 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8045 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8046 ret = fold_array_ctor_reference (type, ctor, offset, size,
8047 from_decl, suboff);
8048 else
8049 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8050 from_decl, suboff);
8052 /* Fall back to native_encode_initializer. This needs to be done
8053 only in the outermost fold_ctor_reference call (because it itself
8054 recurses into CONSTRUCTORs) and doesn't update suboff. */
8055 if (ret == NULL_TREE
8056 && suboff == &dummy
8057 && BITS_PER_UNIT == 8
8058 && offset % BITS_PER_UNIT == 0
8059 && offset / BITS_PER_UNIT <= INT_MAX
8060 && size % BITS_PER_UNIT == 0
8061 && size <= MAX_BITSIZE_MODE_ANY_MODE
8062 && can_native_interpret_type_p (type))
8064 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8065 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8066 offset / BITS_PER_UNIT);
8067 if (len > 0)
8068 return native_interpret_expr (type, buf, len);
8071 return ret;
8074 return NULL_TREE;
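/* The native_encode/native_interpret path above is in essence a
   byte-level reinterpretation of a constant.  A standalone analogue
   (kept out of the build; assumes 'float' and 'int' have equal size):  */
#if 0
static int
view_convert_example (float f)
{
  int i;
  memcpy (&i, &f, sizeof i);	/* reuse the bytes of F as an int */
  return i;
}
#endif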
8077 /* Return the tree representing the element referenced by T if T is an
8078 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8079 names using VALUEIZE. Return NULL_TREE otherwise. */
8081 tree
8082 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8084 tree ctor, idx, base;
8085 poly_int64 offset, size, max_size;
8086 tree tem;
8087 bool reverse;
8089 if (TREE_THIS_VOLATILE (t))
8090 return NULL_TREE;
8092 if (DECL_P (t))
8093 return get_symbol_constant_value (t);
8095 tem = fold_read_from_constant_string (t);
8096 if (tem)
8097 return tem;
8099 switch (TREE_CODE (t))
8101 case ARRAY_REF:
8102 case ARRAY_RANGE_REF:
8103 /* Constant indexes are handled well by get_base_constructor.
8104 Only special case variable offsets.
8105 FIXME: This code can't handle nested references with variable indexes
8106 (they will be handled only by iteration of ccp). Perhaps we can bring
8107 get_ref_base_and_extent here and make it use a valueize callback. */
8108 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8109 && valueize
8110 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8111 && poly_int_tree_p (idx))
8113 tree low_bound, unit_size;
8115 /* If the resulting bit-offset is constant, track it. */
8116 if ((low_bound = array_ref_low_bound (t),
8117 poly_int_tree_p (low_bound))
8118 && (unit_size = array_ref_element_size (t),
8119 tree_fits_uhwi_p (unit_size)))
8121 poly_offset_int woffset
8122 = wi::sext (wi::to_poly_offset (idx)
8123 - wi::to_poly_offset (low_bound),
8124 TYPE_PRECISION (sizetype));
8125 woffset *= tree_to_uhwi (unit_size);
8126 woffset *= BITS_PER_UNIT;
8127 if (woffset.to_shwi (&offset))
8129 base = TREE_OPERAND (t, 0);
8130 ctor = get_base_constructor (base, &offset, valueize);
8131 /* Empty constructor. Always fold to 0. */
8132 if (ctor == error_mark_node)
8133 return build_zero_cst (TREE_TYPE (t));
8134 /* Out-of-bounds array access. The value is undefined,
8135 but don't fold. */
8136 if (maybe_lt (offset, 0))
8137 return NULL_TREE;
8138 /* We cannot determine ctor. */
8139 if (!ctor)
8140 return NULL_TREE;
8141 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8142 tree_to_uhwi (unit_size)
8143 * BITS_PER_UNIT,
8144 base);
8148 /* Fallthru. */
8150 case COMPONENT_REF:
8151 case BIT_FIELD_REF:
8152 case TARGET_MEM_REF:
8153 case MEM_REF:
8154 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8155 ctor = get_base_constructor (base, &offset, valueize);
8157 /* Empty constructor. Always fold to 0. */
8158 if (ctor == error_mark_node)
8159 return build_zero_cst (TREE_TYPE (t));
8160 /* We do not know the precise address. */
8161 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8162 return NULL_TREE;
8163 /* We cannot determine ctor. */
8164 if (!ctor)
8165 return NULL_TREE;
8167 /* Out-of-bounds array access. The value is undefined, but don't fold. */
8168 if (maybe_lt (offset, 0))
8169 return NULL_TREE;
8171 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8172 if (tem)
8173 return tem;
8175 /* For bit field reads try to read the representative and
8176 adjust. */
8177 if (TREE_CODE (t) == COMPONENT_REF
8178 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8179 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8181 HOST_WIDE_INT csize, coffset;
8182 tree field = TREE_OPERAND (t, 1);
8183 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8184 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8185 && size.is_constant (&csize)
8186 && offset.is_constant (&coffset)
8187 && (coffset % BITS_PER_UNIT != 0
8188 || csize % BITS_PER_UNIT != 0)
8189 && !reverse
8190 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8192 poly_int64 bitoffset;
8193 poly_uint64 field_offset, repr_offset;
8194 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8195 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8196 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8197 else
8198 bitoffset = 0;
8199 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8200 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8201 HOST_WIDE_INT bitoff;
8202 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8203 - TYPE_PRECISION (TREE_TYPE (field)));
8204 if (bitoffset.is_constant (&bitoff)
8205 && bitoff >= 0
8206 && bitoff <= diff)
8208 offset -= bitoff;
8209 size = tree_to_uhwi (DECL_SIZE (repr));
8211 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8212 size, base);
8213 if (tem && TREE_CODE (tem) == INTEGER_CST)
8215 if (!BYTES_BIG_ENDIAN)
8216 tem = wide_int_to_tree (TREE_TYPE (field),
8217 wi::lrshift (wi::to_wide (tem),
8218 bitoff));
8219 else
8220 tem = wide_int_to_tree (TREE_TYPE (field),
8221 wi::lrshift (wi::to_wide (tem),
8222 diff - bitoff));
8223 return tem;
8228 break;
8230 case REALPART_EXPR:
8231 case IMAGPART_EXPR:
8233 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8234 if (c && TREE_CODE (c) == COMPLEX_CST)
8235 return fold_build1_loc (EXPR_LOCATION (t),
8236 TREE_CODE (t), TREE_TYPE (t), c);
8237 break;
8240 default:
8241 break;
8244 return NULL_TREE;
8247 tree
8248 fold_const_aggregate_ref (tree t)
8250 return fold_const_aggregate_ref_1 (t, NULL);
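/* A minimal usage sketch (kept out of the build): given
     static const int a[4] = { 1, 2, 3, 4 };
   and T being the ARRAY_REF a[2], the call below folds to the
   INTEGER_CST 3; it returns NULL_TREE whenever the reference cannot
   be resolved from a constant initializer.  */
#if 0
tree val = fold_const_aggregate_ref (t);
#endif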
8253 /* Look up the virtual method with index TOKEN in the virtual table V
8254 at OFFSET.
8255 If CAN_REFER is non-NULL, set it to false if the method is not
8256 referable or if the virtual table is ill-formed (such as rewritten
8257 by a non-C++-produced symbol); in that case just return NULL_TREE. */
8259 tree
8260 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8261 tree v,
8262 unsigned HOST_WIDE_INT offset,
8263 bool *can_refer)
8265 tree vtable = v, init, fn;
8266 unsigned HOST_WIDE_INT size;
8267 unsigned HOST_WIDE_INT elt_size, access_index;
8268 tree domain_type;
8270 if (can_refer)
8271 *can_refer = true;
8273 /* First of all, double-check we have a virtual table. */
8274 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8276 /* Pass down that we lost track of the target. */
8277 if (can_refer)
8278 *can_refer = false;
8279 return NULL_TREE;
8282 init = ctor_for_folding (v);
8284 /* Virtual tables should always be born with constructors
8285 and we should always assume that they are available for
8286 folding. At the moment we do not stream them in all cases,
8287 but it should never happen that the ctor seems unreachable. */
8288 gcc_assert (init);
8289 if (init == error_mark_node)
8291 /* Pass down that we lost track of the target. */
8292 if (can_refer)
8293 *can_refer = false;
8294 return NULL_TREE;
8296 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8297 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8298 offset *= BITS_PER_UNIT;
8299 offset += token * size;
8301 /* Look up the value in the constructor, which is assumed to be an array.
8302 This is equivalent to
8303 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8304 offset, size, NULL);
8305 but in constant time. We expect that the frontend produced a simple
8306 array without indexed initializers. */
8308 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8309 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8310 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8311 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8313 access_index = offset / BITS_PER_UNIT / elt_size;
8314 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8316 /* The C++ FE can now produce indexed fields, and we check if the indexes
8317 match. */
8318 if (access_index < CONSTRUCTOR_NELTS (init))
8320 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8321 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8322 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8323 STRIP_NOPS (fn);
8325 else
8326 fn = NULL;
8328 /* For a type-inconsistent program we may end up looking up a virtual
8329 method in a virtual table that does not contain TOKEN entries. We may
8330 overrun the virtual table and pick up a constant or RTTI info pointer.
8331 In any case the call is undefined. */
8332 if (!fn
8333 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8334 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8335 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8336 else
8338 fn = TREE_OPERAND (fn, 0);
8340 /* When the cgraph node is missing and the function is not public, we
8341 cannot devirtualize. This can happen in WHOPR when the actual method
8342 ends up in another partition because we found the devirtualization
8343 opportunity too late. */
8344 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8346 if (can_refer)
8348 *can_refer = false;
8349 return fn;
8351 return NULL_TREE;
8355 /* Make sure we create a cgraph node for functions we'll reference.
8356 They can be non-existent if the reference comes from an entry
8357 of an external vtable for example. */
8358 cgraph_node::get_create (fn);
8360 return fn;
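/* The index arithmetic above in a standalone sketch (kept out of the
   build; assumes 8-byte vtable slots, i.e. ELT_SIZE == 8 and
   SIZE == 64 bits): TOKEN 2 at byte OFFSET 16 selects constructor
   element (16*8 + 2*64) / 8 / 8 == 4.  */
#if 0
static unsigned long
vtable_access_index_example (unsigned long token, unsigned long offset)
{
  unsigned long size = 64;	/* bits per vtable entry */
  unsigned long elt_size = 8;	/* bytes per vtable entry */
  unsigned long bits = offset * 8 + token * size;
  return bits / 8 / elt_size;	/* == 4 for (2, 16) */
}
#endif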
8363 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8364 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8365 KNOWN_BINFO carries the binfo describing the true type of
8366 OBJ_TYPE_REF_OBJECT(REF).
8367 If CAN_REFER is non-NULL, set it to false if the method is not
8368 referable or if the virtual table is ill-formed (such as rewritten
8369 by a non-C++-produced symbol); in that case just return NULL_TREE. */
8371 tree
8372 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8373 bool *can_refer)
8375 unsigned HOST_WIDE_INT offset;
8376 tree v;
8378 v = BINFO_VTABLE (known_binfo);
8379 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
8380 if (!v)
8381 return NULL_TREE;
8383 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8385 if (can_refer)
8386 *can_refer = false;
8387 return NULL_TREE;
8389 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8392 /* Given a pointer value T, return a simplified version of an
8393 indirection through T, or NULL_TREE if no simplification is
8394 possible. Note that the resulting type may differ from the
8395 pointed-to type, but it is still compatible with it from the
8396 langhooks point of view. */
8398 tree
8399 gimple_fold_indirect_ref (tree t)
8401 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8402 tree sub = t;
8403 tree subtype;
8405 STRIP_NOPS (sub);
8406 subtype = TREE_TYPE (sub);
8407 if (!POINTER_TYPE_P (subtype)
8408 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8409 return NULL_TREE;
8411 if (TREE_CODE (sub) == ADDR_EXPR)
8413 tree op = TREE_OPERAND (sub, 0);
8414 tree optype = TREE_TYPE (op);
8415 /* *&p => p */
8416 if (useless_type_conversion_p (type, optype))
8417 return op;
8419 /* *(foo *)&fooarray => fooarray[0] */
8420 if (TREE_CODE (optype) == ARRAY_TYPE
8421 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8422 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8424 tree type_domain = TYPE_DOMAIN (optype);
8425 tree min_val = size_zero_node;
8426 if (type_domain && TYPE_MIN_VALUE (type_domain))
8427 min_val = TYPE_MIN_VALUE (type_domain);
8428 if (TREE_CODE (min_val) == INTEGER_CST)
8429 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8431 /* *(foo *)&complexfoo => __real__ complexfoo */
8432 else if (TREE_CODE (optype) == COMPLEX_TYPE
8433 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8434 return fold_build1 (REALPART_EXPR, type, op);
8435 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8436 else if (TREE_CODE (optype) == VECTOR_TYPE
8437 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8439 tree part_width = TYPE_SIZE (type);
8440 tree index = bitsize_int (0);
8441 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8445 /* *(p + CST) -> ... */
8446 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8447 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8449 tree addr = TREE_OPERAND (sub, 0);
8450 tree off = TREE_OPERAND (sub, 1);
8451 tree addrtype;
8453 STRIP_NOPS (addr);
8454 addrtype = TREE_TYPE (addr);
8456 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8457 if (TREE_CODE (addr) == ADDR_EXPR
8458 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8459 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8460 && tree_fits_uhwi_p (off))
8462 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8463 tree part_width = TYPE_SIZE (type);
8464 unsigned HOST_WIDE_INT part_widthi
8465 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8466 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8467 tree index = bitsize_int (indexi);
8468 if (known_lt (offset / part_widthi,
8469 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8470 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8471 part_width, index);
8474 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8475 if (TREE_CODE (addr) == ADDR_EXPR
8476 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8477 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8479 tree size = TYPE_SIZE_UNIT (type);
8480 if (tree_int_cst_equal (size, off))
8481 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8484 /* *(p + CST) -> MEM_REF <p, CST>. */
8485 if (TREE_CODE (addr) != ADDR_EXPR
8486 || DECL_P (TREE_OPERAND (addr, 0)))
8487 return fold_build2 (MEM_REF, type,
8488 addr,
8489 wide_int_to_tree (ptype, wi::to_wide (off)));
8492 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8493 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8494 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8495 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8497 tree type_domain;
8498 tree min_val = size_zero_node;
8499 tree osub = sub;
8500 sub = gimple_fold_indirect_ref (sub);
8501 if (! sub)
8502 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8503 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8504 if (type_domain && TYPE_MIN_VALUE (type_domain))
8505 min_val = TYPE_MIN_VALUE (type_domain);
8506 if (TREE_CODE (min_val) == INTEGER_CST)
8507 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8510 return NULL_TREE;
8513 /* Return true if CODE is an operation that when operating on signed
8514 integer types involves undefined behavior on overflow and the
8515 operation can be expressed with unsigned arithmetic. */
8517 bool
8518 arith_code_with_undefined_signed_overflow (tree_code code)
8520 switch (code)
8522 case ABS_EXPR:
8523 case PLUS_EXPR:
8524 case MINUS_EXPR:
8525 case MULT_EXPR:
8526 case NEGATE_EXPR:
8527 case POINTER_PLUS_EXPR:
8528 return true;
8529 default:
8530 return false;
8534 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8535 operation that can be transformed to unsigned arithmetic by converting
8536 its operands, carrying out the operation in the corresponding unsigned
8537 type and converting the result back to the original type.
8539 Returns a sequence of statements that replace STMT and also contain
8540 a modified form of STMT itself. */
8542 gimple_seq
8543 rewrite_to_defined_overflow (gimple *stmt)
8545 if (dump_file && (dump_flags & TDF_DETAILS))
8547 fprintf (dump_file, "rewriting stmt with undefined signed "
8548 "overflow ");
8549 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8552 tree lhs = gimple_assign_lhs (stmt);
8553 tree type = unsigned_type_for (TREE_TYPE (lhs));
8554 gimple_seq stmts = NULL;
8555 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8556 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8557 else
8558 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8560 tree op = gimple_op (stmt, i);
8561 op = gimple_convert (&stmts, type, op);
8562 gimple_set_op (stmt, i, op);
8564 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8565 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8566 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8567 gimple_set_modified (stmt, true);
8568 gimple_seq_add_stmt (&stmts, stmt);
8569 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8570 gimple_seq_add_stmt (&stmts, cvt);
8572 return stmts;
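/* The rewrite relies on the usual modular-arithmetic equivalence.  A
   standalone sketch (kept out of the build) of what the emitted
   sequence computes for a signed addition; the final conversion back
   to 'int' is implementation-defined in ISO C but defined as modulo
   2^N by GCC.  */
#if 0
static int
defined_overflow_add_example (int a, int b)
{
  /* Unlike "a + b", this has no undefined behavior on overflow.  */
  return (int) ((unsigned int) a + (unsigned int) b);
}
#endif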
8576 /* The valueization hook we use for the gimple_build API simplification.
8577 This makes us match fold_buildN behavior by only combining with
8578 statements in the sequence(s) we are currently building. */
8580 static tree
8581 gimple_build_valueize (tree op)
8583 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8584 return op;
8585 return NULL_TREE;
8588 /* Build the expression CODE OP0 of type TYPE with location LOC,
8589 simplifying it first if possible. Returns the built
8590 expression value and appends statements possibly defining it
8591 to SEQ. */
8593 tree
8594 gimple_build (gimple_seq *seq, location_t loc,
8595 enum tree_code code, tree type, tree op0)
8597 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8598 if (!res)
8600 res = create_tmp_reg_or_ssa_name (type);
8601 gimple *stmt;
8602 if (code == REALPART_EXPR
8603 || code == IMAGPART_EXPR
8604 || code == VIEW_CONVERT_EXPR)
8605 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8606 else
8607 stmt = gimple_build_assign (res, code, op0);
8608 gimple_set_location (stmt, loc);
8609 gimple_seq_add_stmt_without_update (seq, stmt);
8611 return res;
8614 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8615 simplifying it first if possible. Returns the built
8616 expression value and appends statements possibly defining it
8617 to SEQ. */
8619 tree
8620 gimple_build (gimple_seq *seq, location_t loc,
8621 enum tree_code code, tree type, tree op0, tree op1)
8623 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8624 if (!res)
8626 res = create_tmp_reg_or_ssa_name (type);
8627 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8628 gimple_set_location (stmt, loc);
8629 gimple_seq_add_stmt_without_update (seq, stmt);
8631 return res;
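/* A minimal usage sketch (hypothetical caller, kept out of the build):
   build and simplify "q = p + (sizetype) n" into a sequence, then
   insert it before GSI.  P, N, LOC and GSI are assumptions of the
   example.  */
#if 0
gimple_seq seq = NULL;
tree off = gimple_convert (&seq, loc, sizetype, n);
tree q = gimple_build (&seq, loc, POINTER_PLUS_EXPR, TREE_TYPE (p),
		       p, off);
gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
#endif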
8634 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8635 simplifying it first if possible. Returns the built
8636 expression value and appends statements possibly defining it
8637 to SEQ. */
8639 tree
8640 gimple_build (gimple_seq *seq, location_t loc,
8641 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8643 tree res = gimple_simplify (code, type, op0, op1, op2,
8644 seq, gimple_build_valueize);
8645 if (!res)
8647 res = create_tmp_reg_or_ssa_name (type);
8648 gimple *stmt;
8649 if (code == BIT_FIELD_REF)
8650 stmt = gimple_build_assign (res, code,
8651 build3 (code, type, op0, op1, op2));
8652 else
8653 stmt = gimple_build_assign (res, code, op0, op1, op2);
8654 gimple_set_location (stmt, loc);
8655 gimple_seq_add_stmt_without_update (seq, stmt);
8657 return res;
8660 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8661 void) with a location LOC. Returns the built expression value (or NULL_TREE
8662 if TYPE is void) and appends statements possibly defining it to SEQ. */
8664 tree
8665 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8667 tree res = NULL_TREE;
8668 gcall *stmt;
8669 if (internal_fn_p (fn))
8670 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8671 else
8673 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8674 stmt = gimple_build_call (decl, 0);
8676 if (!VOID_TYPE_P (type))
8678 res = create_tmp_reg_or_ssa_name (type);
8679 gimple_call_set_lhs (stmt, res);
8681 gimple_set_location (stmt, loc);
8682 gimple_seq_add_stmt_without_update (seq, stmt);
8683 return res;
8686 /* Build the call FN (ARG0) with a result of type TYPE
8687 (or no result if TYPE is void) with location LOC,
8688 simplifying it first if possible. Returns the built
8689 expression value (or NULL_TREE if TYPE is void) and appends
8690 statements possibly defining it to SEQ. */
8692 tree
8693 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8694 tree type, tree arg0)
8696 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8697 if (!res)
8699 gcall *stmt;
8700 if (internal_fn_p (fn))
8701 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8702 else
8704 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8705 stmt = gimple_build_call (decl, 1, arg0);
8707 if (!VOID_TYPE_P (type))
8709 res = create_tmp_reg_or_ssa_name (type);
8710 gimple_call_set_lhs (stmt, res);
8712 gimple_set_location (stmt, loc);
8713 gimple_seq_add_stmt_without_update (seq, stmt);
8715 return res;
8718 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8719 (or no result if TYPE is void) with location LOC,
8720 simplifying it first if possible. Returns the built
8721 expression value (or NULL_TREE if TYPE is void) and appends
8722 statements possibly defining it to SEQ. */
8724 tree
8725 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8726 tree type, tree arg0, tree arg1)
8728 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8729 if (!res)
8731 gcall *stmt;
8732 if (internal_fn_p (fn))
8733 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8734 else
8736 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8737 stmt = gimple_build_call (decl, 2, arg0, arg1);
8739 if (!VOID_TYPE_P (type))
8741 res = create_tmp_reg_or_ssa_name (type);
8742 gimple_call_set_lhs (stmt, res);
8744 gimple_set_location (stmt, loc);
8745 gimple_seq_add_stmt_without_update (seq, stmt);
8747 return res;
8750 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8751 (or no result if TYPE is void) with location LOC,
8752 simplifying it first if possible. Returns the built
8753 expression value (or NULL_TREE if TYPE is void) and appends
8754 statements possibly defining it to SEQ. */
8756 tree
8757 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8758 tree type, tree arg0, tree arg1, tree arg2)
8760 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8761 seq, gimple_build_valueize);
8762 if (!res)
8764 gcall *stmt;
8765 if (internal_fn_p (fn))
8766 stmt = gimple_build_call_internal (as_internal_fn (fn),
8767 3, arg0, arg1, arg2);
8768 else
8770 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8771 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8773 if (!VOID_TYPE_P (type))
8775 res = create_tmp_reg_or_ssa_name (type);
8776 gimple_call_set_lhs (stmt, res);
8778 gimple_set_location (stmt, loc);
8779 gimple_seq_add_stmt_without_update (seq, stmt);
8781 return res;
8784 /* Build the conversion (TYPE) OP with a result of type TYPE
8785 with location LOC if such a conversion is necessary in GIMPLE,
8786 simplifying it first.
8787 Returns the built expression value and appends
8788 statements possibly defining it to SEQ. */
8790 tree
8791 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8793 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8794 return op;
8795 return gimple_build (seq, loc, NOP_EXPR, type, op);
8798 /* Build the conversion (ptrofftype) OP with a result of a type
8799 compatible with ptrofftype with location LOC if such conversion
8800 is necessary in GIMPLE, simplifying it first.
8801 Returns the built expression value and appends
8802 statements possibly defining it to SEQ. */
8804 tree
8805 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8807 if (ptrofftype_p (TREE_TYPE (op)))
8808 return op;
8809 return gimple_convert (seq, loc, sizetype, op);
8812 /* Build a vector of type TYPE in which each element has the value OP.
8813 Return a gimple value for the result, appending any new statements
8814 to SEQ. */
8816 tree
8817 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8818 tree op)
8820 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8821 && !CONSTANT_CLASS_P (op))
8822 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8824 tree res, vec = build_vector_from_val (type, op);
8825 if (is_gimple_val (vec))
8826 return vec;
8827 if (gimple_in_ssa_p (cfun))
8828 res = make_ssa_name (type);
8829 else
8830 res = create_tmp_reg (type);
8831 gimple *stmt = gimple_build_assign (res, vec);
8832 gimple_set_location (stmt, loc);
8833 gimple_seq_add_stmt_without_update (seq, stmt);
8834 return res;
8837 /* Build a vector from BUILDER, handling the case in which some elements
8838 are non-constant. Return a gimple value for the result, appending any
8839 new instructions to SEQ.
8841 BUILDER must not have a stepped encoding on entry. This is because
8842 the function is not geared up to handle the arithmetic that would
8843 be needed in the variable case, and any code building a vector that
8844 is known to be constant should use BUILDER->build () directly. */
8846 tree
8847 gimple_build_vector (gimple_seq *seq, location_t loc,
8848 tree_vector_builder *builder)
8850 gcc_assert (builder->nelts_per_pattern () <= 2);
8851 unsigned int encoded_nelts = builder->encoded_nelts ();
8852 for (unsigned int i = 0; i < encoded_nelts; ++i)
8853 if (!CONSTANT_CLASS_P ((*builder)[i]))
8855 tree type = builder->type ();
8856 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8857 vec<constructor_elt, va_gc> *v;
8858 vec_alloc (v, nelts);
8859 for (i = 0; i < nelts; ++i)
8860 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
8862 tree res;
8863 if (gimple_in_ssa_p (cfun))
8864 res = make_ssa_name (type);
8865 else
8866 res = create_tmp_reg (type);
8867 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8868 gimple_set_location (stmt, loc);
8869 gimple_seq_add_stmt_without_update (seq, stmt);
8870 return res;
8872 return builder->build ();
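/* A minimal usage sketch (kept out of the build): build a 4-element
   vector from ELTS, which may mix constants and SSA names; VECTYPE,
   ELTS, SEQ and LOC are assumptions of the example.  */
#if 0
tree_vector_builder builder (vectype, 4, 1);
for (unsigned int i = 0; i < 4; ++i)
  builder.quick_push (elts[i]);
tree vec = gimple_build_vector (&seq, loc, &builder);
#endif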
8875 /* Emit gimple statements into SEQ that take the value given in OLD_SIZE
8876 and generate a value guaranteed to be rounded up to a multiple of ALIGN.
8878 Return the tree node representing this size; it is of type TYPE. */
8880 tree
8881 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8882 tree old_size, unsigned HOST_WIDE_INT align)
8884 unsigned HOST_WIDE_INT tg_mask = align - 1;
8885 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8886 gcc_assert (INTEGRAL_TYPE_P (type));
8887 tree tree_mask = build_int_cst (type, tg_mask);
8888 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8889 tree_mask);
8891 tree mask = build_int_cst (type, -align);
8892 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
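/* The round-up computation above in a standalone sketch (kept out of
   the build; assumes ALIGN is a power of two), e.g. rounding 13 up to
   a multiple of 16 yields 16.  */
#if 0
static unsigned long
round_up_example (unsigned long old_size, unsigned long align)
{
  unsigned long mask = align - 1;	/* 16 -> 0xf */
  return (old_size + mask) & ~mask;	/* (13 + 15) & ~15 == 16 */
}
#endif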
8895 /* Return true if the result of assignment STMT is known to be non-negative.
8896 If the return value is based on the assumption that signed overflow is
8897 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8898 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8900 static bool
8901 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8902 int depth)
8904 enum tree_code code = gimple_assign_rhs_code (stmt);
8905 switch (get_gimple_rhs_class (code))
8907 case GIMPLE_UNARY_RHS:
8908 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8909 gimple_expr_type (stmt),
8910 gimple_assign_rhs1 (stmt),
8911 strict_overflow_p, depth);
8912 case GIMPLE_BINARY_RHS:
8913 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8914 gimple_expr_type (stmt),
8915 gimple_assign_rhs1 (stmt),
8916 gimple_assign_rhs2 (stmt),
8917 strict_overflow_p, depth);
8918 case GIMPLE_TERNARY_RHS:
8919 return false;
8920 case GIMPLE_SINGLE_RHS:
8921 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8922 strict_overflow_p, depth);
8923 case GIMPLE_INVALID_RHS:
8924 break;
8926 gcc_unreachable ();
8929 /* Return true if return value of call STMT is known to be non-negative.
8930 If the return value is based on the assumption that signed overflow is
8931 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8932 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8934 static bool
8935 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8936 int depth)
8938 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8939 gimple_call_arg (stmt, 0) : NULL_TREE;
8940 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8941 gimple_call_arg (stmt, 1) : NULL_TREE;
8943 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
8944 gimple_call_combined_fn (stmt),
8945 arg0,
8946 arg1,
8947 strict_overflow_p, depth);
8950 /* Return true if return value of call STMT is known to be non-negative.
8951 If the return value is based on the assumption that signed overflow is
8952 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8953 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8955 static bool
8956 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8957 int depth)
8959 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8961 tree arg = gimple_phi_arg_def (stmt, i);
8962 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8963 return false;
8965 return true;
8968 /* Return true if STMT is known to compute a non-negative value.
8969 If the return value is based on the assumption that signed overflow is
8970 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8971 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8973 bool
8974 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8975 int depth)
8977 switch (gimple_code (stmt))
8979 case GIMPLE_ASSIGN:
8980 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8981 depth);
8982 case GIMPLE_CALL:
8983 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8984 depth);
8985 case GIMPLE_PHI:
8986 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8987 depth);
8988 default:
8989 return false;
8993 /* Return true if the floating-point value computed by assignment STMT
8994 is known to have an integer value. We also allow +Inf, -Inf and NaN
8995 to be considered integer values. Return false for signaling NaN.
8997 DEPTH is the current nesting depth of the query. */
8999 static bool
9000 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9002 enum tree_code code = gimple_assign_rhs_code (stmt);
9003 switch (get_gimple_rhs_class (code))
9005 case GIMPLE_UNARY_RHS:
9006 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9007 gimple_assign_rhs1 (stmt), depth);
9008 case GIMPLE_BINARY_RHS:
9009 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9010 gimple_assign_rhs1 (stmt),
9011 gimple_assign_rhs2 (stmt), depth);
9012 case GIMPLE_TERNARY_RHS:
9013 return false;
9014 case GIMPLE_SINGLE_RHS:
9015 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9016 case GIMPLE_INVALID_RHS:
9017 break;
9019 gcc_unreachable ();
9022 /* Return true if the floating-point value computed by call STMT is known
9023 to have an integer value. We also allow +Inf, -Inf and NaN to be
9024 considered integer values. Return false for signaling NaN.
9026 DEPTH is the current nesting depth of the query. */
9028 static bool
9029 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9031 tree arg0 = (gimple_call_num_args (stmt) > 0
9032 ? gimple_call_arg (stmt, 0)
9033 : NULL_TREE);
9034 tree arg1 = (gimple_call_num_args (stmt) > 1
9035 ? gimple_call_arg (stmt, 1)
9036 : NULL_TREE);
9037 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9038 arg0, arg1, depth);
9041 /* Return true if the floating-point result of phi STMT is known to have
9042 an integer value. We also allow +Inf, -Inf and NaN to be considered
9043 integer values. Return false for signaling NaN.
9045 DEPTH is the current nesting depth of the query. */
9047 static bool
9048 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9050 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9052 tree arg = gimple_phi_arg_def (stmt, i);
9053 if (!integer_valued_real_single_p (arg, depth + 1))
9054 return false;
9056 return true;
9059 /* Return true if the floating-point value computed by STMT is known
9060 to have an integer value. We also allow +Inf, -Inf and NaN to be
9061 considered integer values. Return false for signaling NaN.
9063 DEPTH is the current nesting depth of the query. */
9065 bool
9066 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9068 switch (gimple_code (stmt))
9070 case GIMPLE_ASSIGN:
9071 return gimple_assign_integer_valued_real_p (stmt, depth);
9072 case GIMPLE_CALL:
9073 return gimple_call_integer_valued_real_p (stmt, depth);
9074 case GIMPLE_PHI:
9075 return gimple_phi_integer_valued_real_p (stmt, depth);
9076 default:
9077 return false;