PR middle-end/84095 - false-positive -Wrestrict warnings for memcpy within array
[official-gcc.git] / gcc / gimple-fold.c
blobc9dad6f42d13638995db9019a0861cc7501b811a
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 location_t loc = gimple_location_safe (stmt);
351 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
352 "resolving virtual function address "
353 "reference to function %s\n",
354 targets.length () == 1
355 ? targets[0]->name ()
356 : "NULL");
358 if (targets.length () == 1)
360 val = fold_convert (TREE_TYPE (val),
361 build_fold_addr_expr_loc
362 (loc, targets[0]->decl));
363 STRIP_USELESS_TYPE_CONVERSION (val);
365 else
366 /* We can not use __builtin_unreachable here because it
367 can not have address taken. */
368 val = build_int_cst (TREE_TYPE (val), 0);
369 return val;
374 else if (TREE_CODE (rhs) == ADDR_EXPR)
376 tree ref = TREE_OPERAND (rhs, 0);
377 tree tem = maybe_fold_reference (ref, true);
378 if (tem
379 && TREE_CODE (tem) == MEM_REF
380 && integer_zerop (TREE_OPERAND (tem, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
382 else if (tem)
383 result = fold_convert (TREE_TYPE (rhs),
384 build_fold_addr_expr_loc (loc, tem));
385 else if (TREE_CODE (ref) == MEM_REF
386 && integer_zerop (TREE_OPERAND (ref, 1)))
387 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
389 if (result)
391 /* Strip away useless type conversions. Both the
392 NON_LVALUE_EXPR that may have been added by fold, and
393 "useless" type conversions that might now be apparent
394 due to propagation. */
395 STRIP_USELESS_TYPE_CONVERSION (result);
397 if (result != rhs && valid_gimple_rhs_p (result))
398 return result;
402 else if (TREE_CODE (rhs) == CONSTRUCTOR
403 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
405 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
406 unsigned i;
407 tree val;
409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
410 if (! CONSTANT_CLASS_P (val))
411 return NULL_TREE;
413 return build_vector_from_ctor (TREE_TYPE (rhs),
414 CONSTRUCTOR_ELTS (rhs));
417 else if (DECL_P (rhs))
418 return get_symbol_constant_value (rhs);
420 break;
422 case GIMPLE_UNARY_RHS:
423 break;
425 case GIMPLE_BINARY_RHS:
426 break;
428 case GIMPLE_TERNARY_RHS:
429 result = fold_ternary_loc (loc, subcode,
430 TREE_TYPE (gimple_assign_lhs (stmt)),
431 gimple_assign_rhs1 (stmt),
432 gimple_assign_rhs2 (stmt),
433 gimple_assign_rhs3 (stmt));
435 if (result)
437 STRIP_USELESS_TYPE_CONVERSION (result);
438 if (valid_gimple_rhs_p (result))
439 return result;
441 break;
443 case GIMPLE_INVALID_RHS:
444 gcc_unreachable ();
447 return NULL_TREE;
451 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
452 adjusting the replacement stmts location and virtual operands.
453 If the statement has a lhs the last stmt in the sequence is expected
454 to assign to that lhs. */
456 static void
457 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
459 gimple *stmt = gsi_stmt (*si_p);
461 if (gimple_has_location (stmt))
462 annotate_all_with_location (stmts, gimple_location (stmt));
464 /* First iterate over the replacement statements backward, assigning
465 virtual operands to their defining statements. */
466 gimple *laststore = NULL;
467 for (gimple_stmt_iterator i = gsi_last (stmts);
468 !gsi_end_p (i); gsi_prev (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 if ((gimple_assign_single_p (new_stmt)
472 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
473 || (is_gimple_call (new_stmt)
474 && (gimple_call_flags (new_stmt)
475 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
477 tree vdef;
478 if (!laststore)
479 vdef = gimple_vdef (stmt);
480 else
481 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
482 gimple_set_vdef (new_stmt, vdef);
483 if (vdef && TREE_CODE (vdef) == SSA_NAME)
484 SSA_NAME_DEF_STMT (vdef) = new_stmt;
485 laststore = new_stmt;
489 /* Second iterate over the statements forward, assigning virtual
490 operands to their uses. */
491 tree reaching_vuse = gimple_vuse (stmt);
492 for (gimple_stmt_iterator i = gsi_start (stmts);
493 !gsi_end_p (i); gsi_next (&i))
495 gimple *new_stmt = gsi_stmt (i);
496 /* If the new statement possibly has a VUSE, update it with exact SSA
497 name we know will reach this one. */
498 if (gimple_has_mem_ops (new_stmt))
499 gimple_set_vuse (new_stmt, reaching_vuse);
500 gimple_set_modified (new_stmt, true);
501 if (gimple_vdef (new_stmt))
502 reaching_vuse = gimple_vdef (new_stmt);
505 /* If the new sequence does not do a store release the virtual
506 definition of the original statement. */
507 if (reaching_vuse
508 && reaching_vuse == gimple_vuse (stmt))
510 tree vdef = gimple_vdef (stmt);
511 if (vdef
512 && TREE_CODE (vdef) == SSA_NAME)
514 unlink_stmt_vdef (stmt);
515 release_ssa_name (vdef);
519 /* Finally replace the original statement with the sequence. */
520 gsi_replace_with_seq (si_p, stmts, false);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536 tree lhs;
537 gimple *stmt, *new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = NULL;
541 stmt = gsi_stmt (*si_p);
543 gcc_assert (is_gimple_call (stmt));
545 push_gimplify_context (gimple_in_ssa_p (cfun));
547 lhs = gimple_call_lhs (stmt);
548 if (lhs == NULL_TREE)
550 gimplify_and_add (expr, &stmts);
551 /* We can end up with folding a memcpy of an empty class assignment
552 which gets optimized away by C++ gimplification. */
553 if (gimple_seq_empty_p (stmts))
555 pop_gimplify_context (NULL);
556 if (gimple_in_ssa_p (cfun))
558 unlink_stmt_vdef (stmt);
559 release_defs (stmt);
561 gsi_replace (si_p, gimple_build_nop (), false);
562 return;
565 else
567 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
568 new_stmt = gimple_build_assign (lhs, tmp);
569 i = gsi_last (stmts);
570 gsi_insert_after_without_update (&i, new_stmt,
571 GSI_CONTINUE_LINKING);
574 pop_gimplify_context (NULL);
576 gsi_replace_with_seq_vops (si_p, stmts);
580 /* Replace the call at *GSI with the gimple value VAL. */
582 void
583 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 gimple *stmt = gsi_stmt (*gsi);
586 tree lhs = gimple_call_lhs (stmt);
587 gimple *repl;
588 if (lhs)
590 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 val = fold_convert (TREE_TYPE (lhs), val);
592 repl = gimple_build_assign (lhs, val);
594 else
595 repl = gimple_build_nop ();
596 tree vdef = gimple_vdef (stmt);
597 if (vdef && TREE_CODE (vdef) == SSA_NAME)
599 unlink_stmt_vdef (stmt);
600 release_ssa_name (vdef);
602 gsi_replace (gsi, repl, false);
605 /* Replace the call at *GSI with the new call REPL and fold that
606 again. */
608 static void
609 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 gimple *stmt = gsi_stmt (*gsi);
612 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
613 gimple_set_location (repl, gimple_location (stmt));
614 if (gimple_vdef (stmt)
615 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617 gimple_set_vdef (repl, gimple_vdef (stmt));
618 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620 if (gimple_vuse (stmt))
621 gimple_set_vuse (repl, gimple_vuse (stmt));
622 gsi_replace (gsi, repl, false);
623 fold_stmt (gsi);
626 /* Return true if VAR is a VAR_DECL or a component thereof. */
628 static bool
629 var_decl_component_p (tree var)
631 tree inner = var;
632 while (handled_component_p (inner))
633 inner = TREE_OPERAND (inner, 0);
634 return SSA_VAR_P (inner);
637 /* If the SIZE argument representing the size of an object is in a range
638 of values of which exactly one is valid (and that is zero), return
639 true, otherwise false. */
641 static bool
642 size_must_be_zero_p (tree size)
644 if (integer_zerop (size))
645 return true;
647 if (TREE_CODE (size) != SSA_NAME)
648 return false;
650 wide_int min, max;
651 enum value_range_type rtype = get_range_info (size, &min, &max);
652 if (rtype != VR_ANTI_RANGE)
653 return false;
655 tree type = TREE_TYPE (size);
656 int prec = TYPE_PRECISION (type);
658 wide_int wone = wi::one (prec);
660 /* Compute the value of SSIZE_MAX, the largest positive value that
661 can be stored in ssize_t, the signed counterpart of size_t. */
662 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
667 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
668 diagnose (otherwise undefined) overlapping copies without preventing
669 folding. When folded, GCC guarantees that overlapping memcpy has
670 the same semantics as memmove. Call to the library memcpy need not
671 provide the same guarantee. Return false if no simplification can
672 be made. */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 tree func = gimple_call_fndecl (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
686 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
687 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
688 && !nowarn);
690 /* If the LEN parameter is a constant zero or in range where
691 the only valid value is zero, return DEST. */
692 if (size_must_be_zero_p (len))
694 gimple *repl;
695 if (gimple_call_lhs (stmt))
696 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
697 else
698 repl = gimple_build_nop ();
699 tree vdef = gimple_vdef (stmt);
700 if (vdef && TREE_CODE (vdef) == SSA_NAME)
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
705 gsi_replace (gsi, repl, false);
706 return true;
709 /* If SRC and DEST are the same (and not volatile), return
710 DEST{,+LEN,+LEN-1}. */
711 if (operand_equal_p (src, dest, 0))
713 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
714 It's safe and may even be emitted by GCC itself (see bug
715 32667). However, diagnose it in explicit calls to the memcpy
716 function. */
717 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
718 warning_at (loc, OPT_Wrestrict,
719 "%qD source argument is the same as destination",
720 func);
722 unlink_stmt_vdef (stmt);
723 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
724 release_ssa_name (gimple_vdef (stmt));
725 if (!lhs)
727 gsi_replace (gsi, gimple_build_nop (), false);
728 return true;
730 goto done;
732 else
734 tree srctype, desttype;
735 unsigned int src_align, dest_align;
736 tree off0;
738 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
739 pointers as wide integer) and also may result in huge function
740 size because of inlined bounds copy. Thus don't inline for
741 functions we want to instrument. */
742 if (flag_check_pointer_bounds
743 && chkp_instrumentable_p (cfun->decl)
744 /* Even if data may contain pointers we can inline if copy
745 less than a pointer size. */
746 && (!tree_fits_uhwi_p (len)
747 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
748 return false;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2))
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
829 gsi_replace (gsi, new_stmt, false);
830 return true;
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
839 if (endp == 3)
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
904 else
905 return false;
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
941 return false;
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1016 if (srcvar == NULL_TREE)
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1029 else if (destvar == NULL_TREE)
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1048 gimple *new_stmt;
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080 set_vop_and_replace:
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1122 if (!fn)
1123 return false;
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1135 return true;
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1146 if (!fn)
1147 return false;
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1162 return true;
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1173 if (!fn)
1174 return false;
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1188 return true;
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1271 else
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1278 return true;
1282 /* Obtain the minimum and maximum string length or minimum and maximum
1283 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1284 If ARG is an SSA name variable, follow its use-def chains. When
1285 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1286 if we are unable to determine the length or value, return False.
1287 VISITED is a bitmap of visited variables.
1288 TYPE is 0 if string length should be obtained, 1 for maximum string
1289 length and 2 for maximum value ARG can have.
1290 When FUZZY is set and the length of a string cannot be determined,
1291 the function instead considers as the maximum possible length the
1292 size of a character array it may refer to.
1293 Set *FLEXP to true if the range of the string lengths has been
1294 obtained from the upper bound of an array at the end of a struct.
1295 Such an array may hold a string that's longer than its upper bound
1296 due to it being used as a poor-man's flexible array member. */
1298 static bool
1299 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1300 bool fuzzy, bool *flexp)
1302 tree var, val = NULL_TREE;
1303 gimple *def_stmt;
1305 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1306 but MINLEN may be cleared during the execution of the function. */
1307 tree *minlen = length;
1308 tree *const maxlen = length + 1;
1310 if (TREE_CODE (arg) != SSA_NAME)
1312 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1313 if (TREE_CODE (arg) == ADDR_EXPR
1314 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1316 tree op = TREE_OPERAND (arg, 0);
1317 if (integer_zerop (TREE_OPERAND (op, 1)))
1319 tree aop0 = TREE_OPERAND (op, 0);
1320 if (TREE_CODE (aop0) == INDIRECT_REF
1321 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1322 return get_range_strlen (TREE_OPERAND (aop0, 0),
1323 length, visited, type, fuzzy, flexp);
1325 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1327 /* Fail if an array is the last member of a struct object
1328 since it could be treated as a (fake) flexible array
1329 member. */
1330 tree idx = TREE_OPERAND (op, 1);
1332 arg = TREE_OPERAND (op, 0);
1333 tree optype = TREE_TYPE (arg);
1334 if (tree dom = TYPE_DOMAIN (optype))
1335 if (tree bound = TYPE_MAX_VALUE (dom))
1336 if (TREE_CODE (bound) == INTEGER_CST
1337 && TREE_CODE (idx) == INTEGER_CST
1338 && tree_int_cst_lt (bound, idx))
1339 return false;
1343 if (type == 2)
1345 val = arg;
1346 if (TREE_CODE (val) != INTEGER_CST
1347 || tree_int_cst_sgn (val) < 0)
1348 return false;
1350 else
1351 val = c_strlen (arg, 1);
1353 if (!val && fuzzy)
1355 if (TREE_CODE (arg) == ADDR_EXPR)
1356 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1357 visited, type, fuzzy, flexp);
1359 if (TREE_CODE (arg) == ARRAY_REF)
1361 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1363 /* Determine the "innermost" array type. */
1364 while (TREE_CODE (type) == ARRAY_TYPE
1365 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1366 type = TREE_TYPE (type);
1368 /* Avoid arrays of pointers. */
1369 tree eltype = TREE_TYPE (type);
1370 if (TREE_CODE (type) != ARRAY_TYPE
1371 || !INTEGRAL_TYPE_P (eltype))
1372 return false;
1374 val = TYPE_SIZE_UNIT (type);
1375 if (!val || integer_zerop (val))
1376 return false;
1378 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1379 integer_one_node);
1380 /* Set the minimum size to zero since the string in
1381 the array could have zero length. */
1382 *minlen = ssize_int (0);
1384 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1385 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1386 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1387 *flexp = true;
1389 else if (TREE_CODE (arg) == COMPONENT_REF
1390 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1391 == ARRAY_TYPE))
1393 /* Use the type of the member array to determine the upper
1394 bound on the length of the array. This may be overly
1395 optimistic if the array itself isn't NUL-terminated and
1396 the caller relies on the subsequent member to contain
1397 the NUL but that would only be considered valid if
1398 the array were the last member of a struct.
1399 Set *FLEXP to true if the array whose bound is being
1400 used is at the end of a struct. */
1401 if (array_at_struct_end_p (arg))
1402 *flexp = true;
1404 arg = TREE_OPERAND (arg, 1);
1406 tree type = TREE_TYPE (arg);
1408 while (TREE_CODE (type) == ARRAY_TYPE
1409 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1410 type = TREE_TYPE (type);
1412 /* Fail when the array bound is unknown or zero. */
1413 val = TYPE_SIZE_UNIT (type);
1414 if (!val || integer_zerop (val))
1415 return false;
1416 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1417 integer_one_node);
1418 /* Set the minimum size to zero since the string in
1419 the array could have zero length. */
1420 *minlen = ssize_int (0);
1423 if (VAR_P (arg))
1425 tree type = TREE_TYPE (arg);
1426 if (POINTER_TYPE_P (type))
1427 type = TREE_TYPE (type);
1429 if (TREE_CODE (type) == ARRAY_TYPE)
1431 val = TYPE_SIZE_UNIT (type);
1432 if (!val
1433 || TREE_CODE (val) != INTEGER_CST
1434 || integer_zerop (val))
1435 return false;
1436 val = wide_int_to_tree (TREE_TYPE (val),
1437 wi::sub (wi::to_wide (val), 1));
1438 /* Set the minimum size to zero since the string in
1439 the array could have zero length. */
1440 *minlen = ssize_int (0);
1445 if (!val)
1446 return false;
1448 if (minlen
1449 && (!*minlen
1450 || (type > 0
1451 && TREE_CODE (*minlen) == INTEGER_CST
1452 && TREE_CODE (val) == INTEGER_CST
1453 && tree_int_cst_lt (val, *minlen))))
1454 *minlen = val;
1456 if (*maxlen)
1458 if (type > 0)
1460 if (TREE_CODE (*maxlen) != INTEGER_CST
1461 || TREE_CODE (val) != INTEGER_CST)
1462 return false;
1464 if (tree_int_cst_lt (*maxlen, val))
1465 *maxlen = val;
1466 return true;
1468 else if (simple_cst_equal (val, *maxlen) != 1)
1469 return false;
1472 *maxlen = val;
1473 return true;
1476 /* If ARG is registered for SSA update we cannot look at its defining
1477 statement. */
1478 if (name_registered_for_update_p (arg))
1479 return false;
1481 /* If we were already here, break the infinite cycle. */
1482 if (!*visited)
1483 *visited = BITMAP_ALLOC (NULL);
1484 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1485 return true;
1487 var = arg;
1488 def_stmt = SSA_NAME_DEF_STMT (var);
1490 switch (gimple_code (def_stmt))
1492 case GIMPLE_ASSIGN:
1493 /* The RHS of the statement defining VAR must either have a
1494 constant length or come from another SSA_NAME with a constant
1495 length. */
1496 if (gimple_assign_single_p (def_stmt)
1497 || gimple_assign_unary_nop_p (def_stmt))
1499 tree rhs = gimple_assign_rhs1 (def_stmt);
1500 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1502 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1504 tree op2 = gimple_assign_rhs2 (def_stmt);
1505 tree op3 = gimple_assign_rhs3 (def_stmt);
1506 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1507 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1509 return false;
1511 case GIMPLE_PHI:
1513 /* All the arguments of the PHI node must have the same constant
1514 length. */
1515 unsigned i;
1517 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1519 tree arg = gimple_phi_arg (def_stmt, i)->def;
1521 /* If this PHI has itself as an argument, we cannot
1522 determine the string length of this argument. However,
1523 if we can find a constant string length for the other
1524 PHI args then we can still be sure that this is a
1525 constant string length. So be optimistic and just
1526 continue with the next argument. */
1527 if (arg == gimple_phi_result (def_stmt))
1528 continue;
1530 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1532 if (fuzzy)
1533 *maxlen = build_all_ones_cst (size_type_node);
1534 else
1535 return false;
1539 return true;
1541 default:
1542 return false;
1546 /* Determine the minimum and maximum value or string length that ARG
1547 refers to and store each in the first two elements of MINMAXLEN.
1548 For expressions that point to strings of unknown lengths that are
1549 character arrays, use the upper bound of the array as the maximum
1550 length. For example, given an expression like 'x ? array : "xyz"'
1551 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1552 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1553 stored in array.
1554 Return true if the range of the string lengths has been obtained
1555 from the upper bound of an array at the end of a struct. Such
1556 an array may hold a string that's longer than its upper bound
1557 due to it being used as a poor-man's flexible array member. */
1559 bool
1560 get_range_strlen (tree arg, tree minmaxlen[2])
1562 bitmap visited = NULL;
1564 minmaxlen[0] = NULL_TREE;
1565 minmaxlen[1] = NULL_TREE;
1567 bool flexarray = false;
1568 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1570 if (visited)
1571 BITMAP_FREE (visited);
1573 return flexarray;
1576 tree
1577 get_maxval_strlen (tree arg, int type)
1579 bitmap visited = NULL;
1580 tree len[2] = { NULL_TREE, NULL_TREE };
1582 bool dummy;
1583 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1584 len[1] = NULL_TREE;
1585 if (visited)
1586 BITMAP_FREE (visited);
1588 return len[1];
1592 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1593 If LEN is not NULL, it represents the length of the string to be
1594 copied. Return NULL_TREE if no simplification can be made. */
1596 static bool
1597 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1598 tree dest, tree src)
1600 gimple *stmt = gsi_stmt (*gsi);
1601 location_t loc = gimple_location (stmt);
1602 tree fn;
1604 /* If SRC and DEST are the same (and not volatile), return DEST. */
1605 if (operand_equal_p (src, dest, 0))
1607 tree func = gimple_call_fndecl (stmt);
1609 warning_at (loc, OPT_Wrestrict,
1610 "%qD source argument is the same as destination",
1611 func);
1613 replace_call_with_value (gsi, dest);
1614 return true;
1617 if (optimize_function_for_size_p (cfun))
1618 return false;
1620 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1621 if (!fn)
1622 return false;
1624 tree len = get_maxval_strlen (src, 0);
1625 if (!len)
1626 return false;
1628 len = fold_convert_loc (loc, size_type_node, len);
1629 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1630 len = force_gimple_operand_gsi (gsi, len, true,
1631 NULL_TREE, true, GSI_SAME_STMT);
1632 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1633 replace_call_with_call_and_fold (gsi, repl);
1634 return true;
1637 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1638 If SLEN is not NULL, it represents the length of the source string.
1639 Return NULL_TREE if no simplification can be made. */
1641 static bool
1642 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1643 tree dest, tree src, tree len)
1645 gimple *stmt = gsi_stmt (*gsi);
1646 location_t loc = gimple_location (stmt);
1647 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1649 /* If the LEN parameter is zero, return DEST. */
1650 if (integer_zerop (len))
1652 /* Avoid warning if the destination refers to a an array/pointer
1653 decorate with attribute nonstring. */
1654 if (!nonstring)
1656 tree fndecl = gimple_call_fndecl (stmt);
1657 gcall *call = as_a <gcall *> (stmt);
1659 /* Warn about the lack of nul termination: the result is not
1660 a (nul-terminated) string. */
1661 tree slen = get_maxval_strlen (src, 0);
1662 if (slen && !integer_zerop (slen))
1663 warning_at (loc, OPT_Wstringop_truncation,
1664 "%G%qD destination unchanged after copying no bytes "
1665 "from a string of length %E",
1666 call, fndecl, slen);
1667 else
1668 warning_at (loc, OPT_Wstringop_truncation,
1669 "%G%qD destination unchanged after copying no bytes",
1670 call, fndecl);
1673 replace_call_with_value (gsi, dest);
1674 return true;
1677 /* We can't compare slen with len as constants below if len is not a
1678 constant. */
1679 if (TREE_CODE (len) != INTEGER_CST)
1680 return false;
1682 /* Now, we must be passed a constant src ptr parameter. */
1683 tree slen = get_maxval_strlen (src, 0);
1684 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1685 return false;
1687 /* The size of the source string including the terminating nul. */
1688 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1690 /* We do not support simplification of this case, though we do
1691 support it when expanding trees into RTL. */
1692 /* FIXME: generate a call to __builtin_memset. */
1693 if (tree_int_cst_lt (ssize, len))
1694 return false;
1696 if (!nonstring)
1698 if (tree_int_cst_lt (len, slen))
1700 tree fndecl = gimple_call_fndecl (stmt);
1701 gcall *call = as_a <gcall *> (stmt);
1703 warning_at (loc, OPT_Wstringop_truncation,
1704 (tree_int_cst_equal (size_one_node, len)
1705 ? G_("%G%qD output truncated copying %E byte "
1706 "from a string of length %E")
1707 : G_("%G%qD output truncated copying %E bytes "
1708 "from a string of length %E")),
1709 call, fndecl, len, slen);
1711 else if (tree_int_cst_equal (len, slen))
1713 tree fndecl = gimple_call_fndecl (stmt);
1714 gcall *call = as_a <gcall *> (stmt);
1716 warning_at (loc, OPT_Wstringop_truncation,
1717 (tree_int_cst_equal (size_one_node, len)
1718 ? G_("%G%qD output truncated before terminating nul "
1719 "copying %E byte from a string of the same "
1720 "length")
1721 : G_("%G%qD output truncated before terminating nul "
1722 "copying %E bytes from a string of the same "
1723 "length")),
1724 call, fndecl, len);
1728 /* OK transform into builtin memcpy. */
1729 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1730 if (!fn)
1731 return false;
1733 len = fold_convert_loc (loc, size_type_node, len);
1734 len = force_gimple_operand_gsi (gsi, len, true,
1735 NULL_TREE, true, GSI_SAME_STMT);
1736 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1737 replace_call_with_call_and_fold (gsi, repl);
1739 return true;
1742 /* Fold function call to builtin strchr or strrchr.
1743 If both arguments are constant, evaluate and fold the result,
1744 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1745 In general strlen is significantly faster than strchr
1746 due to being a simpler operation. */
1747 static bool
1748 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1750 gimple *stmt = gsi_stmt (*gsi);
1751 tree str = gimple_call_arg (stmt, 0);
1752 tree c = gimple_call_arg (stmt, 1);
1753 location_t loc = gimple_location (stmt);
1754 const char *p;
1755 char ch;
1757 if (!gimple_call_lhs (stmt))
1758 return false;
1760 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1762 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1764 if (p1 == NULL)
1766 replace_call_with_value (gsi, integer_zero_node);
1767 return true;
1770 tree len = build_int_cst (size_type_node, p1 - p);
1771 gimple_seq stmts = NULL;
1772 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1773 POINTER_PLUS_EXPR, str, len);
1774 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1775 gsi_replace_with_seq_vops (gsi, stmts);
1776 return true;
1779 if (!integer_zerop (c))
1780 return false;
1782 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1783 if (is_strrchr && optimize_function_for_size_p (cfun))
1785 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1787 if (strchr_fn)
1789 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1790 replace_call_with_call_and_fold (gsi, repl);
1791 return true;
1794 return false;
1797 tree len;
1798 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1800 if (!strlen_fn)
1801 return false;
1803 /* Create newstr = strlen (str). */
1804 gimple_seq stmts = NULL;
1805 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1806 gimple_set_location (new_stmt, loc);
1807 len = create_tmp_reg_or_ssa_name (size_type_node);
1808 gimple_call_set_lhs (new_stmt, len);
1809 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1811 /* Create (str p+ strlen (str)). */
1812 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1813 POINTER_PLUS_EXPR, str, len);
1814 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1815 gsi_replace_with_seq_vops (gsi, stmts);
1816 /* gsi now points at the assignment to the lhs, get a
1817 stmt iterator to the strlen.
1818 ??? We can't use gsi_for_stmt as that doesn't work when the
1819 CFG isn't built yet. */
1820 gimple_stmt_iterator gsi2 = *gsi;
1821 gsi_prev (&gsi2);
1822 fold_stmt (&gsi2);
1823 return true;
1826 /* Fold function call to builtin strstr.
1827 If both arguments are constant, evaluate and fold the result,
1828 additionally fold strstr (x, "") into x and strstr (x, "c")
1829 into strchr (x, 'c'). */
1830 static bool
1831 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1833 gimple *stmt = gsi_stmt (*gsi);
1834 tree haystack = gimple_call_arg (stmt, 0);
1835 tree needle = gimple_call_arg (stmt, 1);
1836 const char *p, *q;
1838 if (!gimple_call_lhs (stmt))
1839 return false;
1841 q = c_getstr (needle);
1842 if (q == NULL)
1843 return false;
1845 if ((p = c_getstr (haystack)))
1847 const char *r = strstr (p, q);
1849 if (r == NULL)
1851 replace_call_with_value (gsi, integer_zero_node);
1852 return true;
1855 tree len = build_int_cst (size_type_node, r - p);
1856 gimple_seq stmts = NULL;
1857 gimple *new_stmt
1858 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1859 haystack, len);
1860 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1861 gsi_replace_with_seq_vops (gsi, stmts);
1862 return true;
1865 /* For strstr (x, "") return x. */
1866 if (q[0] == '\0')
1868 replace_call_with_value (gsi, haystack);
1869 return true;
1872 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1873 if (q[1] == '\0')
1875 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1876 if (strchr_fn)
1878 tree c = build_int_cst (integer_type_node, q[0]);
1879 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1880 replace_call_with_call_and_fold (gsi, repl);
1881 return true;
1885 return false;
1888 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1889 to the call.
1891 Return NULL_TREE if no simplification was possible, otherwise return the
1892 simplified form of the call as a tree.
1894 The simplified form may be a constant or other expression which
1895 computes the same value, but in a more efficient manner (including
1896 calls to other builtin functions).
1898 The call may contain arguments which need to be evaluated, but
1899 which are not useful to determine the result of the call. In
1900 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1901 COMPOUND_EXPR will be an argument which must be evaluated.
1902 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1903 COMPOUND_EXPR in the chain will contain the tree for the simplified
1904 form of the builtin function call. */
1906 static bool
1907 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1909 gimple *stmt = gsi_stmt (*gsi);
1910 location_t loc = gimple_location (stmt);
1912 const char *p = c_getstr (src);
1914 /* If the string length is zero, return the dst parameter. */
1915 if (p && *p == '\0')
1917 replace_call_with_value (gsi, dst);
1918 return true;
1921 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1922 return false;
1924 /* See if we can store by pieces into (dst + strlen(dst)). */
1925 tree newdst;
1926 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1927 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1929 if (!strlen_fn || !memcpy_fn)
1930 return false;
1932 /* If the length of the source string isn't computable don't
1933 split strcat into strlen and memcpy. */
1934 tree len = get_maxval_strlen (src, 0);
1935 if (! len)
1936 return false;
1938 /* Create strlen (dst). */
1939 gimple_seq stmts = NULL, stmts2;
1940 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1941 gimple_set_location (repl, loc);
1942 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1943 gimple_call_set_lhs (repl, newdst);
1944 gimple_seq_add_stmt_without_update (&stmts, repl);
1946 /* Create (dst p+ strlen (dst)). */
1947 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1948 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1949 gimple_seq_add_seq_without_update (&stmts, stmts2);
1951 len = fold_convert_loc (loc, size_type_node, len);
1952 len = size_binop_loc (loc, PLUS_EXPR, len,
1953 build_int_cst (size_type_node, 1));
1954 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1955 gimple_seq_add_seq_without_update (&stmts, stmts2);
1957 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1958 gimple_seq_add_stmt_without_update (&stmts, repl);
1959 if (gimple_call_lhs (stmt))
1961 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1962 gimple_seq_add_stmt_without_update (&stmts, repl);
1963 gsi_replace_with_seq_vops (gsi, stmts);
1964 /* gsi now points at the assignment to the lhs, get a
1965 stmt iterator to the memcpy call.
1966 ??? We can't use gsi_for_stmt as that doesn't work when the
1967 CFG isn't built yet. */
1968 gimple_stmt_iterator gsi2 = *gsi;
1969 gsi_prev (&gsi2);
1970 fold_stmt (&gsi2);
1972 else
1974 gsi_replace_with_seq_vops (gsi, stmts);
1975 fold_stmt (gsi);
1977 return true;
1980 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1981 are the arguments to the call. */
1983 static bool
1984 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1986 gimple *stmt = gsi_stmt (*gsi);
1987 tree dest = gimple_call_arg (stmt, 0);
1988 tree src = gimple_call_arg (stmt, 1);
1989 tree size = gimple_call_arg (stmt, 2);
1990 tree fn;
1991 const char *p;
1994 p = c_getstr (src);
1995 /* If the SRC parameter is "", return DEST. */
1996 if (p && *p == '\0')
1998 replace_call_with_value (gsi, dest);
1999 return true;
2002 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2003 return false;
2005 /* If __builtin_strcat_chk is used, assume strcat is available. */
2006 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2007 if (!fn)
2008 return false;
2010 gimple *repl = gimple_build_call (fn, 2, dest, src);
2011 replace_call_with_call_and_fold (gsi, repl);
2012 return true;
2015 /* Simplify a call to the strncat builtin. */
2017 static bool
2018 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2020 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2021 tree dst = gimple_call_arg (stmt, 0);
2022 tree src = gimple_call_arg (stmt, 1);
2023 tree len = gimple_call_arg (stmt, 2);
2025 const char *p = c_getstr (src);
2027 /* If the requested length is zero, or the src parameter string
2028 length is zero, return the dst parameter. */
2029 if (integer_zerop (len) || (p && *p == '\0'))
2031 replace_call_with_value (gsi, dst);
2032 return true;
2035 if (TREE_CODE (len) != INTEGER_CST || !p)
2036 return false;
2038 unsigned srclen = strlen (p);
2040 int cmpsrc = compare_tree_int (len, srclen);
2042 /* Return early if the requested len is less than the string length.
2043 Warnings will be issued elsewhere later. */
2044 if (cmpsrc < 0)
2045 return false;
2047 unsigned HOST_WIDE_INT dstsize;
2049 bool nowarn = gimple_no_warning_p (stmt);
2051 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2053 int cmpdst = compare_tree_int (len, dstsize);
2055 if (cmpdst >= 0)
2057 tree fndecl = gimple_call_fndecl (stmt);
2059 /* Strncat copies (at most) LEN bytes and always appends
2060 the terminating NUL so the specified bound should never
2061 be equal to (or greater than) the size of the destination.
2062 If it is, the copy could overflow. */
2063 location_t loc = gimple_location (stmt);
2064 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2065 cmpdst == 0
2066 ? G_("%G%qD specified bound %E equals "
2067 "destination size")
2068 : G_("%G%qD specified bound %E exceeds "
2069 "destination size %wu"),
2070 stmt, fndecl, len, dstsize);
2071 if (nowarn)
2072 gimple_set_no_warning (stmt, true);
2076 if (!nowarn && cmpsrc == 0)
2078 tree fndecl = gimple_call_fndecl (stmt);
2080 /* To avoid certain truncation the specified bound should also
2081 not be equal to (or less than) the length of the source. */
2082 location_t loc = gimple_location (stmt);
2083 if (warning_at (loc, OPT_Wstringop_overflow_,
2084 "%G%qD specified bound %E equals source length",
2085 stmt, fndecl, len))
2086 gimple_set_no_warning (stmt, true);
2089 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2091 /* If the replacement _DECL isn't initialized, don't do the
2092 transformation. */
2093 if (!fn)
2094 return false;
2096 /* Otherwise, emit a call to strcat. */
2097 gcall *repl = gimple_build_call (fn, 2, dst, src);
2098 replace_call_with_call_and_fold (gsi, repl);
2099 return true;
2102 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2103 LEN, and SIZE. */
2105 static bool
2106 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2108 gimple *stmt = gsi_stmt (*gsi);
2109 tree dest = gimple_call_arg (stmt, 0);
2110 tree src = gimple_call_arg (stmt, 1);
2111 tree len = gimple_call_arg (stmt, 2);
2112 tree size = gimple_call_arg (stmt, 3);
2113 tree fn;
2114 const char *p;
2116 p = c_getstr (src);
2117 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2118 if ((p && *p == '\0')
2119 || integer_zerop (len))
2121 replace_call_with_value (gsi, dest);
2122 return true;
2125 if (! tree_fits_uhwi_p (size))
2126 return false;
2128 if (! integer_all_onesp (size))
2130 tree src_len = c_strlen (src, 1);
2131 if (src_len
2132 && tree_fits_uhwi_p (src_len)
2133 && tree_fits_uhwi_p (len)
2134 && ! tree_int_cst_lt (len, src_len))
2136 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2137 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2138 if (!fn)
2139 return false;
2141 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2142 replace_call_with_call_and_fold (gsi, repl);
2143 return true;
2145 return false;
2148 /* If __builtin_strncat_chk is used, assume strncat is available. */
2149 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2150 if (!fn)
2151 return false;
2153 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2154 replace_call_with_call_and_fold (gsi, repl);
2155 return true;
2158 /* Build and append gimple statements to STMTS that would load a first
2159 character of a memory location identified by STR. LOC is location
2160 of the statement. */
2162 static tree
2163 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2165 tree var;
2167 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2168 tree cst_uchar_ptr_node
2169 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2170 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2172 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2173 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2174 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2176 gimple_assign_set_lhs (stmt, var);
2177 gimple_seq_add_stmt_without_update (stmts, stmt);
2179 return var;
2182 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2183 FCODE is the name of the builtin. */
2185 static bool
2186 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2188 gimple *stmt = gsi_stmt (*gsi);
2189 tree callee = gimple_call_fndecl (stmt);
2190 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2192 tree type = integer_type_node;
2193 tree str1 = gimple_call_arg (stmt, 0);
2194 tree str2 = gimple_call_arg (stmt, 1);
2195 tree lhs = gimple_call_lhs (stmt);
2196 HOST_WIDE_INT length = -1;
2198 /* Handle strncmp and strncasecmp functions. */
2199 if (gimple_call_num_args (stmt) == 3)
2201 tree len = gimple_call_arg (stmt, 2);
2202 if (tree_fits_uhwi_p (len))
2203 length = tree_to_uhwi (len);
2206 /* If the LEN parameter is zero, return zero. */
2207 if (length == 0)
2209 replace_call_with_value (gsi, integer_zero_node);
2210 return true;
2213 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2214 if (operand_equal_p (str1, str2, 0))
2216 replace_call_with_value (gsi, integer_zero_node);
2217 return true;
2220 const char *p1 = c_getstr (str1);
2221 const char *p2 = c_getstr (str2);
2223 /* For known strings, return an immediate value. */
2224 if (p1 && p2)
2226 int r = 0;
2227 bool known_result = false;
2229 switch (fcode)
2231 case BUILT_IN_STRCMP:
2233 r = strcmp (p1, p2);
2234 known_result = true;
2235 break;
2237 case BUILT_IN_STRNCMP:
2239 if (length == -1)
2240 break;
2241 r = strncmp (p1, p2, length);
2242 known_result = true;
2243 break;
2245 /* Only handleable situation is where the string are equal (result 0),
2246 which is already handled by operand_equal_p case. */
2247 case BUILT_IN_STRCASECMP:
2248 break;
2249 case BUILT_IN_STRNCASECMP:
2251 if (length == -1)
2252 break;
2253 r = strncmp (p1, p2, length);
2254 if (r == 0)
2255 known_result = true;
2256 break;
2258 default:
2259 gcc_unreachable ();
2262 if (known_result)
2264 replace_call_with_value (gsi, build_cmp_result (type, r));
2265 return true;
2269 bool nonzero_length = length >= 1
2270 || fcode == BUILT_IN_STRCMP
2271 || fcode == BUILT_IN_STRCASECMP;
2273 location_t loc = gimple_location (stmt);
2275 /* If the second arg is "", return *(const unsigned char*)arg1. */
2276 if (p2 && *p2 == '\0' && nonzero_length)
2278 gimple_seq stmts = NULL;
2279 tree var = gimple_load_first_char (loc, str1, &stmts);
2280 if (lhs)
2282 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2283 gimple_seq_add_stmt_without_update (&stmts, stmt);
2286 gsi_replace_with_seq_vops (gsi, stmts);
2287 return true;
2290 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2291 if (p1 && *p1 == '\0' && nonzero_length)
2293 gimple_seq stmts = NULL;
2294 tree var = gimple_load_first_char (loc, str2, &stmts);
2296 if (lhs)
2298 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2299 stmt = gimple_build_assign (c, NOP_EXPR, var);
2300 gimple_seq_add_stmt_without_update (&stmts, stmt);
2302 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2303 gimple_seq_add_stmt_without_update (&stmts, stmt);
2306 gsi_replace_with_seq_vops (gsi, stmts);
2307 return true;
2310 /* If len parameter is one, return an expression corresponding to
2311 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2312 if (fcode == BUILT_IN_STRNCMP && length == 1)
2314 gimple_seq stmts = NULL;
2315 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2316 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2318 if (lhs)
2320 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2321 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2322 gimple_seq_add_stmt_without_update (&stmts, convert1);
2324 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2325 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2326 gimple_seq_add_stmt_without_update (&stmts, convert2);
2328 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2329 gimple_seq_add_stmt_without_update (&stmts, stmt);
2332 gsi_replace_with_seq_vops (gsi, stmts);
2333 return true;
2336 /* If length is larger than the length of one constant string,
2337 replace strncmp with corresponding strcmp */
2338 if (fcode == BUILT_IN_STRNCMP
2339 && length > 0
2340 && ((p2 && (size_t) length > strlen (p2))
2341 || (p1 && (size_t) length > strlen (p1))))
2343 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2344 if (!fn)
2345 return false;
2346 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2347 replace_call_with_call_and_fold (gsi, repl);
2348 return true;
2351 return false;
2354 /* Fold a call to the memchr pointed by GSI iterator. */
2356 static bool
2357 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2359 gimple *stmt = gsi_stmt (*gsi);
2360 tree lhs = gimple_call_lhs (stmt);
2361 tree arg1 = gimple_call_arg (stmt, 0);
2362 tree arg2 = gimple_call_arg (stmt, 1);
2363 tree len = gimple_call_arg (stmt, 2);
2365 /* If the LEN parameter is zero, return zero. */
2366 if (integer_zerop (len))
2368 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2369 return true;
2372 char c;
2373 if (TREE_CODE (arg2) != INTEGER_CST
2374 || !tree_fits_uhwi_p (len)
2375 || !target_char_cst_p (arg2, &c))
2376 return false;
2378 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2379 unsigned HOST_WIDE_INT string_length;
2380 const char *p1 = c_getstr (arg1, &string_length);
2382 if (p1)
2384 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2385 if (r == NULL)
2387 if (length <= string_length)
2389 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2390 return true;
2393 else
2395 unsigned HOST_WIDE_INT offset = r - p1;
2396 gimple_seq stmts = NULL;
2397 if (lhs != NULL_TREE)
2399 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2400 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2401 arg1, offset_cst);
2402 gimple_seq_add_stmt_without_update (&stmts, stmt);
2404 else
2405 gimple_seq_add_stmt_without_update (&stmts,
2406 gimple_build_nop ());
2408 gsi_replace_with_seq_vops (gsi, stmts);
2409 return true;
2413 return false;
2416 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2417 to the call. IGNORE is true if the value returned
2418 by the builtin will be ignored. UNLOCKED is true is true if this
2419 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2420 the known length of the string. Return NULL_TREE if no simplification
2421 was possible. */
2423 static bool
2424 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2425 tree arg0, tree arg1,
2426 bool unlocked)
2428 gimple *stmt = gsi_stmt (*gsi);
2430 /* If we're using an unlocked function, assume the other unlocked
2431 functions exist explicitly. */
2432 tree const fn_fputc = (unlocked
2433 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2434 : builtin_decl_implicit (BUILT_IN_FPUTC));
2435 tree const fn_fwrite = (unlocked
2436 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2437 : builtin_decl_implicit (BUILT_IN_FWRITE));
2439 /* If the return value is used, don't do the transformation. */
2440 if (gimple_call_lhs (stmt))
2441 return false;
2443 /* Get the length of the string passed to fputs. If the length
2444 can't be determined, punt. */
2445 tree len = get_maxval_strlen (arg0, 0);
2446 if (!len
2447 || TREE_CODE (len) != INTEGER_CST)
2448 return false;
2450 switch (compare_tree_int (len, 1))
2452 case -1: /* length is 0, delete the call entirely . */
2453 replace_call_with_value (gsi, integer_zero_node);
2454 return true;
2456 case 0: /* length is 1, call fputc. */
2458 const char *p = c_getstr (arg0);
2459 if (p != NULL)
2461 if (!fn_fputc)
2462 return false;
2464 gimple *repl = gimple_build_call (fn_fputc, 2,
2465 build_int_cst
2466 (integer_type_node, p[0]), arg1);
2467 replace_call_with_call_and_fold (gsi, repl);
2468 return true;
2471 /* FALLTHROUGH */
2472 case 1: /* length is greater than 1, call fwrite. */
2474 /* If optimizing for size keep fputs. */
2475 if (optimize_function_for_size_p (cfun))
2476 return false;
2477 /* New argument list transforming fputs(string, stream) to
2478 fwrite(string, 1, len, stream). */
2479 if (!fn_fwrite)
2480 return false;
2482 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2483 size_one_node, len, arg1);
2484 replace_call_with_call_and_fold (gsi, repl);
2485 return true;
2487 default:
2488 gcc_unreachable ();
2490 return false;
2493 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2494 DEST, SRC, LEN, and SIZE are the arguments to the call.
2495 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2496 code of the builtin. If MAXLEN is not NULL, it is maximum length
2497 passed as third argument. */
2499 static bool
2500 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2501 tree dest, tree src, tree len, tree size,
2502 enum built_in_function fcode)
2504 gimple *stmt = gsi_stmt (*gsi);
2505 location_t loc = gimple_location (stmt);
2506 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2507 tree fn;
2509 /* If SRC and DEST are the same (and not volatile), return DEST
2510 (resp. DEST+LEN for __mempcpy_chk). */
2511 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2513 if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
2515 tree func = gimple_call_fndecl (stmt);
2517 warning_at (loc, OPT_Wrestrict,
2518 "%qD source argument is the same as destination",
2519 func);
2522 if (fcode != BUILT_IN_MEMPCPY_CHK)
2524 replace_call_with_value (gsi, dest);
2525 return true;
2527 else
2529 gimple_seq stmts = NULL;
2530 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2531 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2532 TREE_TYPE (dest), dest, len);
2533 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2534 replace_call_with_value (gsi, temp);
2535 return true;
2539 if (! tree_fits_uhwi_p (size))
2540 return false;
2542 tree maxlen = get_maxval_strlen (len, 2);
2543 if (! integer_all_onesp (size))
2545 if (! tree_fits_uhwi_p (len))
2547 /* If LEN is not constant, try MAXLEN too.
2548 For MAXLEN only allow optimizing into non-_ocs function
2549 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2550 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2552 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2554 /* (void) __mempcpy_chk () can be optimized into
2555 (void) __memcpy_chk (). */
2556 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2557 if (!fn)
2558 return false;
2560 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2561 replace_call_with_call_and_fold (gsi, repl);
2562 return true;
2564 return false;
2567 else
2568 maxlen = len;
2570 if (tree_int_cst_lt (size, maxlen))
2571 return false;
2574 fn = NULL_TREE;
2575 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2576 mem{cpy,pcpy,move,set} is available. */
2577 switch (fcode)
2579 case BUILT_IN_MEMCPY_CHK:
2580 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2581 break;
2582 case BUILT_IN_MEMPCPY_CHK:
2583 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2584 break;
2585 case BUILT_IN_MEMMOVE_CHK:
2586 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2587 break;
2588 case BUILT_IN_MEMSET_CHK:
2589 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2590 break;
2591 default:
2592 break;
2595 if (!fn)
2596 return false;
2598 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2603 /* Fold a call to the __st[rp]cpy_chk builtin.
2604 DEST, SRC, and SIZE are the arguments to the call.
2605 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2606 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2607 strings passed as second argument. */
2609 static bool
2610 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2611 tree dest,
2612 tree src, tree size,
2613 enum built_in_function fcode)
2615 gimple *stmt = gsi_stmt (*gsi);
2616 location_t loc = gimple_location (stmt);
2617 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2618 tree len, fn;
2620 /* If SRC and DEST are the same (and not volatile), return DEST. */
2621 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2623 tree func = gimple_call_fndecl (stmt);
2625 warning_at (loc, OPT_Wrestrict,
2626 "%qD source argument is the same as destination",
2627 func);
2629 replace_call_with_value (gsi, dest);
2630 return true;
2633 if (! tree_fits_uhwi_p (size))
2634 return false;
2636 tree maxlen = get_maxval_strlen (src, 1);
2637 if (! integer_all_onesp (size))
2639 len = c_strlen (src, 1);
2640 if (! len || ! tree_fits_uhwi_p (len))
2642 /* If LEN is not constant, try MAXLEN too.
2643 For MAXLEN only allow optimizing into non-_ocs function
2644 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2645 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2647 if (fcode == BUILT_IN_STPCPY_CHK)
2649 if (! ignore)
2650 return false;
2652 /* If return value of __stpcpy_chk is ignored,
2653 optimize into __strcpy_chk. */
2654 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2655 if (!fn)
2656 return false;
2658 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2659 replace_call_with_call_and_fold (gsi, repl);
2660 return true;
2663 if (! len || TREE_SIDE_EFFECTS (len))
2664 return false;
2666 /* If c_strlen returned something, but not a constant,
2667 transform __strcpy_chk into __memcpy_chk. */
2668 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2669 if (!fn)
2670 return false;
2672 gimple_seq stmts = NULL;
2673 len = gimple_convert (&stmts, loc, size_type_node, len);
2674 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2675 build_int_cst (size_type_node, 1));
2676 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2677 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2678 replace_call_with_call_and_fold (gsi, repl);
2679 return true;
2682 else
2683 maxlen = len;
2685 if (! tree_int_cst_lt (maxlen, size))
2686 return false;
2689 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2690 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2691 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2692 if (!fn)
2693 return false;
2695 gimple *repl = gimple_build_call (fn, 2, dest, src);
2696 replace_call_with_call_and_fold (gsi, repl);
2697 return true;
2700 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2701 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2702 length passed as third argument. IGNORE is true if return value can be
2703 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2705 static bool
2706 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2707 tree dest, tree src,
2708 tree len, tree size,
2709 enum built_in_function fcode)
2711 gimple *stmt = gsi_stmt (*gsi);
2712 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2713 tree fn;
2715 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2717 /* If return value of __stpncpy_chk is ignored,
2718 optimize into __strncpy_chk. */
2719 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2720 if (fn)
2722 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2723 replace_call_with_call_and_fold (gsi, repl);
2724 return true;
2728 if (! tree_fits_uhwi_p (size))
2729 return false;
2731 tree maxlen = get_maxval_strlen (len, 2);
2732 if (! integer_all_onesp (size))
2734 if (! tree_fits_uhwi_p (len))
2736 /* If LEN is not constant, try MAXLEN too.
2737 For MAXLEN only allow optimizing into non-_ocs function
2738 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2739 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2740 return false;
2742 else
2743 maxlen = len;
2745 if (tree_int_cst_lt (size, maxlen))
2746 return false;
2749 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2750 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2751 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2752 if (!fn)
2753 return false;
2755 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2756 replace_call_with_call_and_fold (gsi, repl);
2757 return true;
2760 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2761 Return NULL_TREE if no simplification can be made. */
2763 static bool
2764 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2766 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2767 location_t loc = gimple_location (stmt);
2768 tree dest = gimple_call_arg (stmt, 0);
2769 tree src = gimple_call_arg (stmt, 1);
2770 tree fn, len, lenp1;
2772 /* If the result is unused, replace stpcpy with strcpy. */
2773 if (gimple_call_lhs (stmt) == NULL_TREE)
2775 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2776 if (!fn)
2777 return false;
2778 gimple_call_set_fndecl (stmt, fn);
2779 fold_stmt (gsi);
2780 return true;
2783 len = c_strlen (src, 1);
2784 if (!len
2785 || TREE_CODE (len) != INTEGER_CST)
2786 return false;
2788 if (optimize_function_for_size_p (cfun)
2789 /* If length is zero it's small enough. */
2790 && !integer_zerop (len))
2791 return false;
2793 /* If the source has a known length replace stpcpy with memcpy. */
2794 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2795 if (!fn)
2796 return false;
2798 gimple_seq stmts = NULL;
2799 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2800 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2801 tem, build_int_cst (size_type_node, 1));
2802 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2803 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2804 gimple_set_vuse (repl, gimple_vuse (stmt));
2805 gimple_set_vdef (repl, gimple_vdef (stmt));
2806 if (gimple_vdef (repl)
2807 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2808 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2809 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2810 /* Replace the result with dest + len. */
2811 stmts = NULL;
2812 tem = gimple_convert (&stmts, loc, sizetype, len);
2813 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2814 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2815 POINTER_PLUS_EXPR, dest, tem);
2816 gsi_replace (gsi, ret, false);
2817 /* Finally fold the memcpy call. */
2818 gimple_stmt_iterator gsi2 = *gsi;
2819 gsi_prev (&gsi2);
2820 fold_stmt (&gsi2);
2821 return true;
2824 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2825 NULL_TREE if a normal call should be emitted rather than expanding
2826 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2827 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2828 passed as second argument. */
2830 static bool
2831 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2832 enum built_in_function fcode)
2834 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2835 tree dest, size, len, fn, fmt, flag;
2836 const char *fmt_str;
2838 /* Verify the required arguments in the original call. */
2839 if (gimple_call_num_args (stmt) < 5)
2840 return false;
2842 dest = gimple_call_arg (stmt, 0);
2843 len = gimple_call_arg (stmt, 1);
2844 flag = gimple_call_arg (stmt, 2);
2845 size = gimple_call_arg (stmt, 3);
2846 fmt = gimple_call_arg (stmt, 4);
2848 if (! tree_fits_uhwi_p (size))
2849 return false;
2851 if (! integer_all_onesp (size))
2853 tree maxlen = get_maxval_strlen (len, 2);
2854 if (! tree_fits_uhwi_p (len))
2856 /* If LEN is not constant, try MAXLEN too.
2857 For MAXLEN only allow optimizing into non-_ocs function
2858 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2859 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2860 return false;
2862 else
2863 maxlen = len;
2865 if (tree_int_cst_lt (size, maxlen))
2866 return false;
2869 if (!init_target_chars ())
2870 return false;
2872 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2873 or if format doesn't contain % chars or is "%s". */
2874 if (! integer_zerop (flag))
2876 fmt_str = c_getstr (fmt);
2877 if (fmt_str == NULL)
2878 return false;
2879 if (strchr (fmt_str, target_percent) != NULL
2880 && strcmp (fmt_str, target_percent_s))
2881 return false;
2884 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2885 available. */
2886 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2887 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2888 if (!fn)
2889 return false;
2891 /* Replace the called function and the first 5 argument by 3 retaining
2892 trailing varargs. */
2893 gimple_call_set_fndecl (stmt, fn);
2894 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2895 gimple_call_set_arg (stmt, 0, dest);
2896 gimple_call_set_arg (stmt, 1, len);
2897 gimple_call_set_arg (stmt, 2, fmt);
2898 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2899 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2900 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2901 fold_stmt (gsi);
2902 return true;
2905 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2906 Return NULL_TREE if a normal call should be emitted rather than
2907 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2908 or BUILT_IN_VSPRINTF_CHK. */
2910 static bool
2911 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2912 enum built_in_function fcode)
2914 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2915 tree dest, size, len, fn, fmt, flag;
2916 const char *fmt_str;
2917 unsigned nargs = gimple_call_num_args (stmt);
2919 /* Verify the required arguments in the original call. */
2920 if (nargs < 4)
2921 return false;
2922 dest = gimple_call_arg (stmt, 0);
2923 flag = gimple_call_arg (stmt, 1);
2924 size = gimple_call_arg (stmt, 2);
2925 fmt = gimple_call_arg (stmt, 3);
2927 if (! tree_fits_uhwi_p (size))
2928 return false;
2930 len = NULL_TREE;
2932 if (!init_target_chars ())
2933 return false;
2935 /* Check whether the format is a literal string constant. */
2936 fmt_str = c_getstr (fmt);
2937 if (fmt_str != NULL)
2939 /* If the format doesn't contain % args or %%, we know the size. */
2940 if (strchr (fmt_str, target_percent) == 0)
2942 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2943 len = build_int_cstu (size_type_node, strlen (fmt_str));
2945 /* If the format is "%s" and first ... argument is a string literal,
2946 we know the size too. */
2947 else if (fcode == BUILT_IN_SPRINTF_CHK
2948 && strcmp (fmt_str, target_percent_s) == 0)
2950 tree arg;
2952 if (nargs == 5)
2954 arg = gimple_call_arg (stmt, 4);
2955 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2957 len = c_strlen (arg, 1);
2958 if (! len || ! tree_fits_uhwi_p (len))
2959 len = NULL_TREE;
2965 if (! integer_all_onesp (size))
2967 if (! len || ! tree_int_cst_lt (len, size))
2968 return false;
2971 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2972 or if format doesn't contain % chars or is "%s". */
2973 if (! integer_zerop (flag))
2975 if (fmt_str == NULL)
2976 return false;
2977 if (strchr (fmt_str, target_percent) != NULL
2978 && strcmp (fmt_str, target_percent_s))
2979 return false;
2982 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2983 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2984 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2985 if (!fn)
2986 return false;
2988 /* Replace the called function and the first 4 argument by 2 retaining
2989 trailing varargs. */
2990 gimple_call_set_fndecl (stmt, fn);
2991 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2992 gimple_call_set_arg (stmt, 0, dest);
2993 gimple_call_set_arg (stmt, 1, fmt);
2994 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2995 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2996 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2997 fold_stmt (gsi);
2998 return true;
3001 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3002 ORIG may be null if this is a 2-argument call. We don't attempt to
3003 simplify calls with more than 3 arguments.
3005 Return true if simplification was possible, otherwise false. */
3007 bool
3008 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3010 gimple *stmt = gsi_stmt (*gsi);
3011 tree dest = gimple_call_arg (stmt, 0);
3012 tree fmt = gimple_call_arg (stmt, 1);
3013 tree orig = NULL_TREE;
3014 const char *fmt_str = NULL;
3016 /* Verify the required arguments in the original call. We deal with two
3017 types of sprintf() calls: 'sprintf (str, fmt)' and
3018 'sprintf (dest, "%s", orig)'. */
3019 if (gimple_call_num_args (stmt) > 3)
3020 return false;
3022 if (gimple_call_num_args (stmt) == 3)
3023 orig = gimple_call_arg (stmt, 2);
3025 /* Check whether the format is a literal string constant. */
3026 fmt_str = c_getstr (fmt);
3027 if (fmt_str == NULL)
3028 return false;
3030 if (!init_target_chars ())
3031 return false;
3033 /* If the format doesn't contain % args or %%, use strcpy. */
3034 if (strchr (fmt_str, target_percent) == NULL)
3036 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3038 if (!fn)
3039 return false;
3041 /* Don't optimize sprintf (buf, "abc", ptr++). */
3042 if (orig)
3043 return false;
3045 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3046 'format' is known to contain no % formats. */
3047 gimple_seq stmts = NULL;
3048 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3049 gimple_seq_add_stmt_without_update (&stmts, repl);
3050 if (gimple_call_lhs (stmt))
3052 repl = gimple_build_assign (gimple_call_lhs (stmt),
3053 build_int_cst (integer_type_node,
3054 strlen (fmt_str)));
3055 gimple_seq_add_stmt_without_update (&stmts, repl);
3056 gsi_replace_with_seq_vops (gsi, stmts);
3057 /* gsi now points at the assignment to the lhs, get a
3058 stmt iterator to the memcpy call.
3059 ??? We can't use gsi_for_stmt as that doesn't work when the
3060 CFG isn't built yet. */
3061 gimple_stmt_iterator gsi2 = *gsi;
3062 gsi_prev (&gsi2);
3063 fold_stmt (&gsi2);
3065 else
3067 gsi_replace_with_seq_vops (gsi, stmts);
3068 fold_stmt (gsi);
3070 return true;
3073 /* If the format is "%s", use strcpy if the result isn't used. */
3074 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3076 tree fn;
3077 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3079 if (!fn)
3080 return false;
3082 /* Don't crash on sprintf (str1, "%s"). */
3083 if (!orig)
3084 return false;
3086 tree orig_len = NULL_TREE;
3087 if (gimple_call_lhs (stmt))
3089 orig_len = get_maxval_strlen (orig, 0);
3090 if (!orig_len)
3091 return false;
3094 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3095 gimple_seq stmts = NULL;
3096 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3097 gimple_seq_add_stmt_without_update (&stmts, repl);
3098 if (gimple_call_lhs (stmt))
3100 if (!useless_type_conversion_p (integer_type_node,
3101 TREE_TYPE (orig_len)))
3102 orig_len = fold_convert (integer_type_node, orig_len);
3103 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3104 gimple_seq_add_stmt_without_update (&stmts, repl);
3105 gsi_replace_with_seq_vops (gsi, stmts);
3106 /* gsi now points at the assignment to the lhs, get a
3107 stmt iterator to the memcpy call.
3108 ??? We can't use gsi_for_stmt as that doesn't work when the
3109 CFG isn't built yet. */
3110 gimple_stmt_iterator gsi2 = *gsi;
3111 gsi_prev (&gsi2);
3112 fold_stmt (&gsi2);
3114 else
3116 gsi_replace_with_seq_vops (gsi, stmts);
3117 fold_stmt (gsi);
3119 return true;
3121 return false;
3124 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3125 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3126 attempt to simplify calls with more than 4 arguments.
3128 Return true if simplification was possible, otherwise false. */
3130 bool
3131 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3133 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3134 tree dest = gimple_call_arg (stmt, 0);
3135 tree destsize = gimple_call_arg (stmt, 1);
3136 tree fmt = gimple_call_arg (stmt, 2);
3137 tree orig = NULL_TREE;
3138 const char *fmt_str = NULL;
3140 if (gimple_call_num_args (stmt) > 4)
3141 return false;
3143 if (gimple_call_num_args (stmt) == 4)
3144 orig = gimple_call_arg (stmt, 3);
3146 if (!tree_fits_uhwi_p (destsize))
3147 return false;
3148 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3150 /* Check whether the format is a literal string constant. */
3151 fmt_str = c_getstr (fmt);
3152 if (fmt_str == NULL)
3153 return false;
3155 if (!init_target_chars ())
3156 return false;
3158 /* If the format doesn't contain % args or %%, use strcpy. */
3159 if (strchr (fmt_str, target_percent) == NULL)
3161 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3162 if (!fn)
3163 return false;
3165 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3166 if (orig)
3167 return false;
3169 /* We could expand this as
3170 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3171 or to
3172 memcpy (str, fmt_with_nul_at_cstm1, cst);
3173 but in the former case that might increase code size
3174 and in the latter case grow .rodata section too much.
3175 So punt for now. */
3176 size_t len = strlen (fmt_str);
3177 if (len >= destlen)
3178 return false;
3180 gimple_seq stmts = NULL;
3181 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3182 gimple_seq_add_stmt_without_update (&stmts, repl);
3183 if (gimple_call_lhs (stmt))
3185 repl = gimple_build_assign (gimple_call_lhs (stmt),
3186 build_int_cst (integer_type_node, len));
3187 gimple_seq_add_stmt_without_update (&stmts, repl);
3188 gsi_replace_with_seq_vops (gsi, stmts);
3189 /* gsi now points at the assignment to the lhs, get a
3190 stmt iterator to the memcpy call.
3191 ??? We can't use gsi_for_stmt as that doesn't work when the
3192 CFG isn't built yet. */
3193 gimple_stmt_iterator gsi2 = *gsi;
3194 gsi_prev (&gsi2);
3195 fold_stmt (&gsi2);
3197 else
3199 gsi_replace_with_seq_vops (gsi, stmts);
3200 fold_stmt (gsi);
3202 return true;
3205 /* If the format is "%s", use strcpy if the result isn't used. */
3206 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3208 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3209 if (!fn)
3210 return false;
3212 /* Don't crash on snprintf (str1, cst, "%s"). */
3213 if (!orig)
3214 return false;
3216 tree orig_len = get_maxval_strlen (orig, 0);
3217 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3218 return false;
3220 /* We could expand this as
3221 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3222 or to
3223 memcpy (str1, str2_with_nul_at_cstm1, cst);
3224 but in the former case that might increase code size
3225 and in the latter case grow .rodata section too much.
3226 So punt for now. */
3227 if (compare_tree_int (orig_len, destlen) >= 0)
3228 return false;
3230 /* Convert snprintf (str1, cst, "%s", str2) into
3231 strcpy (str1, str2) if strlen (str2) < cst. */
3232 gimple_seq stmts = NULL;
3233 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3234 gimple_seq_add_stmt_without_update (&stmts, repl);
3235 if (gimple_call_lhs (stmt))
3237 if (!useless_type_conversion_p (integer_type_node,
3238 TREE_TYPE (orig_len)))
3239 orig_len = fold_convert (integer_type_node, orig_len);
3240 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3241 gimple_seq_add_stmt_without_update (&stmts, repl);
3242 gsi_replace_with_seq_vops (gsi, stmts);
3243 /* gsi now points at the assignment to the lhs, get a
3244 stmt iterator to the memcpy call.
3245 ??? We can't use gsi_for_stmt as that doesn't work when the
3246 CFG isn't built yet. */
3247 gimple_stmt_iterator gsi2 = *gsi;
3248 gsi_prev (&gsi2);
3249 fold_stmt (&gsi2);
3251 else
3253 gsi_replace_with_seq_vops (gsi, stmts);
3254 fold_stmt (gsi);
3256 return true;
3258 return false;
3261 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3262 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3263 more than 3 arguments, and ARG may be null in the 2-argument case.
3265 Return NULL_TREE if no simplification was possible, otherwise return the
3266 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3267 code of the function to be simplified. */
3269 static bool
3270 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3271 tree fp, tree fmt, tree arg,
3272 enum built_in_function fcode)
3274 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3275 tree fn_fputc, fn_fputs;
3276 const char *fmt_str = NULL;
3278 /* If the return value is used, don't do the transformation. */
3279 if (gimple_call_lhs (stmt) != NULL_TREE)
3280 return false;
3282 /* Check whether the format is a literal string constant. */
3283 fmt_str = c_getstr (fmt);
3284 if (fmt_str == NULL)
3285 return false;
3287 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3289 /* If we're using an unlocked function, assume the other
3290 unlocked functions exist explicitly. */
3291 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3292 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3294 else
3296 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3297 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3300 if (!init_target_chars ())
3301 return false;
3303 /* If the format doesn't contain % args or %%, use strcpy. */
3304 if (strchr (fmt_str, target_percent) == NULL)
3306 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3307 && arg)
3308 return false;
3310 /* If the format specifier was "", fprintf does nothing. */
3311 if (fmt_str[0] == '\0')
3313 replace_call_with_value (gsi, NULL_TREE);
3314 return true;
3317 /* When "string" doesn't contain %, replace all cases of
3318 fprintf (fp, string) with fputs (string, fp). The fputs
3319 builtin will take care of special cases like length == 1. */
3320 if (fn_fputs)
3322 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3323 replace_call_with_call_and_fold (gsi, repl);
3324 return true;
3328 /* The other optimizations can be done only on the non-va_list variants. */
3329 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3330 return false;
3332 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3333 else if (strcmp (fmt_str, target_percent_s) == 0)
3335 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3336 return false;
3337 if (fn_fputs)
3339 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3340 replace_call_with_call_and_fold (gsi, repl);
3341 return true;
3345 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3346 else if (strcmp (fmt_str, target_percent_c) == 0)
3348 if (!arg
3349 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3350 return false;
3351 if (fn_fputc)
3353 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3354 replace_call_with_call_and_fold (gsi, repl);
3355 return true;
3359 return false;
3362 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3363 FMT and ARG are the arguments to the call; we don't fold cases with
3364 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3366 Return NULL_TREE if no simplification was possible, otherwise return the
3367 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3368 code of the function to be simplified. */
3370 static bool
3371 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3372 tree arg, enum built_in_function fcode)
3374 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3375 tree fn_putchar, fn_puts, newarg;
3376 const char *fmt_str = NULL;
3378 /* If the return value is used, don't do the transformation. */
3379 if (gimple_call_lhs (stmt) != NULL_TREE)
3380 return false;
3382 /* Check whether the format is a literal string constant. */
3383 fmt_str = c_getstr (fmt);
3384 if (fmt_str == NULL)
3385 return false;
3387 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3389 /* If we're using an unlocked function, assume the other
3390 unlocked functions exist explicitly. */
3391 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3392 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3394 else
3396 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3397 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3400 if (!init_target_chars ())
3401 return false;
3403 if (strcmp (fmt_str, target_percent_s) == 0
3404 || strchr (fmt_str, target_percent) == NULL)
3406 const char *str;
3408 if (strcmp (fmt_str, target_percent_s) == 0)
3410 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3411 return false;
3413 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3414 return false;
3416 str = c_getstr (arg);
3417 if (str == NULL)
3418 return false;
3420 else
3422 /* The format specifier doesn't contain any '%' characters. */
3423 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3424 && arg)
3425 return false;
3426 str = fmt_str;
3429 /* If the string was "", printf does nothing. */
3430 if (str[0] == '\0')
3432 replace_call_with_value (gsi, NULL_TREE);
3433 return true;
3436 /* If the string has length of 1, call putchar. */
3437 if (str[1] == '\0')
3439 /* Given printf("c"), (where c is any one character,)
3440 convert "c"[0] to an int and pass that to the replacement
3441 function. */
3442 newarg = build_int_cst (integer_type_node, str[0]);
3443 if (fn_putchar)
3445 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3446 replace_call_with_call_and_fold (gsi, repl);
3447 return true;
3450 else
3452 /* If the string was "string\n", call puts("string"). */
3453 size_t len = strlen (str);
3454 if ((unsigned char)str[len - 1] == target_newline
3455 && (size_t) (int) len == len
3456 && (int) len > 0)
3458 char *newstr;
3459 tree offset_node, string_cst;
3461 /* Create a NUL-terminated string that's one char shorter
3462 than the original, stripping off the trailing '\n'. */
3463 newarg = build_string_literal (len, str);
3464 string_cst = string_constant (newarg, &offset_node);
3465 gcc_checking_assert (string_cst
3466 && (TREE_STRING_LENGTH (string_cst)
3467 == (int) len)
3468 && integer_zerop (offset_node)
3469 && (unsigned char)
3470 TREE_STRING_POINTER (string_cst)[len - 1]
3471 == target_newline);
3472 /* build_string_literal creates a new STRING_CST,
3473 modify it in place to avoid double copying. */
3474 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3475 newstr[len - 1] = '\0';
3476 if (fn_puts)
3478 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3479 replace_call_with_call_and_fold (gsi, repl);
3480 return true;
3483 else
3484 /* We'd like to arrange to call fputs(string,stdout) here,
3485 but we need stdout and don't have a way to get it yet. */
3486 return false;
3490 /* The other optimizations can be done only on the non-va_list variants. */
3491 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3492 return false;
3494 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3495 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3497 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3498 return false;
3499 if (fn_puts)
3501 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3502 replace_call_with_call_and_fold (gsi, repl);
3503 return true;
3507 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3508 else if (strcmp (fmt_str, target_percent_c) == 0)
3510 if (!arg || ! useless_type_conversion_p (integer_type_node,
3511 TREE_TYPE (arg)))
3512 return false;
3513 if (fn_putchar)
3515 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3516 replace_call_with_call_and_fold (gsi, repl);
3517 return true;
3521 return false;
3526 /* Fold a call to __builtin_strlen with known length LEN. */
3528 static bool
3529 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3531 gimple *stmt = gsi_stmt (*gsi);
3533 wide_int minlen;
3534 wide_int maxlen;
3536 tree lenrange[2];
3537 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange)
3538 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3539 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3541 /* The range of lengths refers to either a single constant
3542 string or to the longest and shortest constant string
3543 referenced by the argument of the strlen() call, or to
3544 the strings that can possibly be stored in the arrays
3545 the argument refers to. */
3546 minlen = wi::to_wide (lenrange[0]);
3547 maxlen = wi::to_wide (lenrange[1]);
3549 else
3551 unsigned prec = TYPE_PRECISION (sizetype);
3553 minlen = wi::shwi (0, prec);
3554 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3557 if (minlen == maxlen)
3559 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3560 true, GSI_SAME_STMT);
3561 replace_call_with_value (gsi, lenrange[0]);
3562 return true;
3565 tree lhs = gimple_call_lhs (stmt);
3566 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3567 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3569 return false;
3572 /* Fold a call to __builtin_acc_on_device. */
3574 static bool
3575 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3577 /* Defer folding until we know which compiler we're in. */
3578 if (symtab->state != EXPANSION)
3579 return false;
3581 unsigned val_host = GOMP_DEVICE_HOST;
3582 unsigned val_dev = GOMP_DEVICE_NONE;
3584 #ifdef ACCEL_COMPILER
3585 val_host = GOMP_DEVICE_NOT_HOST;
3586 val_dev = ACCEL_COMPILER_acc_device;
3587 #endif
3589 location_t loc = gimple_location (gsi_stmt (*gsi));
3591 tree host_eq = make_ssa_name (boolean_type_node);
3592 gimple *host_ass = gimple_build_assign
3593 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3594 gimple_set_location (host_ass, loc);
3595 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3597 tree dev_eq = make_ssa_name (boolean_type_node);
3598 gimple *dev_ass = gimple_build_assign
3599 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3600 gimple_set_location (dev_ass, loc);
3601 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3603 tree result = make_ssa_name (boolean_type_node);
3604 gimple *result_ass = gimple_build_assign
3605 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3606 gimple_set_location (result_ass, loc);
3607 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3609 replace_call_with_value (gsi, result);
3611 return true;
3614 /* Fold realloc (0, n) -> malloc (n). */
3616 static bool
3617 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3619 gimple *stmt = gsi_stmt (*gsi);
3620 tree arg = gimple_call_arg (stmt, 0);
3621 tree size = gimple_call_arg (stmt, 1);
3623 if (operand_equal_p (arg, null_pointer_node, 0))
3625 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3626 if (fn_malloc)
3628 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3629 replace_call_with_call_and_fold (gsi, repl);
3630 return true;
3633 return false;
3636 /* Fold the non-target builtin at *GSI and return whether any simplification
3637 was made. */
3639 static bool
3640 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3642 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3643 tree callee = gimple_call_fndecl (stmt);
3645 /* Give up for always_inline inline builtins until they are
3646 inlined. */
3647 if (avoid_folding_inline_builtin (callee))
3648 return false;
3650 unsigned n = gimple_call_num_args (stmt);
3651 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3652 switch (fcode)
3654 case BUILT_IN_BCMP:
3655 return gimple_fold_builtin_bcmp (gsi);
3656 case BUILT_IN_BCOPY:
3657 return gimple_fold_builtin_bcopy (gsi);
3658 case BUILT_IN_BZERO:
3659 return gimple_fold_builtin_bzero (gsi);
3661 case BUILT_IN_MEMSET:
3662 return gimple_fold_builtin_memset (gsi,
3663 gimple_call_arg (stmt, 1),
3664 gimple_call_arg (stmt, 2));
3665 case BUILT_IN_MEMCPY:
3666 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3667 gimple_call_arg (stmt, 1), 0);
3668 case BUILT_IN_MEMPCPY:
3669 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3670 gimple_call_arg (stmt, 1), 1);
3671 case BUILT_IN_MEMMOVE:
3672 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3673 gimple_call_arg (stmt, 1), 3);
3674 case BUILT_IN_SPRINTF_CHK:
3675 case BUILT_IN_VSPRINTF_CHK:
3676 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3677 case BUILT_IN_STRCAT_CHK:
3678 return gimple_fold_builtin_strcat_chk (gsi);
3679 case BUILT_IN_STRNCAT_CHK:
3680 return gimple_fold_builtin_strncat_chk (gsi);
3681 case BUILT_IN_STRLEN:
3682 return gimple_fold_builtin_strlen (gsi);
3683 case BUILT_IN_STRCPY:
3684 return gimple_fold_builtin_strcpy (gsi,
3685 gimple_call_arg (stmt, 0),
3686 gimple_call_arg (stmt, 1));
3687 case BUILT_IN_STRNCPY:
3688 return gimple_fold_builtin_strncpy (gsi,
3689 gimple_call_arg (stmt, 0),
3690 gimple_call_arg (stmt, 1),
3691 gimple_call_arg (stmt, 2));
3692 case BUILT_IN_STRCAT:
3693 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3694 gimple_call_arg (stmt, 1));
3695 case BUILT_IN_STRNCAT:
3696 return gimple_fold_builtin_strncat (gsi);
3697 case BUILT_IN_INDEX:
3698 case BUILT_IN_STRCHR:
3699 return gimple_fold_builtin_strchr (gsi, false);
3700 case BUILT_IN_RINDEX:
3701 case BUILT_IN_STRRCHR:
3702 return gimple_fold_builtin_strchr (gsi, true);
3703 case BUILT_IN_STRSTR:
3704 return gimple_fold_builtin_strstr (gsi);
3705 case BUILT_IN_STRCMP:
3706 case BUILT_IN_STRCASECMP:
3707 case BUILT_IN_STRNCMP:
3708 case BUILT_IN_STRNCASECMP:
3709 return gimple_fold_builtin_string_compare (gsi);
3710 case BUILT_IN_MEMCHR:
3711 return gimple_fold_builtin_memchr (gsi);
3712 case BUILT_IN_FPUTS:
3713 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3714 gimple_call_arg (stmt, 1), false);
3715 case BUILT_IN_FPUTS_UNLOCKED:
3716 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3717 gimple_call_arg (stmt, 1), true);
3718 case BUILT_IN_MEMCPY_CHK:
3719 case BUILT_IN_MEMPCPY_CHK:
3720 case BUILT_IN_MEMMOVE_CHK:
3721 case BUILT_IN_MEMSET_CHK:
3722 return gimple_fold_builtin_memory_chk (gsi,
3723 gimple_call_arg (stmt, 0),
3724 gimple_call_arg (stmt, 1),
3725 gimple_call_arg (stmt, 2),
3726 gimple_call_arg (stmt, 3),
3727 fcode);
3728 case BUILT_IN_STPCPY:
3729 return gimple_fold_builtin_stpcpy (gsi);
3730 case BUILT_IN_STRCPY_CHK:
3731 case BUILT_IN_STPCPY_CHK:
3732 return gimple_fold_builtin_stxcpy_chk (gsi,
3733 gimple_call_arg (stmt, 0),
3734 gimple_call_arg (stmt, 1),
3735 gimple_call_arg (stmt, 2),
3736 fcode);
3737 case BUILT_IN_STRNCPY_CHK:
3738 case BUILT_IN_STPNCPY_CHK:
3739 return gimple_fold_builtin_stxncpy_chk (gsi,
3740 gimple_call_arg (stmt, 0),
3741 gimple_call_arg (stmt, 1),
3742 gimple_call_arg (stmt, 2),
3743 gimple_call_arg (stmt, 3),
3744 fcode);
3745 case BUILT_IN_SNPRINTF_CHK:
3746 case BUILT_IN_VSNPRINTF_CHK:
3747 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3749 case BUILT_IN_FPRINTF:
3750 case BUILT_IN_FPRINTF_UNLOCKED:
3751 case BUILT_IN_VFPRINTF:
3752 if (n == 2 || n == 3)
3753 return gimple_fold_builtin_fprintf (gsi,
3754 gimple_call_arg (stmt, 0),
3755 gimple_call_arg (stmt, 1),
3756 n == 3
3757 ? gimple_call_arg (stmt, 2)
3758 : NULL_TREE,
3759 fcode);
3760 break;
3761 case BUILT_IN_FPRINTF_CHK:
3762 case BUILT_IN_VFPRINTF_CHK:
3763 if (n == 3 || n == 4)
3764 return gimple_fold_builtin_fprintf (gsi,
3765 gimple_call_arg (stmt, 0),
3766 gimple_call_arg (stmt, 2),
3767 n == 4
3768 ? gimple_call_arg (stmt, 3)
3769 : NULL_TREE,
3770 fcode);
3771 break;
3772 case BUILT_IN_PRINTF:
3773 case BUILT_IN_PRINTF_UNLOCKED:
3774 case BUILT_IN_VPRINTF:
3775 if (n == 1 || n == 2)
3776 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3777 n == 2
3778 ? gimple_call_arg (stmt, 1)
3779 : NULL_TREE, fcode);
3780 break;
3781 case BUILT_IN_PRINTF_CHK:
3782 case BUILT_IN_VPRINTF_CHK:
3783 if (n == 2 || n == 3)
3784 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3785 n == 3
3786 ? gimple_call_arg (stmt, 2)
3787 : NULL_TREE, fcode);
3788 break;
3789 case BUILT_IN_ACC_ON_DEVICE:
3790 return gimple_fold_builtin_acc_on_device (gsi,
3791 gimple_call_arg (stmt, 0));
3792 case BUILT_IN_REALLOC:
3793 return gimple_fold_builtin_realloc (gsi);
3795 default:;
3798 /* Try the generic builtin folder. */
3799 bool ignore = (gimple_call_lhs (stmt) == NULL);
3800 tree result = fold_call_stmt (stmt, ignore);
3801 if (result)
3803 if (ignore)
3804 STRIP_NOPS (result);
3805 else
3806 result = fold_convert (gimple_call_return_type (stmt), result);
3807 if (!update_call_from_tree (gsi, result))
3808 gimplify_and_update_call_from_tree (gsi, result);
3809 return true;
3812 return false;
3815 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3816 function calls to constants, where possible. */
3818 static tree
3819 fold_internal_goacc_dim (const gimple *call)
3821 int axis = oacc_get_ifn_dim_arg (call);
3822 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3823 tree result = NULL_TREE;
3824 tree type = TREE_TYPE (gimple_call_lhs (call));
3826 switch (gimple_call_internal_fn (call))
3828 case IFN_GOACC_DIM_POS:
3829 /* If the size is 1, we know the answer. */
3830 if (size == 1)
3831 result = build_int_cst (type, 0);
3832 break;
3833 case IFN_GOACC_DIM_SIZE:
3834 /* If the size is not dynamic, we know the answer. */
3835 if (size)
3836 result = build_int_cst (type, size);
3837 break;
3838 default:
3839 break;
3842 return result;
3845 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3846 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3847 &var where var is only addressable because of such calls. */
3849 bool
3850 optimize_atomic_compare_exchange_p (gimple *stmt)
3852 if (gimple_call_num_args (stmt) != 6
3853 || !flag_inline_atomics
3854 || !optimize
3855 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3856 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3857 || !gimple_vdef (stmt)
3858 || !gimple_vuse (stmt))
3859 return false;
3861 tree fndecl = gimple_call_fndecl (stmt);
3862 switch (DECL_FUNCTION_CODE (fndecl))
3864 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3865 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3866 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3867 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3868 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3869 break;
3870 default:
3871 return false;
3874 tree expected = gimple_call_arg (stmt, 1);
3875 if (TREE_CODE (expected) != ADDR_EXPR
3876 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3877 return false;
3879 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3880 if (!is_gimple_reg_type (etype)
3881 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3882 || TREE_THIS_VOLATILE (etype)
3883 || VECTOR_TYPE_P (etype)
3884 || TREE_CODE (etype) == COMPLEX_TYPE
3885 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3886 might not preserve all the bits. See PR71716. */
3887 || SCALAR_FLOAT_TYPE_P (etype)
3888 || maybe_ne (TYPE_PRECISION (etype),
3889 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3890 return false;
3892 tree weak = gimple_call_arg (stmt, 3);
3893 if (!integer_zerop (weak) && !integer_onep (weak))
3894 return false;
3896 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3897 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3898 machine_mode mode = TYPE_MODE (itype);
3900 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3901 == CODE_FOR_nothing
3902 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3903 return false;
3905 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3906 return false;
3908 return true;
3911 /* Fold
3912 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3913 into
3914 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3915 i = IMAGPART_EXPR <t>;
3916 r = (_Bool) i;
3917 e = REALPART_EXPR <t>; */
3919 void
3920 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3922 gimple *stmt = gsi_stmt (*gsi);
3923 tree fndecl = gimple_call_fndecl (stmt);
3924 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3925 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3926 tree ctype = build_complex_type (itype);
3927 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3928 bool throws = false;
3929 edge e = NULL;
3930 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3931 expected);
3932 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3933 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3934 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3936 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3937 build1 (VIEW_CONVERT_EXPR, itype,
3938 gimple_assign_lhs (g)));
3939 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3941 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3942 + int_size_in_bytes (itype);
3943 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3944 gimple_call_arg (stmt, 0),
3945 gimple_assign_lhs (g),
3946 gimple_call_arg (stmt, 2),
3947 build_int_cst (integer_type_node, flag),
3948 gimple_call_arg (stmt, 4),
3949 gimple_call_arg (stmt, 5));
3950 tree lhs = make_ssa_name (ctype);
3951 gimple_call_set_lhs (g, lhs);
3952 gimple_set_vdef (g, gimple_vdef (stmt));
3953 gimple_set_vuse (g, gimple_vuse (stmt));
3954 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3955 tree oldlhs = gimple_call_lhs (stmt);
3956 if (stmt_can_throw_internal (stmt))
3958 throws = true;
3959 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3961 gimple_call_set_nothrow (as_a <gcall *> (g),
3962 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3963 gimple_call_set_lhs (stmt, NULL_TREE);
3964 gsi_replace (gsi, g, true);
3965 if (oldlhs)
3967 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3968 build1 (IMAGPART_EXPR, itype, lhs));
3969 if (throws)
3971 gsi_insert_on_edge_immediate (e, g);
3972 *gsi = gsi_for_stmt (g);
3974 else
3975 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3976 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3977 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3979 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3980 build1 (REALPART_EXPR, itype, lhs));
3981 if (throws && oldlhs == NULL_TREE)
3983 gsi_insert_on_edge_immediate (e, g);
3984 *gsi = gsi_for_stmt (g);
3986 else
3987 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3988 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3990 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3991 VIEW_CONVERT_EXPR,
3992 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3993 gimple_assign_lhs (g)));
3994 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3996 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3997 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3998 *gsi = gsiret;
4001 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4002 doesn't fit into TYPE. The test for overflow should be regardless of
4003 -fwrapv, and even for unsigned types. */
4005 bool
4006 arith_overflowed_p (enum tree_code code, const_tree type,
4007 const_tree arg0, const_tree arg1)
4009 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
4010 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
4011 widest2_int_cst;
4012 widest2_int warg0 = widest2_int_cst (arg0);
4013 widest2_int warg1 = widest2_int_cst (arg1);
4014 widest2_int wres;
4015 switch (code)
4017 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4018 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4019 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4020 default: gcc_unreachable ();
4022 signop sign = TYPE_SIGN (type);
4023 if (sign == UNSIGNED && wi::neg_p (wres))
4024 return true;
4025 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4028 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4029 The statement may be replaced by another statement, e.g., if the call
4030 simplifies to a constant value. Return true if any changes were made.
4031 It is assumed that the operands have been previously folded. */
4033 static bool
4034 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4036 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4037 tree callee;
4038 bool changed = false;
4039 unsigned i;
4041 /* Fold *& in call arguments. */
4042 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4043 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4045 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4046 if (tmp)
4048 gimple_call_set_arg (stmt, i, tmp);
4049 changed = true;
4053 /* Check for virtual calls that became direct calls. */
4054 callee = gimple_call_fn (stmt);
4055 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4057 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4059 if (dump_file && virtual_method_call_p (callee)
4060 && !possible_polymorphic_call_target_p
4061 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4062 (OBJ_TYPE_REF_EXPR (callee)))))
4064 fprintf (dump_file,
4065 "Type inheritance inconsistent devirtualization of ");
4066 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4067 fprintf (dump_file, " to ");
4068 print_generic_expr (dump_file, callee, TDF_SLIM);
4069 fprintf (dump_file, "\n");
4072 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4073 changed = true;
4075 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4077 bool final;
4078 vec <cgraph_node *>targets
4079 = possible_polymorphic_call_targets (callee, stmt, &final);
4080 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4082 tree lhs = gimple_call_lhs (stmt);
4083 if (dump_enabled_p ())
4085 location_t loc = gimple_location_safe (stmt);
4086 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4087 "folding virtual function call to %s\n",
4088 targets.length () == 1
4089 ? targets[0]->name ()
4090 : "__builtin_unreachable");
4092 if (targets.length () == 1)
4094 tree fndecl = targets[0]->decl;
4095 gimple_call_set_fndecl (stmt, fndecl);
4096 changed = true;
4097 /* If changing the call to __cxa_pure_virtual
4098 or similar noreturn function, adjust gimple_call_fntype
4099 too. */
4100 if (gimple_call_noreturn_p (stmt)
4101 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4102 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4103 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4104 == void_type_node))
4105 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4106 /* If the call becomes noreturn, remove the lhs. */
4107 if (lhs
4108 && gimple_call_noreturn_p (stmt)
4109 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4110 || should_remove_lhs_p (lhs)))
4112 if (TREE_CODE (lhs) == SSA_NAME)
4114 tree var = create_tmp_var (TREE_TYPE (lhs));
4115 tree def = get_or_create_ssa_default_def (cfun, var);
4116 gimple *new_stmt = gimple_build_assign (lhs, def);
4117 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4119 gimple_call_set_lhs (stmt, NULL_TREE);
4121 maybe_remove_unused_call_args (cfun, stmt);
4123 else
4125 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4126 gimple *new_stmt = gimple_build_call (fndecl, 0);
4127 gimple_set_location (new_stmt, gimple_location (stmt));
4128 /* If the call had a SSA name as lhs morph that into
4129 an uninitialized value. */
4130 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4132 tree var = create_tmp_var (TREE_TYPE (lhs));
4133 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4134 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4135 set_ssa_default_def (cfun, var, lhs);
4137 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4138 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4139 gsi_replace (gsi, new_stmt, false);
4140 return true;
4146 /* Check for indirect calls that became direct calls, and then
4147 no longer require a static chain. */
4148 if (gimple_call_chain (stmt))
4150 tree fn = gimple_call_fndecl (stmt);
4151 if (fn && !DECL_STATIC_CHAIN (fn))
4153 gimple_call_set_chain (stmt, NULL);
4154 changed = true;
4156 else
4158 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4159 if (tmp)
4161 gimple_call_set_chain (stmt, tmp);
4162 changed = true;
4167 if (inplace)
4168 return changed;
4170 /* Check for builtins that CCP can handle using information not
4171 available in the generic fold routines. */
4172 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4174 if (gimple_fold_builtin (gsi))
4175 changed = true;
4177 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4179 changed |= targetm.gimple_fold_builtin (gsi);
4181 else if (gimple_call_internal_p (stmt))
4183 enum tree_code subcode = ERROR_MARK;
4184 tree result = NULL_TREE;
4185 bool cplx_result = false;
4186 tree overflow = NULL_TREE;
4187 switch (gimple_call_internal_fn (stmt))
4189 case IFN_BUILTIN_EXPECT:
4190 result = fold_builtin_expect (gimple_location (stmt),
4191 gimple_call_arg (stmt, 0),
4192 gimple_call_arg (stmt, 1),
4193 gimple_call_arg (stmt, 2));
4194 break;
4195 case IFN_UBSAN_OBJECT_SIZE:
4197 tree offset = gimple_call_arg (stmt, 1);
4198 tree objsize = gimple_call_arg (stmt, 2);
4199 if (integer_all_onesp (objsize)
4200 || (TREE_CODE (offset) == INTEGER_CST
4201 && TREE_CODE (objsize) == INTEGER_CST
4202 && tree_int_cst_le (offset, objsize)))
4204 replace_call_with_value (gsi, NULL_TREE);
4205 return true;
4208 break;
4209 case IFN_UBSAN_PTR:
4210 if (integer_zerop (gimple_call_arg (stmt, 1)))
4212 replace_call_with_value (gsi, NULL_TREE);
4213 return true;
4215 break;
4216 case IFN_UBSAN_BOUNDS:
4218 tree index = gimple_call_arg (stmt, 1);
4219 tree bound = gimple_call_arg (stmt, 2);
4220 if (TREE_CODE (index) == INTEGER_CST
4221 && TREE_CODE (bound) == INTEGER_CST)
4223 index = fold_convert (TREE_TYPE (bound), index);
4224 if (TREE_CODE (index) == INTEGER_CST
4225 && tree_int_cst_le (index, bound))
4227 replace_call_with_value (gsi, NULL_TREE);
4228 return true;
4232 break;
4233 case IFN_GOACC_DIM_SIZE:
4234 case IFN_GOACC_DIM_POS:
4235 result = fold_internal_goacc_dim (stmt);
4236 break;
4237 case IFN_UBSAN_CHECK_ADD:
4238 subcode = PLUS_EXPR;
4239 break;
4240 case IFN_UBSAN_CHECK_SUB:
4241 subcode = MINUS_EXPR;
4242 break;
4243 case IFN_UBSAN_CHECK_MUL:
4244 subcode = MULT_EXPR;
4245 break;
4246 case IFN_ADD_OVERFLOW:
4247 subcode = PLUS_EXPR;
4248 cplx_result = true;
4249 break;
4250 case IFN_SUB_OVERFLOW:
4251 subcode = MINUS_EXPR;
4252 cplx_result = true;
4253 break;
4254 case IFN_MUL_OVERFLOW:
4255 subcode = MULT_EXPR;
4256 cplx_result = true;
4257 break;
4258 default:
4259 break;
4261 if (subcode != ERROR_MARK)
4263 tree arg0 = gimple_call_arg (stmt, 0);
4264 tree arg1 = gimple_call_arg (stmt, 1);
4265 tree type = TREE_TYPE (arg0);
4266 if (cplx_result)
4268 tree lhs = gimple_call_lhs (stmt);
4269 if (lhs == NULL_TREE)
4270 type = NULL_TREE;
4271 else
4272 type = TREE_TYPE (TREE_TYPE (lhs));
4274 if (type == NULL_TREE)
4276 /* x = y + 0; x = y - 0; x = y * 0; */
4277 else if (integer_zerop (arg1))
4278 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4279 /* x = 0 + y; x = 0 * y; */
4280 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4281 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4282 /* x = y - y; */
4283 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4284 result = integer_zero_node;
4285 /* x = y * 1; x = 1 * y; */
4286 else if (subcode == MULT_EXPR && integer_onep (arg1))
4287 result = arg0;
4288 else if (subcode == MULT_EXPR && integer_onep (arg0))
4289 result = arg1;
4290 else if (TREE_CODE (arg0) == INTEGER_CST
4291 && TREE_CODE (arg1) == INTEGER_CST)
4293 if (cplx_result)
4294 result = int_const_binop (subcode, fold_convert (type, arg0),
4295 fold_convert (type, arg1));
4296 else
4297 result = int_const_binop (subcode, arg0, arg1);
4298 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4300 if (cplx_result)
4301 overflow = build_one_cst (type);
4302 else
4303 result = NULL_TREE;
4306 if (result)
4308 if (result == integer_zero_node)
4309 result = build_zero_cst (type);
4310 else if (cplx_result && TREE_TYPE (result) != type)
4312 if (TREE_CODE (result) == INTEGER_CST)
4314 if (arith_overflowed_p (PLUS_EXPR, type, result,
4315 integer_zero_node))
4316 overflow = build_one_cst (type);
4318 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4319 && TYPE_UNSIGNED (type))
4320 || (TYPE_PRECISION (type)
4321 < (TYPE_PRECISION (TREE_TYPE (result))
4322 + (TYPE_UNSIGNED (TREE_TYPE (result))
4323 && !TYPE_UNSIGNED (type)))))
4324 result = NULL_TREE;
4325 if (result)
4326 result = fold_convert (type, result);
4331 if (result)
4333 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4334 result = drop_tree_overflow (result);
4335 if (cplx_result)
4337 if (overflow == NULL_TREE)
4338 overflow = build_zero_cst (TREE_TYPE (result));
4339 tree ctype = build_complex_type (TREE_TYPE (result));
4340 if (TREE_CODE (result) == INTEGER_CST
4341 && TREE_CODE (overflow) == INTEGER_CST)
4342 result = build_complex (ctype, result, overflow);
4343 else
4344 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4345 ctype, result, overflow);
4347 if (!update_call_from_tree (gsi, result))
4348 gimplify_and_update_call_from_tree (gsi, result);
4349 changed = true;
4353 return changed;
4357 /* Return true whether NAME has a use on STMT. */
4359 static bool
4360 has_use_on_stmt (tree name, gimple *stmt)
4362 imm_use_iterator iter;
4363 use_operand_p use_p;
4364 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4365 if (USE_STMT (use_p) == stmt)
4366 return true;
4367 return false;
4370 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4371 gimple_simplify.
4373 Replaces *GSI with the simplification result in RCODE and OPS
4374 and the associated statements in *SEQ. Does the replacement
4375 according to INPLACE and returns true if the operation succeeded. */
4377 static bool
4378 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4379 code_helper rcode, tree *ops,
4380 gimple_seq *seq, bool inplace)
4382 gimple *stmt = gsi_stmt (*gsi);
4384 /* Play safe and do not allow abnormals to be mentioned in
4385 newly created statements. See also maybe_push_res_to_seq.
4386 As an exception allow such uses if there was a use of the
4387 same SSA name on the old stmt. */
4388 if ((TREE_CODE (ops[0]) == SSA_NAME
4389 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4390 && !has_use_on_stmt (ops[0], stmt))
4391 || (ops[1]
4392 && TREE_CODE (ops[1]) == SSA_NAME
4393 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4394 && !has_use_on_stmt (ops[1], stmt))
4395 || (ops[2]
4396 && TREE_CODE (ops[2]) == SSA_NAME
4397 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4398 && !has_use_on_stmt (ops[2], stmt))
4399 || (COMPARISON_CLASS_P (ops[0])
4400 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4401 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4402 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4403 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4404 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4405 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4406 return false;
4408 /* Don't insert new statements when INPLACE is true, even if we could
4409 reuse STMT for the final statement. */
4410 if (inplace && !gimple_seq_empty_p (*seq))
4411 return false;
4413 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4415 gcc_assert (rcode.is_tree_code ());
4416 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4417 /* GIMPLE_CONDs condition may not throw. */
4418 && (!flag_exceptions
4419 || !cfun->can_throw_non_call_exceptions
4420 || !operation_could_trap_p (rcode,
4421 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4422 false, NULL_TREE)))
4423 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4424 else if (rcode == SSA_NAME)
4425 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4426 build_zero_cst (TREE_TYPE (ops[0])));
4427 else if (rcode == INTEGER_CST)
4429 if (integer_zerop (ops[0]))
4430 gimple_cond_make_false (cond_stmt);
4431 else
4432 gimple_cond_make_true (cond_stmt);
4434 else if (!inplace)
4436 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4437 ops, seq);
4438 if (!res)
4439 return false;
4440 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4441 build_zero_cst (TREE_TYPE (res)));
4443 else
4444 return false;
4445 if (dump_file && (dump_flags & TDF_DETAILS))
4447 fprintf (dump_file, "gimple_simplified to ");
4448 if (!gimple_seq_empty_p (*seq))
4449 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4450 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4451 0, TDF_SLIM);
4453 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4454 return true;
4456 else if (is_gimple_assign (stmt)
4457 && rcode.is_tree_code ())
4459 if (!inplace
4460 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4462 maybe_build_generic_op (rcode,
4463 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4464 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4465 if (dump_file && (dump_flags & TDF_DETAILS))
4467 fprintf (dump_file, "gimple_simplified to ");
4468 if (!gimple_seq_empty_p (*seq))
4469 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4470 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4471 0, TDF_SLIM);
4473 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4474 return true;
4477 else if (rcode.is_fn_code ()
4478 && gimple_call_combined_fn (stmt) == rcode)
4480 unsigned i;
4481 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4483 gcc_assert (ops[i] != NULL_TREE);
4484 gimple_call_set_arg (stmt, i, ops[i]);
4486 if (i < 3)
4487 gcc_assert (ops[i] == NULL_TREE);
4488 if (dump_file && (dump_flags & TDF_DETAILS))
4490 fprintf (dump_file, "gimple_simplified to ");
4491 if (!gimple_seq_empty_p (*seq))
4492 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4493 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4495 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4496 return true;
4498 else if (!inplace)
4500 if (gimple_has_lhs (stmt))
4502 tree lhs = gimple_get_lhs (stmt);
4503 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4504 ops, seq, lhs))
4505 return false;
4506 if (dump_file && (dump_flags & TDF_DETAILS))
4508 fprintf (dump_file, "gimple_simplified to ");
4509 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4511 gsi_replace_with_seq_vops (gsi, *seq);
4512 return true;
4514 else
4515 gcc_unreachable ();
4518 return false;
4521 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4523 static bool
4524 maybe_canonicalize_mem_ref_addr (tree *t)
4526 bool res = false;
4528 if (TREE_CODE (*t) == ADDR_EXPR)
4529 t = &TREE_OPERAND (*t, 0);
4531 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4532 generic vector extension. The actual vector referenced is
4533 view-converted to an array type for this purpose. If the index
4534 is constant the canonical representation in the middle-end is a
4535 BIT_FIELD_REF so re-write the former to the latter here. */
4536 if (TREE_CODE (*t) == ARRAY_REF
4537 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4538 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4539 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4541 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4542 if (VECTOR_TYPE_P (vtype))
4544 tree low = array_ref_low_bound (*t);
4545 if (TREE_CODE (low) == INTEGER_CST)
4547 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4549 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4550 wi::to_widest (low));
4551 idx = wi::mul (idx, wi::to_widest
4552 (TYPE_SIZE (TREE_TYPE (*t))));
4553 widest_int ext
4554 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4555 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4557 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4558 TREE_TYPE (*t),
4559 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4560 TYPE_SIZE (TREE_TYPE (*t)),
4561 wide_int_to_tree (bitsizetype, idx));
4562 res = true;
4569 while (handled_component_p (*t))
4570 t = &TREE_OPERAND (*t, 0);
4572 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4573 of invariant addresses into a SSA name MEM_REF address. */
4574 if (TREE_CODE (*t) == MEM_REF
4575 || TREE_CODE (*t) == TARGET_MEM_REF)
4577 tree addr = TREE_OPERAND (*t, 0);
4578 if (TREE_CODE (addr) == ADDR_EXPR
4579 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4580 || handled_component_p (TREE_OPERAND (addr, 0))))
4582 tree base;
4583 poly_int64 coffset;
4584 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4585 &coffset);
4586 if (!base)
4587 gcc_unreachable ();
4589 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4590 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4591 TREE_OPERAND (*t, 1),
4592 size_int (coffset));
4593 res = true;
4595 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4596 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4599 /* Canonicalize back MEM_REFs to plain reference trees if the object
4600 accessed is a decl that has the same access semantics as the MEM_REF. */
4601 if (TREE_CODE (*t) == MEM_REF
4602 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4603 && integer_zerop (TREE_OPERAND (*t, 1))
4604 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4606 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4607 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4608 if (/* Same volatile qualification. */
4609 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4610 /* Same TBAA behavior with -fstrict-aliasing. */
4611 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4612 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4613 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4614 /* Same alignment. */
4615 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4616 /* We have to look out here to not drop a required conversion
4617 from the rhs to the lhs if *t appears on the lhs or vice-versa
4618 if it appears on the rhs. Thus require strict type
4619 compatibility. */
4620 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4622 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4623 res = true;
4627 /* Canonicalize TARGET_MEM_REF in particular with respect to
4628 the indexes becoming constant. */
4629 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4631 tree tem = maybe_fold_tmr (*t);
4632 if (tem)
4634 *t = tem;
4635 res = true;
4639 return res;
4642 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4643 distinguishes both cases. */
4645 static bool
4646 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4648 bool changed = false;
4649 gimple *stmt = gsi_stmt (*gsi);
4650 bool nowarning = gimple_no_warning_p (stmt);
4651 unsigned i;
4652 fold_defer_overflow_warnings ();
4654 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4655 after propagation.
4656 ??? This shouldn't be done in generic folding but in the
4657 propagation helpers which also know whether an address was
4658 propagated.
4659 Also canonicalize operand order. */
4660 switch (gimple_code (stmt))
4662 case GIMPLE_ASSIGN:
4663 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4665 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4666 if ((REFERENCE_CLASS_P (*rhs)
4667 || TREE_CODE (*rhs) == ADDR_EXPR)
4668 && maybe_canonicalize_mem_ref_addr (rhs))
4669 changed = true;
4670 tree *lhs = gimple_assign_lhs_ptr (stmt);
4671 if (REFERENCE_CLASS_P (*lhs)
4672 && maybe_canonicalize_mem_ref_addr (lhs))
4673 changed = true;
4675 else
4677 /* Canonicalize operand order. */
4678 enum tree_code code = gimple_assign_rhs_code (stmt);
4679 if (TREE_CODE_CLASS (code) == tcc_comparison
4680 || commutative_tree_code (code)
4681 || commutative_ternary_tree_code (code))
4683 tree rhs1 = gimple_assign_rhs1 (stmt);
4684 tree rhs2 = gimple_assign_rhs2 (stmt);
4685 if (tree_swap_operands_p (rhs1, rhs2))
4687 gimple_assign_set_rhs1 (stmt, rhs2);
4688 gimple_assign_set_rhs2 (stmt, rhs1);
4689 if (TREE_CODE_CLASS (code) == tcc_comparison)
4690 gimple_assign_set_rhs_code (stmt,
4691 swap_tree_comparison (code));
4692 changed = true;
4696 break;
4697 case GIMPLE_CALL:
4699 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4701 tree *arg = gimple_call_arg_ptr (stmt, i);
4702 if (REFERENCE_CLASS_P (*arg)
4703 && maybe_canonicalize_mem_ref_addr (arg))
4704 changed = true;
4706 tree *lhs = gimple_call_lhs_ptr (stmt);
4707 if (*lhs
4708 && REFERENCE_CLASS_P (*lhs)
4709 && maybe_canonicalize_mem_ref_addr (lhs))
4710 changed = true;
4711 break;
4713 case GIMPLE_ASM:
4715 gasm *asm_stmt = as_a <gasm *> (stmt);
4716 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4718 tree link = gimple_asm_output_op (asm_stmt, i);
4719 tree op = TREE_VALUE (link);
4720 if (REFERENCE_CLASS_P (op)
4721 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4722 changed = true;
4724 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4726 tree link = gimple_asm_input_op (asm_stmt, i);
4727 tree op = TREE_VALUE (link);
4728 if ((REFERENCE_CLASS_P (op)
4729 || TREE_CODE (op) == ADDR_EXPR)
4730 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4731 changed = true;
4734 break;
4735 case GIMPLE_DEBUG:
4736 if (gimple_debug_bind_p (stmt))
4738 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4739 if (*val
4740 && (REFERENCE_CLASS_P (*val)
4741 || TREE_CODE (*val) == ADDR_EXPR)
4742 && maybe_canonicalize_mem_ref_addr (val))
4743 changed = true;
4745 break;
4746 case GIMPLE_COND:
4748 /* Canonicalize operand order. */
4749 tree lhs = gimple_cond_lhs (stmt);
4750 tree rhs = gimple_cond_rhs (stmt);
4751 if (tree_swap_operands_p (lhs, rhs))
4753 gcond *gc = as_a <gcond *> (stmt);
4754 gimple_cond_set_lhs (gc, rhs);
4755 gimple_cond_set_rhs (gc, lhs);
4756 gimple_cond_set_code (gc,
4757 swap_tree_comparison (gimple_cond_code (gc)));
4758 changed = true;
4761 default:;
4764 /* Dispatch to pattern-based folding. */
4765 if (!inplace
4766 || is_gimple_assign (stmt)
4767 || gimple_code (stmt) == GIMPLE_COND)
4769 gimple_seq seq = NULL;
4770 code_helper rcode;
4771 tree ops[3] = {};
4772 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4773 valueize, valueize))
4775 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4776 changed = true;
4777 else
4778 gimple_seq_discard (seq);
4782 stmt = gsi_stmt (*gsi);
4784 /* Fold the main computation performed by the statement. */
4785 switch (gimple_code (stmt))
4787 case GIMPLE_ASSIGN:
4789 /* Try to canonicalize for boolean-typed X the comparisons
4790 X == 0, X == 1, X != 0, and X != 1. */
4791 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4792 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4794 tree lhs = gimple_assign_lhs (stmt);
4795 tree op1 = gimple_assign_rhs1 (stmt);
4796 tree op2 = gimple_assign_rhs2 (stmt);
4797 tree type = TREE_TYPE (op1);
4799 /* Check whether the comparison operands are of the same boolean
4800 type as the result type is.
4801 Check that second operand is an integer-constant with value
4802 one or zero. */
4803 if (TREE_CODE (op2) == INTEGER_CST
4804 && (integer_zerop (op2) || integer_onep (op2))
4805 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4807 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4808 bool is_logical_not = false;
4810 /* X == 0 and X != 1 is a logical-not.of X
4811 X == 1 and X != 0 is X */
4812 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4813 || (cmp_code == NE_EXPR && integer_onep (op2)))
4814 is_logical_not = true;
4816 if (is_logical_not == false)
4817 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4818 /* Only for one-bit precision typed X the transformation
4819 !X -> ~X is valied. */
4820 else if (TYPE_PRECISION (type) == 1)
4821 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4822 /* Otherwise we use !X -> X ^ 1. */
4823 else
4824 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4825 build_int_cst (type, 1));
4826 changed = true;
4827 break;
4831 unsigned old_num_ops = gimple_num_ops (stmt);
4832 tree lhs = gimple_assign_lhs (stmt);
4833 tree new_rhs = fold_gimple_assign (gsi);
4834 if (new_rhs
4835 && !useless_type_conversion_p (TREE_TYPE (lhs),
4836 TREE_TYPE (new_rhs)))
4837 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4838 if (new_rhs
4839 && (!inplace
4840 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4842 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4843 changed = true;
4845 break;
4848 case GIMPLE_CALL:
4849 changed |= gimple_fold_call (gsi, inplace);
4850 break;
4852 case GIMPLE_ASM:
4853 /* Fold *& in asm operands. */
4855 gasm *asm_stmt = as_a <gasm *> (stmt);
4856 size_t noutputs;
4857 const char **oconstraints;
4858 const char *constraint;
4859 bool allows_mem, allows_reg;
4861 noutputs = gimple_asm_noutputs (asm_stmt);
4862 oconstraints = XALLOCAVEC (const char *, noutputs);
4864 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4866 tree link = gimple_asm_output_op (asm_stmt, i);
4867 tree op = TREE_VALUE (link);
4868 oconstraints[i]
4869 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4870 if (REFERENCE_CLASS_P (op)
4871 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4873 TREE_VALUE (link) = op;
4874 changed = true;
4877 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4879 tree link = gimple_asm_input_op (asm_stmt, i);
4880 tree op = TREE_VALUE (link);
4881 constraint
4882 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4883 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4884 oconstraints, &allows_mem, &allows_reg);
4885 if (REFERENCE_CLASS_P (op)
4886 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4887 != NULL_TREE)
4889 TREE_VALUE (link) = op;
4890 changed = true;
4894 break;
4896 case GIMPLE_DEBUG:
4897 if (gimple_debug_bind_p (stmt))
4899 tree val = gimple_debug_bind_get_value (stmt);
4900 if (val
4901 && REFERENCE_CLASS_P (val))
4903 tree tem = maybe_fold_reference (val, false);
4904 if (tem)
4906 gimple_debug_bind_set_value (stmt, tem);
4907 changed = true;
4910 else if (val
4911 && TREE_CODE (val) == ADDR_EXPR)
4913 tree ref = TREE_OPERAND (val, 0);
4914 tree tem = maybe_fold_reference (ref, false);
4915 if (tem)
4917 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4918 gimple_debug_bind_set_value (stmt, tem);
4919 changed = true;
4923 break;
4925 case GIMPLE_RETURN:
4927 greturn *ret_stmt = as_a<greturn *> (stmt);
4928 tree ret = gimple_return_retval(ret_stmt);
4930 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4932 tree val = valueize (ret);
4933 if (val && val != ret
4934 && may_propagate_copy (ret, val))
4936 gimple_return_set_retval (ret_stmt, val);
4937 changed = true;
4941 break;
4943 default:;
4946 stmt = gsi_stmt (*gsi);
4948 /* Fold *& on the lhs. */
4949 if (gimple_has_lhs (stmt))
4951 tree lhs = gimple_get_lhs (stmt);
4952 if (lhs && REFERENCE_CLASS_P (lhs))
4954 tree new_lhs = maybe_fold_reference (lhs, true);
4955 if (new_lhs)
4957 gimple_set_lhs (stmt, new_lhs);
4958 changed = true;
4963 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4964 return changed;
4967 /* Valueziation callback that ends up not following SSA edges. */
4969 tree
4970 no_follow_ssa_edges (tree)
4972 return NULL_TREE;
4975 /* Valueization callback that ends up following single-use SSA edges only. */
4977 tree
4978 follow_single_use_edges (tree val)
4980 if (TREE_CODE (val) == SSA_NAME
4981 && !has_single_use (val))
4982 return NULL_TREE;
4983 return val;
4986 /* Fold the statement pointed to by GSI. In some cases, this function may
4987 replace the whole statement with a new one. Returns true iff folding
4988 makes any changes.
4989 The statement pointed to by GSI should be in valid gimple form but may
4990 be in unfolded state as resulting from for example constant propagation
4991 which can produce *&x = 0. */
4993 bool
4994 fold_stmt (gimple_stmt_iterator *gsi)
4996 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4999 bool
5000 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5002 return fold_stmt_1 (gsi, false, valueize);
5005 /* Perform the minimal folding on statement *GSI. Only operations like
5006 *&x created by constant propagation are handled. The statement cannot
5007 be replaced with a new one. Return true if the statement was
5008 changed, false otherwise.
5009 The statement *GSI should be in valid gimple form but may
5010 be in unfolded state as resulting from for example constant propagation
5011 which can produce *&x = 0. */
5013 bool
5014 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5016 gimple *stmt = gsi_stmt (*gsi);
5017 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5018 gcc_assert (gsi_stmt (*gsi) == stmt);
5019 return changed;
5022 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5023 if EXPR is null or we don't know how.
5024 If non-null, the result always has boolean type. */
5026 static tree
5027 canonicalize_bool (tree expr, bool invert)
5029 if (!expr)
5030 return NULL_TREE;
5031 else if (invert)
5033 if (integer_nonzerop (expr))
5034 return boolean_false_node;
5035 else if (integer_zerop (expr))
5036 return boolean_true_node;
5037 else if (TREE_CODE (expr) == SSA_NAME)
5038 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5039 build_int_cst (TREE_TYPE (expr), 0));
5040 else if (COMPARISON_CLASS_P (expr))
5041 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5042 boolean_type_node,
5043 TREE_OPERAND (expr, 0),
5044 TREE_OPERAND (expr, 1));
5045 else
5046 return NULL_TREE;
5048 else
5050 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5051 return expr;
5052 if (integer_nonzerop (expr))
5053 return boolean_true_node;
5054 else if (integer_zerop (expr))
5055 return boolean_false_node;
5056 else if (TREE_CODE (expr) == SSA_NAME)
5057 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5058 build_int_cst (TREE_TYPE (expr), 0));
5059 else if (COMPARISON_CLASS_P (expr))
5060 return fold_build2 (TREE_CODE (expr),
5061 boolean_type_node,
5062 TREE_OPERAND (expr, 0),
5063 TREE_OPERAND (expr, 1));
5064 else
5065 return NULL_TREE;
5069 /* Check to see if a boolean expression EXPR is logically equivalent to the
5070 comparison (OP1 CODE OP2). Check for various identities involving
5071 SSA_NAMEs. */
5073 static bool
5074 same_bool_comparison_p (const_tree expr, enum tree_code code,
5075 const_tree op1, const_tree op2)
5077 gimple *s;
5079 /* The obvious case. */
5080 if (TREE_CODE (expr) == code
5081 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5082 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5083 return true;
5085 /* Check for comparing (name, name != 0) and the case where expr
5086 is an SSA_NAME with a definition matching the comparison. */
5087 if (TREE_CODE (expr) == SSA_NAME
5088 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5090 if (operand_equal_p (expr, op1, 0))
5091 return ((code == NE_EXPR && integer_zerop (op2))
5092 || (code == EQ_EXPR && integer_nonzerop (op2)));
5093 s = SSA_NAME_DEF_STMT (expr);
5094 if (is_gimple_assign (s)
5095 && gimple_assign_rhs_code (s) == code
5096 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5097 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5098 return true;
5101 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5102 of name is a comparison, recurse. */
5103 if (TREE_CODE (op1) == SSA_NAME
5104 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5106 s = SSA_NAME_DEF_STMT (op1);
5107 if (is_gimple_assign (s)
5108 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5110 enum tree_code c = gimple_assign_rhs_code (s);
5111 if ((c == NE_EXPR && integer_zerop (op2))
5112 || (c == EQ_EXPR && integer_nonzerop (op2)))
5113 return same_bool_comparison_p (expr, c,
5114 gimple_assign_rhs1 (s),
5115 gimple_assign_rhs2 (s));
5116 if ((c == EQ_EXPR && integer_zerop (op2))
5117 || (c == NE_EXPR && integer_nonzerop (op2)))
5118 return same_bool_comparison_p (expr,
5119 invert_tree_comparison (c, false),
5120 gimple_assign_rhs1 (s),
5121 gimple_assign_rhs2 (s));
5124 return false;
5127 /* Check to see if two boolean expressions OP1 and OP2 are logically
5128 equivalent. */
5130 static bool
5131 same_bool_result_p (const_tree op1, const_tree op2)
5133 /* Simple cases first. */
5134 if (operand_equal_p (op1, op2, 0))
5135 return true;
5137 /* Check the cases where at least one of the operands is a comparison.
5138 These are a bit smarter than operand_equal_p in that they apply some
5139 identifies on SSA_NAMEs. */
5140 if (COMPARISON_CLASS_P (op2)
5141 && same_bool_comparison_p (op1, TREE_CODE (op2),
5142 TREE_OPERAND (op2, 0),
5143 TREE_OPERAND (op2, 1)))
5144 return true;
5145 if (COMPARISON_CLASS_P (op1)
5146 && same_bool_comparison_p (op2, TREE_CODE (op1),
5147 TREE_OPERAND (op1, 0),
5148 TREE_OPERAND (op1, 1)))
5149 return true;
5151 /* Default case. */
5152 return false;
5155 /* Forward declarations for some mutually recursive functions. */
5157 static tree
5158 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5159 enum tree_code code2, tree op2a, tree op2b);
5160 static tree
5161 and_var_with_comparison (tree var, bool invert,
5162 enum tree_code code2, tree op2a, tree op2b);
5163 static tree
5164 and_var_with_comparison_1 (gimple *stmt,
5165 enum tree_code code2, tree op2a, tree op2b);
5166 static tree
5167 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5168 enum tree_code code2, tree op2a, tree op2b);
5169 static tree
5170 or_var_with_comparison (tree var, bool invert,
5171 enum tree_code code2, tree op2a, tree op2b);
5172 static tree
5173 or_var_with_comparison_1 (gimple *stmt,
5174 enum tree_code code2, tree op2a, tree op2b);
5176 /* Helper function for and_comparisons_1: try to simplify the AND of the
5177 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5178 If INVERT is true, invert the value of the VAR before doing the AND.
5179 Return NULL_EXPR if we can't simplify this to a single expression. */
5181 static tree
5182 and_var_with_comparison (tree var, bool invert,
5183 enum tree_code code2, tree op2a, tree op2b)
5185 tree t;
5186 gimple *stmt = SSA_NAME_DEF_STMT (var);
5188 /* We can only deal with variables whose definitions are assignments. */
5189 if (!is_gimple_assign (stmt))
5190 return NULL_TREE;
5192 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5193 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5194 Then we only have to consider the simpler non-inverted cases. */
5195 if (invert)
5196 t = or_var_with_comparison_1 (stmt,
5197 invert_tree_comparison (code2, false),
5198 op2a, op2b);
5199 else
5200 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5201 return canonicalize_bool (t, invert);
5204 /* Try to simplify the AND of the ssa variable defined by the assignment
5205 STMT with the comparison specified by (OP2A CODE2 OP2B).
5206 Return NULL_EXPR if we can't simplify this to a single expression. */
5208 static tree
5209 and_var_with_comparison_1 (gimple *stmt,
5210 enum tree_code code2, tree op2a, tree op2b)
5212 tree var = gimple_assign_lhs (stmt);
5213 tree true_test_var = NULL_TREE;
5214 tree false_test_var = NULL_TREE;
5215 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5217 /* Check for identities like (var AND (var == 0)) => false. */
5218 if (TREE_CODE (op2a) == SSA_NAME
5219 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5221 if ((code2 == NE_EXPR && integer_zerop (op2b))
5222 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5224 true_test_var = op2a;
5225 if (var == true_test_var)
5226 return var;
5228 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5229 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5231 false_test_var = op2a;
5232 if (var == false_test_var)
5233 return boolean_false_node;
5237 /* If the definition is a comparison, recurse on it. */
5238 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5240 tree t = and_comparisons_1 (innercode,
5241 gimple_assign_rhs1 (stmt),
5242 gimple_assign_rhs2 (stmt),
5243 code2,
5244 op2a,
5245 op2b);
5246 if (t)
5247 return t;
5250 /* If the definition is an AND or OR expression, we may be able to
5251 simplify by reassociating. */
5252 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5253 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5255 tree inner1 = gimple_assign_rhs1 (stmt);
5256 tree inner2 = gimple_assign_rhs2 (stmt);
5257 gimple *s;
5258 tree t;
5259 tree partial = NULL_TREE;
5260 bool is_and = (innercode == BIT_AND_EXPR);
5262 /* Check for boolean identities that don't require recursive examination
5263 of inner1/inner2:
5264 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5265 inner1 AND (inner1 OR inner2) => inner1
5266 !inner1 AND (inner1 AND inner2) => false
5267 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5268 Likewise for similar cases involving inner2. */
5269 if (inner1 == true_test_var)
5270 return (is_and ? var : inner1);
5271 else if (inner2 == true_test_var)
5272 return (is_and ? var : inner2);
5273 else if (inner1 == false_test_var)
5274 return (is_and
5275 ? boolean_false_node
5276 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5277 else if (inner2 == false_test_var)
5278 return (is_and
5279 ? boolean_false_node
5280 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5282 /* Next, redistribute/reassociate the AND across the inner tests.
5283 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5284 if (TREE_CODE (inner1) == SSA_NAME
5285 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5286 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5287 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5288 gimple_assign_rhs1 (s),
5289 gimple_assign_rhs2 (s),
5290 code2, op2a, op2b)))
5292 /* Handle the AND case, where we are reassociating:
5293 (inner1 AND inner2) AND (op2a code2 op2b)
5294 => (t AND inner2)
5295 If the partial result t is a constant, we win. Otherwise
5296 continue on to try reassociating with the other inner test. */
5297 if (is_and)
5299 if (integer_onep (t))
5300 return inner2;
5301 else if (integer_zerop (t))
5302 return boolean_false_node;
5305 /* Handle the OR case, where we are redistributing:
5306 (inner1 OR inner2) AND (op2a code2 op2b)
5307 => (t OR (inner2 AND (op2a code2 op2b))) */
5308 else if (integer_onep (t))
5309 return boolean_true_node;
5311 /* Save partial result for later. */
5312 partial = t;
5315 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5316 if (TREE_CODE (inner2) == SSA_NAME
5317 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5318 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5319 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5320 gimple_assign_rhs1 (s),
5321 gimple_assign_rhs2 (s),
5322 code2, op2a, op2b)))
5324 /* Handle the AND case, where we are reassociating:
5325 (inner1 AND inner2) AND (op2a code2 op2b)
5326 => (inner1 AND t) */
5327 if (is_and)
5329 if (integer_onep (t))
5330 return inner1;
5331 else if (integer_zerop (t))
5332 return boolean_false_node;
5333 /* If both are the same, we can apply the identity
5334 (x AND x) == x. */
5335 else if (partial && same_bool_result_p (t, partial))
5336 return t;
5339 /* Handle the OR case. where we are redistributing:
5340 (inner1 OR inner2) AND (op2a code2 op2b)
5341 => (t OR (inner1 AND (op2a code2 op2b)))
5342 => (t OR partial) */
5343 else
5345 if (integer_onep (t))
5346 return boolean_true_node;
5347 else if (partial)
5349 /* We already got a simplification for the other
5350 operand to the redistributed OR expression. The
5351 interesting case is when at least one is false.
5352 Or, if both are the same, we can apply the identity
5353 (x OR x) == x. */
5354 if (integer_zerop (partial))
5355 return t;
5356 else if (integer_zerop (t))
5357 return partial;
5358 else if (same_bool_result_p (t, partial))
5359 return t;
5364 return NULL_TREE;
5367 /* Try to simplify the AND of two comparisons defined by
5368 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5369 If this can be done without constructing an intermediate value,
5370 return the resulting tree; otherwise NULL_TREE is returned.
5371 This function is deliberately asymmetric as it recurses on SSA_DEFs
5372 in the first comparison but not the second. */
5374 static tree
5375 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5376 enum tree_code code2, tree op2a, tree op2b)
5378 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5380 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5381 if (operand_equal_p (op1a, op2a, 0)
5382 && operand_equal_p (op1b, op2b, 0))
5384 /* Result will be either NULL_TREE, or a combined comparison. */
5385 tree t = combine_comparisons (UNKNOWN_LOCATION,
5386 TRUTH_ANDIF_EXPR, code1, code2,
5387 truth_type, op1a, op1b);
5388 if (t)
5389 return t;
5392 /* Likewise the swapped case of the above. */
5393 if (operand_equal_p (op1a, op2b, 0)
5394 && operand_equal_p (op1b, op2a, 0))
5396 /* Result will be either NULL_TREE, or a combined comparison. */
5397 tree t = combine_comparisons (UNKNOWN_LOCATION,
5398 TRUTH_ANDIF_EXPR, code1,
5399 swap_tree_comparison (code2),
5400 truth_type, op1a, op1b);
5401 if (t)
5402 return t;
5405 /* If both comparisons are of the same value against constants, we might
5406 be able to merge them. */
5407 if (operand_equal_p (op1a, op2a, 0)
5408 && TREE_CODE (op1b) == INTEGER_CST
5409 && TREE_CODE (op2b) == INTEGER_CST)
5411 int cmp = tree_int_cst_compare (op1b, op2b);
5413 /* If we have (op1a == op1b), we should either be able to
5414 return that or FALSE, depending on whether the constant op1b
5415 also satisfies the other comparison against op2b. */
5416 if (code1 == EQ_EXPR)
5418 bool done = true;
5419 bool val;
5420 switch (code2)
5422 case EQ_EXPR: val = (cmp == 0); break;
5423 case NE_EXPR: val = (cmp != 0); break;
5424 case LT_EXPR: val = (cmp < 0); break;
5425 case GT_EXPR: val = (cmp > 0); break;
5426 case LE_EXPR: val = (cmp <= 0); break;
5427 case GE_EXPR: val = (cmp >= 0); break;
5428 default: done = false;
5430 if (done)
5432 if (val)
5433 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5434 else
5435 return boolean_false_node;
5438 /* Likewise if the second comparison is an == comparison. */
5439 else if (code2 == EQ_EXPR)
5441 bool done = true;
5442 bool val;
5443 switch (code1)
5445 case EQ_EXPR: val = (cmp == 0); break;
5446 case NE_EXPR: val = (cmp != 0); break;
5447 case LT_EXPR: val = (cmp > 0); break;
5448 case GT_EXPR: val = (cmp < 0); break;
5449 case LE_EXPR: val = (cmp >= 0); break;
5450 case GE_EXPR: val = (cmp <= 0); break;
5451 default: done = false;
5453 if (done)
5455 if (val)
5456 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5457 else
5458 return boolean_false_node;
5462 /* Same business with inequality tests. */
5463 else if (code1 == NE_EXPR)
5465 bool val;
5466 switch (code2)
5468 case EQ_EXPR: val = (cmp != 0); break;
5469 case NE_EXPR: val = (cmp == 0); break;
5470 case LT_EXPR: val = (cmp >= 0); break;
5471 case GT_EXPR: val = (cmp <= 0); break;
5472 case LE_EXPR: val = (cmp > 0); break;
5473 case GE_EXPR: val = (cmp < 0); break;
5474 default:
5475 val = false;
5477 if (val)
5478 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5480 else if (code2 == NE_EXPR)
5482 bool val;
5483 switch (code1)
5485 case EQ_EXPR: val = (cmp == 0); break;
5486 case NE_EXPR: val = (cmp != 0); break;
5487 case LT_EXPR: val = (cmp <= 0); break;
5488 case GT_EXPR: val = (cmp >= 0); break;
5489 case LE_EXPR: val = (cmp < 0); break;
5490 case GE_EXPR: val = (cmp > 0); break;
5491 default:
5492 val = false;
5494 if (val)
5495 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5498 /* Chose the more restrictive of two < or <= comparisons. */
5499 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5500 && (code2 == LT_EXPR || code2 == LE_EXPR))
5502 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5503 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5504 else
5505 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5508 /* Likewise chose the more restrictive of two > or >= comparisons. */
5509 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5510 && (code2 == GT_EXPR || code2 == GE_EXPR))
5512 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5513 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5514 else
5515 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5518 /* Check for singleton ranges. */
5519 else if (cmp == 0
5520 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5521 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5522 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5524 /* Check for disjoint ranges. */
5525 else if (cmp <= 0
5526 && (code1 == LT_EXPR || code1 == LE_EXPR)
5527 && (code2 == GT_EXPR || code2 == GE_EXPR))
5528 return boolean_false_node;
5529 else if (cmp >= 0
5530 && (code1 == GT_EXPR || code1 == GE_EXPR)
5531 && (code2 == LT_EXPR || code2 == LE_EXPR))
5532 return boolean_false_node;
5535 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5536 NAME's definition is a truth value. See if there are any simplifications
5537 that can be done against the NAME's definition. */
5538 if (TREE_CODE (op1a) == SSA_NAME
5539 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5540 && (integer_zerop (op1b) || integer_onep (op1b)))
5542 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5543 || (code1 == NE_EXPR && integer_onep (op1b)));
5544 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5545 switch (gimple_code (stmt))
5547 case GIMPLE_ASSIGN:
5548 /* Try to simplify by copy-propagating the definition. */
5549 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5551 case GIMPLE_PHI:
5552 /* If every argument to the PHI produces the same result when
5553 ANDed with the second comparison, we win.
5554 Do not do this unless the type is bool since we need a bool
5555 result here anyway. */
5556 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5558 tree result = NULL_TREE;
5559 unsigned i;
5560 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5562 tree arg = gimple_phi_arg_def (stmt, i);
5564 /* If this PHI has itself as an argument, ignore it.
5565 If all the other args produce the same result,
5566 we're still OK. */
5567 if (arg == gimple_phi_result (stmt))
5568 continue;
5569 else if (TREE_CODE (arg) == INTEGER_CST)
5571 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5573 if (!result)
5574 result = boolean_false_node;
5575 else if (!integer_zerop (result))
5576 return NULL_TREE;
5578 else if (!result)
5579 result = fold_build2 (code2, boolean_type_node,
5580 op2a, op2b);
5581 else if (!same_bool_comparison_p (result,
5582 code2, op2a, op2b))
5583 return NULL_TREE;
5585 else if (TREE_CODE (arg) == SSA_NAME
5586 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5588 tree temp;
5589 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5590 /* In simple cases we can look through PHI nodes,
5591 but we have to be careful with loops.
5592 See PR49073. */
5593 if (! dom_info_available_p (CDI_DOMINATORS)
5594 || gimple_bb (def_stmt) == gimple_bb (stmt)
5595 || dominated_by_p (CDI_DOMINATORS,
5596 gimple_bb (def_stmt),
5597 gimple_bb (stmt)))
5598 return NULL_TREE;
5599 temp = and_var_with_comparison (arg, invert, code2,
5600 op2a, op2b);
5601 if (!temp)
5602 return NULL_TREE;
5603 else if (!result)
5604 result = temp;
5605 else if (!same_bool_result_p (result, temp))
5606 return NULL_TREE;
5608 else
5609 return NULL_TREE;
5611 return result;
5614 default:
5615 break;
5618 return NULL_TREE;
5621 /* Try to simplify the AND of two comparisons, specified by
5622 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5623 If this can be simplified to a single expression (without requiring
5624 introducing more SSA variables to hold intermediate values),
5625 return the resulting tree. Otherwise return NULL_TREE.
5626 If the result expression is non-null, it has boolean type. */
5628 tree
5629 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5630 enum tree_code code2, tree op2a, tree op2b)
5632 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5633 if (t)
5634 return t;
5635 else
5636 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5639 /* Helper function for or_comparisons_1: try to simplify the OR of the
5640 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5641 If INVERT is true, invert the value of VAR before doing the OR.
5642 Return NULL_EXPR if we can't simplify this to a single expression. */
5644 static tree
5645 or_var_with_comparison (tree var, bool invert,
5646 enum tree_code code2, tree op2a, tree op2b)
5648 tree t;
5649 gimple *stmt = SSA_NAME_DEF_STMT (var);
5651 /* We can only deal with variables whose definitions are assignments. */
5652 if (!is_gimple_assign (stmt))
5653 return NULL_TREE;
5655 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5656 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5657 Then we only have to consider the simpler non-inverted cases. */
5658 if (invert)
5659 t = and_var_with_comparison_1 (stmt,
5660 invert_tree_comparison (code2, false),
5661 op2a, op2b);
5662 else
5663 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5664 return canonicalize_bool (t, invert);
5667 /* Try to simplify the OR of the ssa variable defined by the assignment
5668 STMT with the comparison specified by (OP2A CODE2 OP2B).
5669 Return NULL_EXPR if we can't simplify this to a single expression. */
5671 static tree
5672 or_var_with_comparison_1 (gimple *stmt,
5673 enum tree_code code2, tree op2a, tree op2b)
5675 tree var = gimple_assign_lhs (stmt);
5676 tree true_test_var = NULL_TREE;
5677 tree false_test_var = NULL_TREE;
5678 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5680 /* Check for identities like (var OR (var != 0)) => true . */
5681 if (TREE_CODE (op2a) == SSA_NAME
5682 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5684 if ((code2 == NE_EXPR && integer_zerop (op2b))
5685 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5687 true_test_var = op2a;
5688 if (var == true_test_var)
5689 return var;
5691 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5692 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5694 false_test_var = op2a;
5695 if (var == false_test_var)
5696 return boolean_true_node;
5700 /* If the definition is a comparison, recurse on it. */
5701 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5703 tree t = or_comparisons_1 (innercode,
5704 gimple_assign_rhs1 (stmt),
5705 gimple_assign_rhs2 (stmt),
5706 code2,
5707 op2a,
5708 op2b);
5709 if (t)
5710 return t;
5713 /* If the definition is an AND or OR expression, we may be able to
5714 simplify by reassociating. */
5715 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5716 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5718 tree inner1 = gimple_assign_rhs1 (stmt);
5719 tree inner2 = gimple_assign_rhs2 (stmt);
5720 gimple *s;
5721 tree t;
5722 tree partial = NULL_TREE;
5723 bool is_or = (innercode == BIT_IOR_EXPR);
5725 /* Check for boolean identities that don't require recursive examination
5726 of inner1/inner2:
5727 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5728 inner1 OR (inner1 AND inner2) => inner1
5729 !inner1 OR (inner1 OR inner2) => true
5730 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5732 if (inner1 == true_test_var)
5733 return (is_or ? var : inner1);
5734 else if (inner2 == true_test_var)
5735 return (is_or ? var : inner2);
5736 else if (inner1 == false_test_var)
5737 return (is_or
5738 ? boolean_true_node
5739 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5740 else if (inner2 == false_test_var)
5741 return (is_or
5742 ? boolean_true_node
5743 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5745 /* Next, redistribute/reassociate the OR across the inner tests.
5746 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5747 if (TREE_CODE (inner1) == SSA_NAME
5748 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5749 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5750 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5751 gimple_assign_rhs1 (s),
5752 gimple_assign_rhs2 (s),
5753 code2, op2a, op2b)))
5755 /* Handle the OR case, where we are reassociating:
5756 (inner1 OR inner2) OR (op2a code2 op2b)
5757 => (t OR inner2)
5758 If the partial result t is a constant, we win. Otherwise
5759 continue on to try reassociating with the other inner test. */
5760 if (is_or)
5762 if (integer_onep (t))
5763 return boolean_true_node;
5764 else if (integer_zerop (t))
5765 return inner2;
5768 /* Handle the AND case, where we are redistributing:
5769 (inner1 AND inner2) OR (op2a code2 op2b)
5770 => (t AND (inner2 OR (op2a code op2b))) */
5771 else if (integer_zerop (t))
5772 return boolean_false_node;
5774 /* Save partial result for later. */
5775 partial = t;
5778 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5779 if (TREE_CODE (inner2) == SSA_NAME
5780 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5781 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5782 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5783 gimple_assign_rhs1 (s),
5784 gimple_assign_rhs2 (s),
5785 code2, op2a, op2b)))
5787 /* Handle the OR case, where we are reassociating:
5788 (inner1 OR inner2) OR (op2a code2 op2b)
5789 => (inner1 OR t)
5790 => (t OR partial) */
5791 if (is_or)
5793 if (integer_zerop (t))
5794 return inner1;
5795 else if (integer_onep (t))
5796 return boolean_true_node;
5797 /* If both are the same, we can apply the identity
5798 (x OR x) == x. */
5799 else if (partial && same_bool_result_p (t, partial))
5800 return t;
5803 /* Handle the AND case, where we are redistributing:
5804 (inner1 AND inner2) OR (op2a code2 op2b)
5805 => (t AND (inner1 OR (op2a code2 op2b)))
5806 => (t AND partial) */
5807 else
5809 if (integer_zerop (t))
5810 return boolean_false_node;
5811 else if (partial)
5813 /* We already got a simplification for the other
5814 operand to the redistributed AND expression. The
5815 interesting case is when at least one is true.
5816 Or, if both are the same, we can apply the identity
5817 (x AND x) == x. */
5818 if (integer_onep (partial))
5819 return t;
5820 else if (integer_onep (t))
5821 return partial;
5822 else if (same_bool_result_p (t, partial))
5823 return t;
5828 return NULL_TREE;
5831 /* Try to simplify the OR of two comparisons defined by
5832 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5833 If this can be done without constructing an intermediate value,
5834 return the resulting tree; otherwise NULL_TREE is returned.
5835 This function is deliberately asymmetric as it recurses on SSA_DEFs
5836 in the first comparison but not the second. */
5838 static tree
5839 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5840 enum tree_code code2, tree op2a, tree op2b)
5842 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5844 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5845 if (operand_equal_p (op1a, op2a, 0)
5846 && operand_equal_p (op1b, op2b, 0))
5848 /* Result will be either NULL_TREE, or a combined comparison. */
5849 tree t = combine_comparisons (UNKNOWN_LOCATION,
5850 TRUTH_ORIF_EXPR, code1, code2,
5851 truth_type, op1a, op1b);
5852 if (t)
5853 return t;
5856 /* Likewise the swapped case of the above. */
5857 if (operand_equal_p (op1a, op2b, 0)
5858 && operand_equal_p (op1b, op2a, 0))
5860 /* Result will be either NULL_TREE, or a combined comparison. */
5861 tree t = combine_comparisons (UNKNOWN_LOCATION,
5862 TRUTH_ORIF_EXPR, code1,
5863 swap_tree_comparison (code2),
5864 truth_type, op1a, op1b);
5865 if (t)
5866 return t;
5869 /* If both comparisons are of the same value against constants, we might
5870 be able to merge them. */
5871 if (operand_equal_p (op1a, op2a, 0)
5872 && TREE_CODE (op1b) == INTEGER_CST
5873 && TREE_CODE (op2b) == INTEGER_CST)
5875 int cmp = tree_int_cst_compare (op1b, op2b);
5877 /* If we have (op1a != op1b), we should either be able to
5878 return that or TRUE, depending on whether the constant op1b
5879 also satisfies the other comparison against op2b. */
5880 if (code1 == NE_EXPR)
5882 bool done = true;
5883 bool val;
5884 switch (code2)
5886 case EQ_EXPR: val = (cmp == 0); break;
5887 case NE_EXPR: val = (cmp != 0); break;
5888 case LT_EXPR: val = (cmp < 0); break;
5889 case GT_EXPR: val = (cmp > 0); break;
5890 case LE_EXPR: val = (cmp <= 0); break;
5891 case GE_EXPR: val = (cmp >= 0); break;
5892 default: done = false;
5894 if (done)
5896 if (val)
5897 return boolean_true_node;
5898 else
5899 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5902 /* Likewise if the second comparison is a != comparison. */
5903 else if (code2 == NE_EXPR)
5905 bool done = true;
5906 bool val;
5907 switch (code1)
5909 case EQ_EXPR: val = (cmp == 0); break;
5910 case NE_EXPR: val = (cmp != 0); break;
5911 case LT_EXPR: val = (cmp > 0); break;
5912 case GT_EXPR: val = (cmp < 0); break;
5913 case LE_EXPR: val = (cmp >= 0); break;
5914 case GE_EXPR: val = (cmp <= 0); break;
5915 default: done = false;
5917 if (done)
5919 if (val)
5920 return boolean_true_node;
5921 else
5922 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5926 /* See if an equality test is redundant with the other comparison. */
5927 else if (code1 == EQ_EXPR)
5929 bool val;
5930 switch (code2)
5932 case EQ_EXPR: val = (cmp == 0); break;
5933 case NE_EXPR: val = (cmp != 0); break;
5934 case LT_EXPR: val = (cmp < 0); break;
5935 case GT_EXPR: val = (cmp > 0); break;
5936 case LE_EXPR: val = (cmp <= 0); break;
5937 case GE_EXPR: val = (cmp >= 0); break;
5938 default:
5939 val = false;
5941 if (val)
5942 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5944 else if (code2 == EQ_EXPR)
5946 bool val;
5947 switch (code1)
5949 case EQ_EXPR: val = (cmp == 0); break;
5950 case NE_EXPR: val = (cmp != 0); break;
5951 case LT_EXPR: val = (cmp > 0); break;
5952 case GT_EXPR: val = (cmp < 0); break;
5953 case LE_EXPR: val = (cmp >= 0); break;
5954 case GE_EXPR: val = (cmp <= 0); break;
5955 default:
5956 val = false;
5958 if (val)
5959 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5962 /* Chose the less restrictive of two < or <= comparisons. */
5963 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5964 && (code2 == LT_EXPR || code2 == LE_EXPR))
5966 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5967 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5968 else
5969 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5972 /* Likewise chose the less restrictive of two > or >= comparisons. */
5973 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5974 && (code2 == GT_EXPR || code2 == GE_EXPR))
5976 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5977 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5978 else
5979 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5982 /* Check for singleton ranges. */
5983 else if (cmp == 0
5984 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5985 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5986 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5988 /* Check for less/greater pairs that don't restrict the range at all. */
5989 else if (cmp >= 0
5990 && (code1 == LT_EXPR || code1 == LE_EXPR)
5991 && (code2 == GT_EXPR || code2 == GE_EXPR))
5992 return boolean_true_node;
5993 else if (cmp <= 0
5994 && (code1 == GT_EXPR || code1 == GE_EXPR)
5995 && (code2 == LT_EXPR || code2 == LE_EXPR))
5996 return boolean_true_node;
5999 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6000 NAME's definition is a truth value. See if there are any simplifications
6001 that can be done against the NAME's definition. */
6002 if (TREE_CODE (op1a) == SSA_NAME
6003 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6004 && (integer_zerop (op1b) || integer_onep (op1b)))
6006 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6007 || (code1 == NE_EXPR && integer_onep (op1b)));
6008 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6009 switch (gimple_code (stmt))
6011 case GIMPLE_ASSIGN:
6012 /* Try to simplify by copy-propagating the definition. */
6013 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6015 case GIMPLE_PHI:
6016 /* If every argument to the PHI produces the same result when
6017 ORed with the second comparison, we win.
6018 Do not do this unless the type is bool since we need a bool
6019 result here anyway. */
6020 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6022 tree result = NULL_TREE;
6023 unsigned i;
6024 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6026 tree arg = gimple_phi_arg_def (stmt, i);
6028 /* If this PHI has itself as an argument, ignore it.
6029 If all the other args produce the same result,
6030 we're still OK. */
6031 if (arg == gimple_phi_result (stmt))
6032 continue;
6033 else if (TREE_CODE (arg) == INTEGER_CST)
6035 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6037 if (!result)
6038 result = boolean_true_node;
6039 else if (!integer_onep (result))
6040 return NULL_TREE;
6042 else if (!result)
6043 result = fold_build2 (code2, boolean_type_node,
6044 op2a, op2b);
6045 else if (!same_bool_comparison_p (result,
6046 code2, op2a, op2b))
6047 return NULL_TREE;
6049 else if (TREE_CODE (arg) == SSA_NAME
6050 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6052 tree temp;
6053 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6054 /* In simple cases we can look through PHI nodes,
6055 but we have to be careful with loops.
6056 See PR49073. */
6057 if (! dom_info_available_p (CDI_DOMINATORS)
6058 || gimple_bb (def_stmt) == gimple_bb (stmt)
6059 || dominated_by_p (CDI_DOMINATORS,
6060 gimple_bb (def_stmt),
6061 gimple_bb (stmt)))
6062 return NULL_TREE;
6063 temp = or_var_with_comparison (arg, invert, code2,
6064 op2a, op2b);
6065 if (!temp)
6066 return NULL_TREE;
6067 else if (!result)
6068 result = temp;
6069 else if (!same_bool_result_p (result, temp))
6070 return NULL_TREE;
6072 else
6073 return NULL_TREE;
6075 return result;
6078 default:
6079 break;
6082 return NULL_TREE;
6085 /* Try to simplify the OR of two comparisons, specified by
6086 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6087 If this can be simplified to a single expression (without requiring
6088 introducing more SSA variables to hold intermediate values),
6089 return the resulting tree. Otherwise return NULL_TREE.
6090 If the result expression is non-null, it has boolean type. */
6092 tree
6093 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6094 enum tree_code code2, tree op2a, tree op2b)
6096 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6097 if (t)
6098 return t;
6099 else
6100 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6104 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6106 Either NULL_TREE, a simplified but non-constant or a constant
6107 is returned.
6109 ??? This should go into a gimple-fold-inline.h file to be eventually
6110 privatized with the single valueize function used in the various TUs
6111 to avoid the indirect function call overhead. */
6113 tree
6114 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6115 tree (*gvalueize) (tree))
6117 code_helper rcode;
6118 tree ops[3] = {};
6119 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6120 edges if there are intermediate VARYING defs. For this reason
6121 do not follow SSA edges here even though SCCVN can technically
6122 just deal fine with that. */
6123 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6125 tree res = NULL_TREE;
6126 if (gimple_simplified_result_is_gimple_val (rcode, ops))
6127 res = ops[0];
6128 else if (mprts_hook)
6129 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6130 if (res)
6132 if (dump_file && dump_flags & TDF_DETAILS)
6134 fprintf (dump_file, "Match-and-simplified ");
6135 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6136 fprintf (dump_file, " to ");
6137 print_generic_expr (dump_file, res);
6138 fprintf (dump_file, "\n");
6140 return res;
6144 location_t loc = gimple_location (stmt);
6145 switch (gimple_code (stmt))
6147 case GIMPLE_ASSIGN:
6149 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6151 switch (get_gimple_rhs_class (subcode))
6153 case GIMPLE_SINGLE_RHS:
6155 tree rhs = gimple_assign_rhs1 (stmt);
6156 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6158 if (TREE_CODE (rhs) == SSA_NAME)
6160 /* If the RHS is an SSA_NAME, return its known constant value,
6161 if any. */
6162 return (*valueize) (rhs);
6164 /* Handle propagating invariant addresses into address
6165 operations. */
6166 else if (TREE_CODE (rhs) == ADDR_EXPR
6167 && !is_gimple_min_invariant (rhs))
6169 poly_int64 offset = 0;
6170 tree base;
6171 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6172 &offset,
6173 valueize);
6174 if (base
6175 && (CONSTANT_CLASS_P (base)
6176 || decl_address_invariant_p (base)))
6177 return build_invariant_address (TREE_TYPE (rhs),
6178 base, offset);
6180 else if (TREE_CODE (rhs) == CONSTRUCTOR
6181 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6182 && known_eq (CONSTRUCTOR_NELTS (rhs),
6183 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6185 unsigned i, nelts;
6186 tree val;
6188 nelts = CONSTRUCTOR_NELTS (rhs);
6189 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6190 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6192 val = (*valueize) (val);
6193 if (TREE_CODE (val) == INTEGER_CST
6194 || TREE_CODE (val) == REAL_CST
6195 || TREE_CODE (val) == FIXED_CST)
6196 vec.quick_push (val);
6197 else
6198 return NULL_TREE;
6201 return vec.build ();
6203 if (subcode == OBJ_TYPE_REF)
6205 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6206 /* If callee is constant, we can fold away the wrapper. */
6207 if (is_gimple_min_invariant (val))
6208 return val;
6211 if (kind == tcc_reference)
6213 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6214 || TREE_CODE (rhs) == REALPART_EXPR
6215 || TREE_CODE (rhs) == IMAGPART_EXPR)
6216 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6218 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6219 return fold_unary_loc (EXPR_LOCATION (rhs),
6220 TREE_CODE (rhs),
6221 TREE_TYPE (rhs), val);
6223 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6224 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6226 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6227 return fold_ternary_loc (EXPR_LOCATION (rhs),
6228 TREE_CODE (rhs),
6229 TREE_TYPE (rhs), val,
6230 TREE_OPERAND (rhs, 1),
6231 TREE_OPERAND (rhs, 2));
6233 else if (TREE_CODE (rhs) == MEM_REF
6234 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6236 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6237 if (TREE_CODE (val) == ADDR_EXPR
6238 && is_gimple_min_invariant (val))
6240 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6241 unshare_expr (val),
6242 TREE_OPERAND (rhs, 1));
6243 if (tem)
6244 rhs = tem;
6247 return fold_const_aggregate_ref_1 (rhs, valueize);
6249 else if (kind == tcc_declaration)
6250 return get_symbol_constant_value (rhs);
6251 return rhs;
6254 case GIMPLE_UNARY_RHS:
6255 return NULL_TREE;
6257 case GIMPLE_BINARY_RHS:
6258 /* Translate &x + CST into an invariant form suitable for
6259 further propagation. */
6260 if (subcode == POINTER_PLUS_EXPR)
6262 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6263 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6264 if (TREE_CODE (op0) == ADDR_EXPR
6265 && TREE_CODE (op1) == INTEGER_CST)
6267 tree off = fold_convert (ptr_type_node, op1);
6268 return build_fold_addr_expr_loc
6269 (loc,
6270 fold_build2 (MEM_REF,
6271 TREE_TYPE (TREE_TYPE (op0)),
6272 unshare_expr (op0), off));
6275 /* Canonicalize bool != 0 and bool == 0 appearing after
6276 valueization. While gimple_simplify handles this
6277 it can get confused by the ~X == 1 -> X == 0 transform
6278 which we cant reduce to a SSA name or a constant
6279 (and we have no way to tell gimple_simplify to not
6280 consider those transforms in the first place). */
6281 else if (subcode == EQ_EXPR
6282 || subcode == NE_EXPR)
6284 tree lhs = gimple_assign_lhs (stmt);
6285 tree op0 = gimple_assign_rhs1 (stmt);
6286 if (useless_type_conversion_p (TREE_TYPE (lhs),
6287 TREE_TYPE (op0)))
6289 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6290 op0 = (*valueize) (op0);
6291 if (TREE_CODE (op0) == INTEGER_CST)
6292 std::swap (op0, op1);
6293 if (TREE_CODE (op1) == INTEGER_CST
6294 && ((subcode == NE_EXPR && integer_zerop (op1))
6295 || (subcode == EQ_EXPR && integer_onep (op1))))
6296 return op0;
6299 return NULL_TREE;
6301 case GIMPLE_TERNARY_RHS:
6303 /* Handle ternary operators that can appear in GIMPLE form. */
6304 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6305 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6306 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6307 return fold_ternary_loc (loc, subcode,
6308 gimple_expr_type (stmt), op0, op1, op2);
6311 default:
6312 gcc_unreachable ();
6316 case GIMPLE_CALL:
6318 tree fn;
6319 gcall *call_stmt = as_a <gcall *> (stmt);
6321 if (gimple_call_internal_p (stmt))
6323 enum tree_code subcode = ERROR_MARK;
6324 switch (gimple_call_internal_fn (stmt))
6326 case IFN_UBSAN_CHECK_ADD:
6327 subcode = PLUS_EXPR;
6328 break;
6329 case IFN_UBSAN_CHECK_SUB:
6330 subcode = MINUS_EXPR;
6331 break;
6332 case IFN_UBSAN_CHECK_MUL:
6333 subcode = MULT_EXPR;
6334 break;
6335 case IFN_BUILTIN_EXPECT:
6337 tree arg0 = gimple_call_arg (stmt, 0);
6338 tree op0 = (*valueize) (arg0);
6339 if (TREE_CODE (op0) == INTEGER_CST)
6340 return op0;
6341 return NULL_TREE;
6343 default:
6344 return NULL_TREE;
6346 tree arg0 = gimple_call_arg (stmt, 0);
6347 tree arg1 = gimple_call_arg (stmt, 1);
6348 tree op0 = (*valueize) (arg0);
6349 tree op1 = (*valueize) (arg1);
6351 if (TREE_CODE (op0) != INTEGER_CST
6352 || TREE_CODE (op1) != INTEGER_CST)
6354 switch (subcode)
6356 case MULT_EXPR:
6357 /* x * 0 = 0 * x = 0 without overflow. */
6358 if (integer_zerop (op0) || integer_zerop (op1))
6359 return build_zero_cst (TREE_TYPE (arg0));
6360 break;
6361 case MINUS_EXPR:
6362 /* y - y = 0 without overflow. */
6363 if (operand_equal_p (op0, op1, 0))
6364 return build_zero_cst (TREE_TYPE (arg0));
6365 break;
6366 default:
6367 break;
6370 tree res
6371 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6372 if (res
6373 && TREE_CODE (res) == INTEGER_CST
6374 && !TREE_OVERFLOW (res))
6375 return res;
6376 return NULL_TREE;
6379 fn = (*valueize) (gimple_call_fn (stmt));
6380 if (TREE_CODE (fn) == ADDR_EXPR
6381 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6382 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6383 && gimple_builtin_call_types_compatible_p (stmt,
6384 TREE_OPERAND (fn, 0)))
6386 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6387 tree retval;
6388 unsigned i;
6389 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6390 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6391 retval = fold_builtin_call_array (loc,
6392 gimple_call_return_type (call_stmt),
6393 fn, gimple_call_num_args (stmt), args);
6394 if (retval)
6396 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6397 STRIP_NOPS (retval);
6398 retval = fold_convert (gimple_call_return_type (call_stmt),
6399 retval);
6401 return retval;
6403 return NULL_TREE;
6406 default:
6407 return NULL_TREE;
6411 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6412 Returns NULL_TREE if folding to a constant is not possible, otherwise
6413 returns a constant according to is_gimple_min_invariant. */
6415 tree
6416 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6418 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6419 if (res && is_gimple_min_invariant (res))
6420 return res;
6421 return NULL_TREE;
6425 /* The following set of functions are supposed to fold references using
6426 their constant initializers. */
6428 /* See if we can find constructor defining value of BASE.
6429 When we know the consructor with constant offset (such as
6430 base is array[40] and we do know constructor of array), then
6431 BIT_OFFSET is adjusted accordingly.
6433 As a special case, return error_mark_node when constructor
6434 is not explicitly available, but it is known to be zero
6435 such as 'static const int a;'. */
6436 static tree
6437 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6438 tree (*valueize)(tree))
6440 poly_int64 bit_offset2, size, max_size;
6441 bool reverse;
6443 if (TREE_CODE (base) == MEM_REF)
6445 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6446 if (!boff.to_shwi (bit_offset))
6447 return NULL_TREE;
6449 if (valueize
6450 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6451 base = valueize (TREE_OPERAND (base, 0));
6452 if (!base || TREE_CODE (base) != ADDR_EXPR)
6453 return NULL_TREE;
6454 base = TREE_OPERAND (base, 0);
6456 else if (valueize
6457 && TREE_CODE (base) == SSA_NAME)
6458 base = valueize (base);
6460 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6461 DECL_INITIAL. If BASE is a nested reference into another
6462 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6463 the inner reference. */
6464 switch (TREE_CODE (base))
6466 case VAR_DECL:
6467 case CONST_DECL:
6469 tree init = ctor_for_folding (base);
6471 /* Our semantic is exact opposite of ctor_for_folding;
6472 NULL means unknown, while error_mark_node is 0. */
6473 if (init == error_mark_node)
6474 return NULL_TREE;
6475 if (!init)
6476 return error_mark_node;
6477 return init;
6480 case VIEW_CONVERT_EXPR:
6481 return get_base_constructor (TREE_OPERAND (base, 0),
6482 bit_offset, valueize);
6484 case ARRAY_REF:
6485 case COMPONENT_REF:
6486 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6487 &reverse);
6488 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6489 return NULL_TREE;
6490 *bit_offset += bit_offset2;
6491 return get_base_constructor (base, bit_offset, valueize);
6493 case CONSTRUCTOR:
6494 return base;
6496 default:
6497 if (CONSTANT_CLASS_P (base))
6498 return base;
6500 return NULL_TREE;
6504 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6505 SIZE to the memory at bit OFFSET. */
6507 static tree
6508 fold_array_ctor_reference (tree type, tree ctor,
6509 unsigned HOST_WIDE_INT offset,
6510 unsigned HOST_WIDE_INT size,
6511 tree from_decl)
6513 offset_int low_bound;
6514 offset_int elt_size;
6515 offset_int access_index;
6516 tree domain_type = NULL_TREE;
6517 HOST_WIDE_INT inner_offset;
6519 /* Compute low bound and elt size. */
6520 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6521 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6522 if (domain_type && TYPE_MIN_VALUE (domain_type))
6524 /* Static constructors for variably sized objects makes no sense. */
6525 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6526 return NULL_TREE;
6527 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6529 else
6530 low_bound = 0;
6531 /* Static constructors for variably sized objects makes no sense. */
6532 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6533 return NULL_TREE;
6534 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6536 /* We can handle only constantly sized accesses that are known to not
6537 be larger than size of array element. */
6538 if (!TYPE_SIZE_UNIT (type)
6539 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6540 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6541 || elt_size == 0)
6542 return NULL_TREE;
6544 /* Compute the array index we look for. */
6545 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6546 elt_size);
6547 access_index += low_bound;
6549 /* And offset within the access. */
6550 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6552 /* See if the array field is large enough to span whole access. We do not
6553 care to fold accesses spanning multiple array indexes. */
6554 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6555 return NULL_TREE;
6556 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6557 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6559 /* When memory is not explicitely mentioned in constructor,
6560 it is 0 (or out of range). */
6561 return build_zero_cst (type);
6564 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6565 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6567 static tree
6568 fold_nonarray_ctor_reference (tree type, tree ctor,
6569 unsigned HOST_WIDE_INT offset,
6570 unsigned HOST_WIDE_INT size,
6571 tree from_decl)
6573 unsigned HOST_WIDE_INT cnt;
6574 tree cfield, cval;
6576 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6577 cval)
6579 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6580 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6581 tree field_size = DECL_SIZE (cfield);
6582 offset_int bitoffset;
6583 offset_int bitoffset_end, access_end;
6585 /* Variable sized objects in static constructors makes no sense,
6586 but field_size can be NULL for flexible array members. */
6587 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6588 && TREE_CODE (byte_offset) == INTEGER_CST
6589 && (field_size != NULL_TREE
6590 ? TREE_CODE (field_size) == INTEGER_CST
6591 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6593 /* Compute bit offset of the field. */
6594 bitoffset = (wi::to_offset (field_offset)
6595 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6596 /* Compute bit offset where the field ends. */
6597 if (field_size != NULL_TREE)
6598 bitoffset_end = bitoffset + wi::to_offset (field_size);
6599 else
6600 bitoffset_end = 0;
6602 access_end = offset_int (offset) + size;
6604 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6605 [BITOFFSET, BITOFFSET_END)? */
6606 if (wi::cmps (access_end, bitoffset) > 0
6607 && (field_size == NULL_TREE
6608 || wi::lts_p (offset, bitoffset_end)))
6610 offset_int inner_offset = offset_int (offset) - bitoffset;
6611 /* We do have overlap. Now see if field is large enough to
6612 cover the access. Give up for accesses spanning multiple
6613 fields. */
6614 if (wi::cmps (access_end, bitoffset_end) > 0)
6615 return NULL_TREE;
6616 if (offset < bitoffset)
6617 return NULL_TREE;
6618 return fold_ctor_reference (type, cval,
6619 inner_offset.to_uhwi (), size,
6620 from_decl);
6623 /* When memory is not explicitely mentioned in constructor, it is 0. */
6624 return build_zero_cst (type);
6627 /* CTOR is value initializing memory, fold reference of type TYPE and
6628 size POLY_SIZE to the memory at bit POLY_OFFSET. */
6630 tree
6631 fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
6632 poly_uint64 poly_size, tree from_decl)
6634 tree ret;
6636 /* We found the field with exact match. */
6637 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6638 && known_eq (poly_offset, 0U))
6639 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6641 /* The remaining optimizations need a constant size and offset. */
6642 unsigned HOST_WIDE_INT size, offset;
6643 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6644 return NULL_TREE;
6646 /* We are at the end of walk, see if we can view convert the
6647 result. */
6648 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6649 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6650 && !compare_tree_int (TYPE_SIZE (type), size)
6651 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6653 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6654 if (ret)
6656 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6657 if (ret)
6658 STRIP_USELESS_TYPE_CONVERSION (ret);
6660 return ret;
6662 /* For constants and byte-aligned/sized reads try to go through
6663 native_encode/interpret. */
6664 if (CONSTANT_CLASS_P (ctor)
6665 && BITS_PER_UNIT == 8
6666 && offset % BITS_PER_UNIT == 0
6667 && size % BITS_PER_UNIT == 0
6668 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6670 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6671 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6672 offset / BITS_PER_UNIT);
6673 if (len > 0)
6674 return native_interpret_expr (type, buf, len);
6676 if (TREE_CODE (ctor) == CONSTRUCTOR)
6679 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6680 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6681 return fold_array_ctor_reference (type, ctor, offset, size,
6682 from_decl);
6683 else
6684 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6685 from_decl);
6688 return NULL_TREE;
6691 /* Return the tree representing the element referenced by T if T is an
6692 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6693 names using VALUEIZE. Return NULL_TREE otherwise. */
6695 tree
6696 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6698 tree ctor, idx, base;
6699 poly_int64 offset, size, max_size;
6700 tree tem;
6701 bool reverse;
6703 if (TREE_THIS_VOLATILE (t))
6704 return NULL_TREE;
6706 if (DECL_P (t))
6707 return get_symbol_constant_value (t);
6709 tem = fold_read_from_constant_string (t);
6710 if (tem)
6711 return tem;
6713 switch (TREE_CODE (t))
6715 case ARRAY_REF:
6716 case ARRAY_RANGE_REF:
6717 /* Constant indexes are handled well by get_base_constructor.
6718 Only special case variable offsets.
6719 FIXME: This code can't handle nested references with variable indexes
6720 (they will be handled only by iteration of ccp). Perhaps we can bring
6721 get_ref_base_and_extent here and make it use a valueize callback. */
6722 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6723 && valueize
6724 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6725 && poly_int_tree_p (idx))
6727 tree low_bound, unit_size;
6729 /* If the resulting bit-offset is constant, track it. */
6730 if ((low_bound = array_ref_low_bound (t),
6731 poly_int_tree_p (low_bound))
6732 && (unit_size = array_ref_element_size (t),
6733 tree_fits_uhwi_p (unit_size)))
6735 poly_offset_int woffset
6736 = wi::sext (wi::to_poly_offset (idx)
6737 - wi::to_poly_offset (low_bound),
6738 TYPE_PRECISION (TREE_TYPE (idx)));
6740 if (woffset.to_shwi (&offset))
6742 /* TODO: This code seems wrong, multiply then check
6743 to see if it fits. */
6744 offset *= tree_to_uhwi (unit_size);
6745 offset *= BITS_PER_UNIT;
6747 base = TREE_OPERAND (t, 0);
6748 ctor = get_base_constructor (base, &offset, valueize);
6749 /* Empty constructor. Always fold to 0. */
6750 if (ctor == error_mark_node)
6751 return build_zero_cst (TREE_TYPE (t));
6752 /* Out of bound array access. Value is undefined,
6753 but don't fold. */
6754 if (maybe_lt (offset, 0))
6755 return NULL_TREE;
6756 /* We can not determine ctor. */
6757 if (!ctor)
6758 return NULL_TREE;
6759 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6760 tree_to_uhwi (unit_size)
6761 * BITS_PER_UNIT,
6762 base);
6766 /* Fallthru. */
6768 case COMPONENT_REF:
6769 case BIT_FIELD_REF:
6770 case TARGET_MEM_REF:
6771 case MEM_REF:
6772 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6773 ctor = get_base_constructor (base, &offset, valueize);
6775 /* Empty constructor. Always fold to 0. */
6776 if (ctor == error_mark_node)
6777 return build_zero_cst (TREE_TYPE (t));
6778 /* We do not know precise address. */
6779 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6780 return NULL_TREE;
6781 /* We can not determine ctor. */
6782 if (!ctor)
6783 return NULL_TREE;
6785 /* Out of bound array access. Value is undefined, but don't fold. */
6786 if (maybe_lt (offset, 0))
6787 return NULL_TREE;
6789 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6790 base);
6792 case REALPART_EXPR:
6793 case IMAGPART_EXPR:
6795 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6796 if (c && TREE_CODE (c) == COMPLEX_CST)
6797 return fold_build1_loc (EXPR_LOCATION (t),
6798 TREE_CODE (t), TREE_TYPE (t), c);
6799 break;
6802 default:
6803 break;
6806 return NULL_TREE;
6809 tree
6810 fold_const_aggregate_ref (tree t)
6812 return fold_const_aggregate_ref_1 (t, NULL);
6815 /* Lookup virtual method with index TOKEN in a virtual table V
6816 at OFFSET.
6817 Set CAN_REFER if non-NULL to false if method
6818 is not referable or if the virtual table is ill-formed (such as rewriten
6819 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6821 tree
6822 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6823 tree v,
6824 unsigned HOST_WIDE_INT offset,
6825 bool *can_refer)
6827 tree vtable = v, init, fn;
6828 unsigned HOST_WIDE_INT size;
6829 unsigned HOST_WIDE_INT elt_size, access_index;
6830 tree domain_type;
6832 if (can_refer)
6833 *can_refer = true;
6835 /* First of all double check we have virtual table. */
6836 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6838 /* Pass down that we lost track of the target. */
6839 if (can_refer)
6840 *can_refer = false;
6841 return NULL_TREE;
6844 init = ctor_for_folding (v);
6846 /* The virtual tables should always be born with constructors
6847 and we always should assume that they are avaialble for
6848 folding. At the moment we do not stream them in all cases,
6849 but it should never happen that ctor seem unreachable. */
6850 gcc_assert (init);
6851 if (init == error_mark_node)
6853 /* Pass down that we lost track of the target. */
6854 if (can_refer)
6855 *can_refer = false;
6856 return NULL_TREE;
6858 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6859 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6860 offset *= BITS_PER_UNIT;
6861 offset += token * size;
6863 /* Lookup the value in the constructor that is assumed to be array.
6864 This is equivalent to
6865 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6866 offset, size, NULL);
6867 but in a constant time. We expect that frontend produced a simple
6868 array without indexed initializers. */
6870 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6871 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6872 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6873 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6875 access_index = offset / BITS_PER_UNIT / elt_size;
6876 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6878 /* This code makes an assumption that there are no
6879 indexed fileds produced by C++ FE, so we can directly index the array. */
6880 if (access_index < CONSTRUCTOR_NELTS (init))
6882 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6883 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6884 STRIP_NOPS (fn);
6886 else
6887 fn = NULL;
6889 /* For type inconsistent program we may end up looking up virtual method
6890 in virtual table that does not contain TOKEN entries. We may overrun
6891 the virtual table and pick up a constant or RTTI info pointer.
6892 In any case the call is undefined. */
6893 if (!fn
6894 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6895 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6896 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6897 else
6899 fn = TREE_OPERAND (fn, 0);
6901 /* When cgraph node is missing and function is not public, we cannot
6902 devirtualize. This can happen in WHOPR when the actual method
6903 ends up in other partition, because we found devirtualization
6904 possibility too late. */
6905 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6907 if (can_refer)
6909 *can_refer = false;
6910 return fn;
6912 return NULL_TREE;
6916 /* Make sure we create a cgraph node for functions we'll reference.
6917 They can be non-existent if the reference comes from an entry
6918 of an external vtable for example. */
6919 cgraph_node::get_create (fn);
6921 return fn;
6924 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6925 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6926 KNOWN_BINFO carries the binfo describing the true type of
6927 OBJ_TYPE_REF_OBJECT(REF).
6928 Set CAN_REFER if non-NULL to false if method
6929 is not referable or if the virtual table is ill-formed (such as rewriten
6930 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6932 tree
6933 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6934 bool *can_refer)
6936 unsigned HOST_WIDE_INT offset;
6937 tree v;
6939 v = BINFO_VTABLE (known_binfo);
6940 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6941 if (!v)
6942 return NULL_TREE;
6944 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6946 if (can_refer)
6947 *can_refer = false;
6948 return NULL_TREE;
6950 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6953 /* Given a pointer value T, return a simplified version of an
6954 indirection through T, or NULL_TREE if no simplification is
6955 possible. Note that the resulting type may be different from
6956 the type pointed to in the sense that it is still compatible
6957 from the langhooks point of view. */
6959 tree
6960 gimple_fold_indirect_ref (tree t)
6962 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6963 tree sub = t;
6964 tree subtype;
6966 STRIP_NOPS (sub);
6967 subtype = TREE_TYPE (sub);
6968 if (!POINTER_TYPE_P (subtype)
6969 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6970 return NULL_TREE;
6972 if (TREE_CODE (sub) == ADDR_EXPR)
6974 tree op = TREE_OPERAND (sub, 0);
6975 tree optype = TREE_TYPE (op);
6976 /* *&p => p */
6977 if (useless_type_conversion_p (type, optype))
6978 return op;
6980 /* *(foo *)&fooarray => fooarray[0] */
6981 if (TREE_CODE (optype) == ARRAY_TYPE
6982 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6983 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6985 tree type_domain = TYPE_DOMAIN (optype);
6986 tree min_val = size_zero_node;
6987 if (type_domain && TYPE_MIN_VALUE (type_domain))
6988 min_val = TYPE_MIN_VALUE (type_domain);
6989 if (TREE_CODE (min_val) == INTEGER_CST)
6990 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6992 /* *(foo *)&complexfoo => __real__ complexfoo */
6993 else if (TREE_CODE (optype) == COMPLEX_TYPE
6994 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6995 return fold_build1 (REALPART_EXPR, type, op);
6996 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6997 else if (TREE_CODE (optype) == VECTOR_TYPE
6998 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7000 tree part_width = TYPE_SIZE (type);
7001 tree index = bitsize_int (0);
7002 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7006 /* *(p + CST) -> ... */
7007 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7008 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7010 tree addr = TREE_OPERAND (sub, 0);
7011 tree off = TREE_OPERAND (sub, 1);
7012 tree addrtype;
7014 STRIP_NOPS (addr);
7015 addrtype = TREE_TYPE (addr);
7017 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7018 if (TREE_CODE (addr) == ADDR_EXPR
7019 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7020 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7021 && tree_fits_uhwi_p (off))
7023 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7024 tree part_width = TYPE_SIZE (type);
7025 unsigned HOST_WIDE_INT part_widthi
7026 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7027 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7028 tree index = bitsize_int (indexi);
7029 if (known_lt (offset / part_widthi,
7030 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7031 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7032 part_width, index);
7035 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7036 if (TREE_CODE (addr) == ADDR_EXPR
7037 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7038 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7040 tree size = TYPE_SIZE_UNIT (type);
7041 if (tree_int_cst_equal (size, off))
7042 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7045 /* *(p + CST) -> MEM_REF <p, CST>. */
7046 if (TREE_CODE (addr) != ADDR_EXPR
7047 || DECL_P (TREE_OPERAND (addr, 0)))
7048 return fold_build2 (MEM_REF, type,
7049 addr,
7050 wide_int_to_tree (ptype, wi::to_wide (off)));
7053 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7054 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7055 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7056 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7058 tree type_domain;
7059 tree min_val = size_zero_node;
7060 tree osub = sub;
7061 sub = gimple_fold_indirect_ref (sub);
7062 if (! sub)
7063 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7064 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7065 if (type_domain && TYPE_MIN_VALUE (type_domain))
7066 min_val = TYPE_MIN_VALUE (type_domain);
7067 if (TREE_CODE (min_val) == INTEGER_CST)
7068 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7071 return NULL_TREE;
7074 /* Return true if CODE is an operation that when operating on signed
7075 integer types involves undefined behavior on overflow and the
7076 operation can be expressed with unsigned arithmetic. */
7078 bool
7079 arith_code_with_undefined_signed_overflow (tree_code code)
7081 switch (code)
7083 case PLUS_EXPR:
7084 case MINUS_EXPR:
7085 case MULT_EXPR:
7086 case NEGATE_EXPR:
7087 case POINTER_PLUS_EXPR:
7088 return true;
7089 default:
7090 return false;
7094 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7095 operation that can be transformed to unsigned arithmetic by converting
7096 its operand, carrying out the operation in the corresponding unsigned
7097 type and converting the result back to the original type.
7099 Returns a sequence of statements that replace STMT and also contain
7100 a modified form of STMT itself. */
7102 gimple_seq
7103 rewrite_to_defined_overflow (gimple *stmt)
7105 if (dump_file && (dump_flags & TDF_DETAILS))
7107 fprintf (dump_file, "rewriting stmt with undefined signed "
7108 "overflow ");
7109 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7112 tree lhs = gimple_assign_lhs (stmt);
7113 tree type = unsigned_type_for (TREE_TYPE (lhs));
7114 gimple_seq stmts = NULL;
7115 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7117 tree op = gimple_op (stmt, i);
7118 op = gimple_convert (&stmts, type, op);
7119 gimple_set_op (stmt, i, op);
7121 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7122 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7123 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7124 gimple_seq_add_stmt (&stmts, stmt);
7125 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7126 gimple_seq_add_stmt (&stmts, cvt);
7128 return stmts;
7132 /* The valueization hook we use for the gimple_build API simplification.
7133 This makes us match fold_buildN behavior by only combining with
7134 statements in the sequence(s) we are currently building. */
7136 static tree
7137 gimple_build_valueize (tree op)
7139 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7140 return op;
7141 return NULL_TREE;
7144 /* Build the expression CODE OP0 of type TYPE with location LOC,
7145 simplifying it first if possible. Returns the built
7146 expression value and appends statements possibly defining it
7147 to SEQ. */
7149 tree
7150 gimple_build (gimple_seq *seq, location_t loc,
7151 enum tree_code code, tree type, tree op0)
7153 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7154 if (!res)
7156 res = create_tmp_reg_or_ssa_name (type);
7157 gimple *stmt;
7158 if (code == REALPART_EXPR
7159 || code == IMAGPART_EXPR
7160 || code == VIEW_CONVERT_EXPR)
7161 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7162 else
7163 stmt = gimple_build_assign (res, code, op0);
7164 gimple_set_location (stmt, loc);
7165 gimple_seq_add_stmt_without_update (seq, stmt);
7167 return res;
7170 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7171 simplifying it first if possible. Returns the built
7172 expression value and appends statements possibly defining it
7173 to SEQ. */
7175 tree
7176 gimple_build (gimple_seq *seq, location_t loc,
7177 enum tree_code code, tree type, tree op0, tree op1)
7179 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7180 if (!res)
7182 res = create_tmp_reg_or_ssa_name (type);
7183 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7184 gimple_set_location (stmt, loc);
7185 gimple_seq_add_stmt_without_update (seq, stmt);
7187 return res;
7190 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7191 simplifying it first if possible. Returns the built
7192 expression value and appends statements possibly defining it
7193 to SEQ. */
7195 tree
7196 gimple_build (gimple_seq *seq, location_t loc,
7197 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7199 tree res = gimple_simplify (code, type, op0, op1, op2,
7200 seq, gimple_build_valueize);
7201 if (!res)
7203 res = create_tmp_reg_or_ssa_name (type);
7204 gimple *stmt;
7205 if (code == BIT_FIELD_REF)
7206 stmt = gimple_build_assign (res, code,
7207 build3 (code, type, op0, op1, op2));
7208 else
7209 stmt = gimple_build_assign (res, code, op0, op1, op2);
7210 gimple_set_location (stmt, loc);
7211 gimple_seq_add_stmt_without_update (seq, stmt);
7213 return res;
7216 /* Build the call FN (ARG0) with a result of type TYPE
7217 (or no result if TYPE is void) with location LOC,
7218 simplifying it first if possible. Returns the built
7219 expression value (or NULL_TREE if TYPE is void) and appends
7220 statements possibly defining it to SEQ. */
7222 tree
7223 gimple_build (gimple_seq *seq, location_t loc,
7224 enum built_in_function fn, tree type, tree arg0)
7226 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7227 if (!res)
7229 tree decl = builtin_decl_implicit (fn);
7230 gimple *stmt = gimple_build_call (decl, 1, arg0);
7231 if (!VOID_TYPE_P (type))
7233 res = create_tmp_reg_or_ssa_name (type);
7234 gimple_call_set_lhs (stmt, res);
7236 gimple_set_location (stmt, loc);
7237 gimple_seq_add_stmt_without_update (seq, stmt);
7239 return res;
7242 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7243 (or no result if TYPE is void) with location LOC,
7244 simplifying it first if possible. Returns the built
7245 expression value (or NULL_TREE if TYPE is void) and appends
7246 statements possibly defining it to SEQ. */
7248 tree
7249 gimple_build (gimple_seq *seq, location_t loc,
7250 enum built_in_function fn, tree type, tree arg0, tree arg1)
7252 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7253 if (!res)
7255 tree decl = builtin_decl_implicit (fn);
7256 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7257 if (!VOID_TYPE_P (type))
7259 res = create_tmp_reg_or_ssa_name (type);
7260 gimple_call_set_lhs (stmt, res);
7262 gimple_set_location (stmt, loc);
7263 gimple_seq_add_stmt_without_update (seq, stmt);
7265 return res;
7268 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7269 (or no result if TYPE is void) with location LOC,
7270 simplifying it first if possible. Returns the built
7271 expression value (or NULL_TREE if TYPE is void) and appends
7272 statements possibly defining it to SEQ. */
7274 tree
7275 gimple_build (gimple_seq *seq, location_t loc,
7276 enum built_in_function fn, tree type,
7277 tree arg0, tree arg1, tree arg2)
7279 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7280 seq, gimple_build_valueize);
7281 if (!res)
7283 tree decl = builtin_decl_implicit (fn);
7284 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7285 if (!VOID_TYPE_P (type))
7287 res = create_tmp_reg_or_ssa_name (type);
7288 gimple_call_set_lhs (stmt, res);
7290 gimple_set_location (stmt, loc);
7291 gimple_seq_add_stmt_without_update (seq, stmt);
7293 return res;
7296 /* Build the conversion (TYPE) OP with a result of type TYPE
7297 with location LOC if such conversion is neccesary in GIMPLE,
7298 simplifying it first.
7299 Returns the built expression value and appends
7300 statements possibly defining it to SEQ. */
7302 tree
7303 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7305 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7306 return op;
7307 return gimple_build (seq, loc, NOP_EXPR, type, op);
7310 /* Build the conversion (ptrofftype) OP with a result of a type
7311 compatible with ptrofftype with location LOC if such conversion
7312 is neccesary in GIMPLE, simplifying it first.
7313 Returns the built expression value and appends
7314 statements possibly defining it to SEQ. */
7316 tree
7317 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7319 if (ptrofftype_p (TREE_TYPE (op)))
7320 return op;
7321 return gimple_convert (seq, loc, sizetype, op);
7324 /* Build a vector of type TYPE in which each element has the value OP.
7325 Return a gimple value for the result, appending any new statements
7326 to SEQ. */
7328 tree
7329 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7330 tree op)
7332 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7333 && !CONSTANT_CLASS_P (op))
7334 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7336 tree res, vec = build_vector_from_val (type, op);
7337 if (is_gimple_val (vec))
7338 return vec;
7339 if (gimple_in_ssa_p (cfun))
7340 res = make_ssa_name (type);
7341 else
7342 res = create_tmp_reg (type);
7343 gimple *stmt = gimple_build_assign (res, vec);
7344 gimple_set_location (stmt, loc);
7345 gimple_seq_add_stmt_without_update (seq, stmt);
7346 return res;
7349 /* Build a vector from BUILDER, handling the case in which some elements
7350 are non-constant. Return a gimple value for the result, appending any
7351 new instructions to SEQ.
7353 BUILDER must not have a stepped encoding on entry. This is because
7354 the function is not geared up to handle the arithmetic that would
7355 be needed in the variable case, and any code building a vector that
7356 is known to be constant should use BUILDER->build () directly. */
7358 tree
7359 gimple_build_vector (gimple_seq *seq, location_t loc,
7360 tree_vector_builder *builder)
7362 gcc_assert (builder->nelts_per_pattern () <= 2);
7363 unsigned int encoded_nelts = builder->encoded_nelts ();
7364 for (unsigned int i = 0; i < encoded_nelts; ++i)
7365 if (!TREE_CONSTANT ((*builder)[i]))
7367 tree type = builder->type ();
7368 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7369 vec<constructor_elt, va_gc> *v;
7370 vec_alloc (v, nelts);
7371 for (i = 0; i < nelts; ++i)
7372 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7374 tree res;
7375 if (gimple_in_ssa_p (cfun))
7376 res = make_ssa_name (type);
7377 else
7378 res = create_tmp_reg (type);
7379 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7380 gimple_set_location (stmt, loc);
7381 gimple_seq_add_stmt_without_update (seq, stmt);
7382 return res;
7384 return builder->build ();
7387 /* Return true if the result of assignment STMT is known to be non-negative.
7388 If the return value is based on the assumption that signed overflow is
7389 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7390 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7392 static bool
7393 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7394 int depth)
7396 enum tree_code code = gimple_assign_rhs_code (stmt);
7397 switch (get_gimple_rhs_class (code))
7399 case GIMPLE_UNARY_RHS:
7400 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7401 gimple_expr_type (stmt),
7402 gimple_assign_rhs1 (stmt),
7403 strict_overflow_p, depth);
7404 case GIMPLE_BINARY_RHS:
7405 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7406 gimple_expr_type (stmt),
7407 gimple_assign_rhs1 (stmt),
7408 gimple_assign_rhs2 (stmt),
7409 strict_overflow_p, depth);
7410 case GIMPLE_TERNARY_RHS:
7411 return false;
7412 case GIMPLE_SINGLE_RHS:
7413 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7414 strict_overflow_p, depth);
7415 case GIMPLE_INVALID_RHS:
7416 break;
7418 gcc_unreachable ();
7421 /* Return true if return value of call STMT is known to be non-negative.
7422 If the return value is based on the assumption that signed overflow is
7423 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7424 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7426 static bool
7427 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7428 int depth)
7430 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7431 gimple_call_arg (stmt, 0) : NULL_TREE;
7432 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7433 gimple_call_arg (stmt, 1) : NULL_TREE;
7435 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7436 gimple_call_combined_fn (stmt),
7437 arg0,
7438 arg1,
7439 strict_overflow_p, depth);
7442 /* Return true if return value of call STMT is known to be non-negative.
7443 If the return value is based on the assumption that signed overflow is
7444 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7445 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7447 static bool
7448 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7449 int depth)
7451 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7453 tree arg = gimple_phi_arg_def (stmt, i);
7454 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7455 return false;
7457 return true;
7460 /* Return true if STMT is known to compute a non-negative value.
7461 If the return value is based on the assumption that signed overflow is
7462 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7463 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7465 bool
7466 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7467 int depth)
7469 switch (gimple_code (stmt))
7471 case GIMPLE_ASSIGN:
7472 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7473 depth);
7474 case GIMPLE_CALL:
7475 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7476 depth);
7477 case GIMPLE_PHI:
7478 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7479 depth);
7480 default:
7481 return false;
7485 /* Return true if the floating-point value computed by assignment STMT
7486 is known to have an integer value. We also allow +Inf, -Inf and NaN
7487 to be considered integer values. Return false for signaling NaN.
7489 DEPTH is the current nesting depth of the query. */
7491 static bool
7492 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7494 enum tree_code code = gimple_assign_rhs_code (stmt);
7495 switch (get_gimple_rhs_class (code))
7497 case GIMPLE_UNARY_RHS:
7498 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7499 gimple_assign_rhs1 (stmt), depth);
7500 case GIMPLE_BINARY_RHS:
7501 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7502 gimple_assign_rhs1 (stmt),
7503 gimple_assign_rhs2 (stmt), depth);
7504 case GIMPLE_TERNARY_RHS:
7505 return false;
7506 case GIMPLE_SINGLE_RHS:
7507 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7508 case GIMPLE_INVALID_RHS:
7509 break;
7511 gcc_unreachable ();
7514 /* Return true if the floating-point value computed by call STMT is known
7515 to have an integer value. We also allow +Inf, -Inf and NaN to be
7516 considered integer values. Return false for signaling NaN.
7518 DEPTH is the current nesting depth of the query. */
7520 static bool
7521 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7523 tree arg0 = (gimple_call_num_args (stmt) > 0
7524 ? gimple_call_arg (stmt, 0)
7525 : NULL_TREE);
7526 tree arg1 = (gimple_call_num_args (stmt) > 1
7527 ? gimple_call_arg (stmt, 1)
7528 : NULL_TREE);
7529 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7530 arg0, arg1, depth);
7533 /* Return true if the floating-point result of phi STMT is known to have
7534 an integer value. We also allow +Inf, -Inf and NaN to be considered
7535 integer values. Return false for signaling NaN.
7537 DEPTH is the current nesting depth of the query. */
7539 static bool
7540 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7542 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7544 tree arg = gimple_phi_arg_def (stmt, i);
7545 if (!integer_valued_real_single_p (arg, depth + 1))
7546 return false;
7548 return true;
7551 /* Return true if the floating-point value computed by STMT is known
7552 to have an integer value. We also allow +Inf, -Inf and NaN to be
7553 considered integer values. Return false for signaling NaN.
7555 DEPTH is the current nesting depth of the query. */
7557 bool
7558 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7560 switch (gimple_code (stmt))
7562 case GIMPLE_ASSIGN:
7563 return gimple_assign_integer_valued_real_p (stmt, depth);
7564 case GIMPLE_CALL:
7565 return gimple_call_integer_valued_real_p (stmt, depth);
7566 case GIMPLE_PHI:
7567 return gimple_phi_integer_valued_real_p (stmt, depth);
7568 default:
7569 return false;