Avoid is_constant calls in vectorizable_bswap
[official-gcc.git] / gcc / gimple-fold.c
blob07341ebe66fc9ea20c4b6a249c74bf8e1d983005
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
702 gsi_replace (gsi, repl, false);
703 return true;
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
721 goto done;
723 else
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
729 /* Build accesses at offset zero with a ref-all character type. */
730 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
731 ptr_mode, true), 0);
733 /* If we can perform the copy efficiently with first doing all loads
734 and then all stores inline it that way. Currently efficiently
735 means that we can load all the memory into a single integer
736 register which is what MOVE_MAX gives us. */
737 src_align = get_pointer_alignment (src);
738 dest_align = get_pointer_alignment (dest);
739 if (tree_fits_uhwi_p (len)
740 && compare_tree_int (len, MOVE_MAX) <= 0
741 /* ??? Don't transform copies from strings with known length this
742 confuses the tree-ssa-strlen.c. This doesn't handle
743 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
744 reason. */
745 && !c_strlen (src, 2))
747 unsigned ilen = tree_to_uhwi (len);
748 if (pow2p_hwi (ilen))
750 /* Detect invalid bounds and overlapping copies and issue
751 either -Warray-bounds or -Wrestrict. */
752 if (!nowarn
753 && check_bounds_or_overlap (as_a <gcall *>(stmt),
754 dest, src, len, len))
755 gimple_set_no_warning (stmt, true);
757 scalar_int_mode mode;
758 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
759 if (type
760 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
761 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
762 /* If the destination pointer is not aligned we must be able
763 to emit an unaligned store. */
764 && (dest_align >= GET_MODE_ALIGNMENT (mode)
765 || !targetm.slow_unaligned_access (mode, dest_align)
766 || (optab_handler (movmisalign_optab, mode)
767 != CODE_FOR_nothing)))
769 tree srctype = type;
770 tree desttype = type;
771 if (src_align < GET_MODE_ALIGNMENT (mode))
772 srctype = build_aligned_type (type, src_align);
773 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
774 tree tem = fold_const_aggregate_ref (srcmem);
775 if (tem)
776 srcmem = tem;
777 else if (src_align < GET_MODE_ALIGNMENT (mode)
778 && targetm.slow_unaligned_access (mode, src_align)
779 && (optab_handler (movmisalign_optab, mode)
780 == CODE_FOR_nothing))
781 srcmem = NULL_TREE;
782 if (srcmem)
784 gimple *new_stmt;
785 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
787 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
788 srcmem
789 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
790 new_stmt);
791 gimple_assign_set_lhs (new_stmt, srcmem);
792 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
793 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
795 if (dest_align < GET_MODE_ALIGNMENT (mode))
796 desttype = build_aligned_type (type, dest_align);
797 new_stmt
798 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
799 dest, off0),
800 srcmem);
801 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
802 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
803 if (gimple_vdef (new_stmt)
804 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
805 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
806 if (!lhs)
808 gsi_replace (gsi, new_stmt, false);
809 return true;
811 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
812 goto done;
818 if (endp == 3)
820 /* Both DEST and SRC must be pointer types.
821 ??? This is what old code did. Is the testing for pointer types
822 really mandatory?
824 If either SRC is readonly or length is 1, we can use memcpy. */
825 if (!dest_align || !src_align)
826 return false;
827 if (readonly_data_expr (src)
828 || (tree_fits_uhwi_p (len)
829 && (MIN (src_align, dest_align) / BITS_PER_UNIT
830 >= tree_to_uhwi (len))))
832 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
833 if (!fn)
834 return false;
835 gimple_call_set_fndecl (stmt, fn);
836 gimple_call_set_arg (stmt, 0, dest);
837 gimple_call_set_arg (stmt, 1, src);
838 fold_stmt (gsi);
839 return true;
842 /* If *src and *dest can't overlap, optimize into memcpy as well. */
843 if (TREE_CODE (src) == ADDR_EXPR
844 && TREE_CODE (dest) == ADDR_EXPR)
846 tree src_base, dest_base, fn;
847 poly_int64 src_offset = 0, dest_offset = 0;
848 poly_uint64 maxsize;
850 srcvar = TREE_OPERAND (src, 0);
851 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
852 if (src_base == NULL)
853 src_base = srcvar;
854 destvar = TREE_OPERAND (dest, 0);
855 dest_base = get_addr_base_and_unit_offset (destvar,
856 &dest_offset);
857 if (dest_base == NULL)
858 dest_base = destvar;
859 if (!poly_int_tree_p (len, &maxsize))
860 maxsize = -1;
861 if (SSA_VAR_P (src_base)
862 && SSA_VAR_P (dest_base))
864 if (operand_equal_p (src_base, dest_base, 0)
865 && ranges_maybe_overlap_p (src_offset, maxsize,
866 dest_offset, maxsize))
867 return false;
869 else if (TREE_CODE (src_base) == MEM_REF
870 && TREE_CODE (dest_base) == MEM_REF)
872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
873 TREE_OPERAND (dest_base, 0), 0))
874 return false;
875 poly_offset_int full_src_offset
876 = mem_ref_offset (src_base) + src_offset;
877 poly_offset_int full_dest_offset
878 = mem_ref_offset (dest_base) + dest_offset;
879 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
880 full_dest_offset, maxsize))
881 return false;
883 else
884 return false;
886 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
887 if (!fn)
888 return false;
889 gimple_call_set_fndecl (stmt, fn);
890 gimple_call_set_arg (stmt, 0, dest);
891 gimple_call_set_arg (stmt, 1, src);
892 fold_stmt (gsi);
893 return true;
896 /* If the destination and source do not alias optimize into
897 memcpy as well. */
898 if ((is_gimple_min_invariant (dest)
899 || TREE_CODE (dest) == SSA_NAME)
900 && (is_gimple_min_invariant (src)
901 || TREE_CODE (src) == SSA_NAME))
903 ao_ref destr, srcr;
904 ao_ref_init_from_ptr_and_size (&destr, dest, len);
905 ao_ref_init_from_ptr_and_size (&srcr, src, len);
906 if (!refs_may_alias_p_1 (&destr, &srcr, false))
908 tree fn;
909 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
920 return false;
923 if (!tree_fits_shwi_p (len))
924 return false;
925 if (!POINTER_TYPE_P (TREE_TYPE (src))
926 || !POINTER_TYPE_P (TREE_TYPE (dest)))
927 return false;
928 /* In the following try to find a type that is most natural to be
929 used for the memcpy source and destination and that allows
930 the most optimization when memcpy is turned into a plain assignment
931 using that type. In theory we could always use a char[len] type
932 but that only gains us that the destination and source possibly
933 no longer will have their address taken. */
934 srctype = TREE_TYPE (TREE_TYPE (src));
935 if (TREE_CODE (srctype) == ARRAY_TYPE
936 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
937 srctype = TREE_TYPE (srctype);
938 desttype = TREE_TYPE (TREE_TYPE (dest));
939 if (TREE_CODE (desttype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
941 desttype = TREE_TYPE (desttype);
942 if (TREE_ADDRESSABLE (srctype)
943 || TREE_ADDRESSABLE (desttype))
944 return false;
946 /* Make sure we are not copying using a floating-point mode or
947 a type whose size possibly does not match its precision. */
948 if (FLOAT_MODE_P (TYPE_MODE (desttype))
949 || TREE_CODE (desttype) == BOOLEAN_TYPE
950 || TREE_CODE (desttype) == ENUMERAL_TYPE)
951 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
952 if (FLOAT_MODE_P (TYPE_MODE (srctype))
953 || TREE_CODE (srctype) == BOOLEAN_TYPE
954 || TREE_CODE (srctype) == ENUMERAL_TYPE)
955 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
956 if (!srctype)
957 srctype = desttype;
958 if (!desttype)
959 desttype = srctype;
960 if (!srctype)
961 return false;
963 src_align = get_pointer_alignment (src);
964 dest_align = get_pointer_alignment (dest);
965 if (dest_align < TYPE_ALIGN (desttype)
966 || src_align < TYPE_ALIGN (srctype))
967 return false;
969 destvar = NULL_TREE;
970 if (TREE_CODE (dest) == ADDR_EXPR
971 && var_decl_component_p (TREE_OPERAND (dest, 0))
972 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
973 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
975 srcvar = NULL_TREE;
976 if (TREE_CODE (src) == ADDR_EXPR
977 && var_decl_component_p (TREE_OPERAND (src, 0))
978 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 if (!destvar
981 || src_align >= TYPE_ALIGN (desttype))
982 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
983 src, off0);
984 else if (!STRICT_ALIGNMENT)
986 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
987 src_align);
988 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
992 if (srcvar == NULL_TREE && destvar == NULL_TREE)
993 return false;
995 if (srcvar == NULL_TREE)
997 if (src_align >= TYPE_ALIGN (desttype))
998 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
999 else
1001 if (STRICT_ALIGNMENT)
1002 return false;
1003 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1004 src_align);
1005 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1008 else if (destvar == NULL_TREE)
1010 if (dest_align >= TYPE_ALIGN (srctype))
1011 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1012 else
1014 if (STRICT_ALIGNMENT)
1015 return false;
1016 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1017 dest_align);
1018 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1022 /* Detect invalid bounds and overlapping copies and issue either
1023 -Warray-bounds or -Wrestrict. */
1024 if (!nowarn)
1025 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1027 gimple *new_stmt;
1028 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1030 tree tem = fold_const_aggregate_ref (srcvar);
1031 if (tem)
1032 srcvar = tem;
1033 if (! is_gimple_min_invariant (srcvar))
1035 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1036 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1037 new_stmt);
1038 gimple_assign_set_lhs (new_stmt, srcvar);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1042 new_stmt = gimple_build_assign (destvar, srcvar);
1043 goto set_vop_and_replace;
1046 /* We get an aggregate copy. Use an unsigned char[] type to
1047 perform the copying to preserve padding and to avoid any issues
1048 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1049 desttype = build_array_type_nelts (unsigned_char_type_node,
1050 tree_to_uhwi (len));
1051 srctype = desttype;
1052 if (src_align > TYPE_ALIGN (srctype))
1053 srctype = build_aligned_type (srctype, src_align);
1054 if (dest_align > TYPE_ALIGN (desttype))
1055 desttype = build_aligned_type (desttype, dest_align);
1056 new_stmt
1057 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1058 fold_build2 (MEM_REF, srctype, src, off0));
1059 set_vop_and_replace:
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1062 if (gimple_vdef (new_stmt)
1063 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1064 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1065 if (!lhs)
1067 gsi_replace (gsi, new_stmt, false);
1068 return true;
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1073 done:
1074 gimple_seq stmts = NULL;
1075 if (endp == 0 || endp == 3)
1076 len = NULL_TREE;
1077 else if (endp == 2)
1078 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1079 ssize_int (1));
1080 if (endp == 2 || endp == 1)
1082 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1083 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1084 TREE_TYPE (dest), dest, len);
1087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1088 gimple *repl = gimple_build_assign (lhs, dest);
1089 gsi_replace (gsi, repl, false);
1090 return true;
1093 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1094 to built-in memcmp (a, b, len). */
1096 static bool
1097 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1099 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1101 if (!fn)
1102 return false;
1104 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1106 gimple *stmt = gsi_stmt (*gsi);
1107 tree a = gimple_call_arg (stmt, 0);
1108 tree b = gimple_call_arg (stmt, 1);
1109 tree len = gimple_call_arg (stmt, 2);
1111 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1112 replace_call_with_call_and_fold (gsi, repl);
1114 return true;
1117 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1118 to built-in memmove (dest, src, len). */
1120 static bool
1121 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1123 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1125 if (!fn)
1126 return false;
1128 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1129 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1130 len) into memmove (dest, src, len). */
1132 gimple *stmt = gsi_stmt (*gsi);
1133 tree src = gimple_call_arg (stmt, 0);
1134 tree dest = gimple_call_arg (stmt, 1);
1135 tree len = gimple_call_arg (stmt, 2);
1137 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1138 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1139 replace_call_with_call_and_fold (gsi, repl);
1141 return true;
1144 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1145 to built-in memset (dest, 0, len). */
1147 static bool
1148 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1150 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1152 if (!fn)
1153 return false;
1155 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1157 gimple *stmt = gsi_stmt (*gsi);
1158 tree dest = gimple_call_arg (stmt, 0);
1159 tree len = gimple_call_arg (stmt, 1);
1161 gimple_seq seq = NULL;
1162 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1163 gimple_seq_add_stmt_without_update (&seq, repl);
1164 gsi_replace_with_seq_vops (gsi, seq);
1165 fold_stmt (gsi);
1167 return true;
1170 /* Fold function call to builtin memset or bzero at *GSI setting the
1171 memory of size LEN to VAL. Return whether a simplification was made. */
1173 static bool
1174 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1176 gimple *stmt = gsi_stmt (*gsi);
1177 tree etype;
1178 unsigned HOST_WIDE_INT length, cval;
1180 /* If the LEN parameter is zero, return DEST. */
1181 if (integer_zerop (len))
1183 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1184 return true;
1187 if (! tree_fits_uhwi_p (len))
1188 return false;
1190 if (TREE_CODE (c) != INTEGER_CST)
1191 return false;
1193 tree dest = gimple_call_arg (stmt, 0);
1194 tree var = dest;
1195 if (TREE_CODE (var) != ADDR_EXPR)
1196 return false;
1198 var = TREE_OPERAND (var, 0);
1199 if (TREE_THIS_VOLATILE (var))
1200 return false;
1202 etype = TREE_TYPE (var);
1203 if (TREE_CODE (etype) == ARRAY_TYPE)
1204 etype = TREE_TYPE (etype);
1206 if (!INTEGRAL_TYPE_P (etype)
1207 && !POINTER_TYPE_P (etype))
1208 return NULL_TREE;
1210 if (! var_decl_component_p (var))
1211 return NULL_TREE;
1213 length = tree_to_uhwi (len);
1214 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1215 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1216 return NULL_TREE;
1218 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1219 return NULL_TREE;
1221 if (integer_zerop (c))
1222 cval = 0;
1223 else
1225 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1226 return NULL_TREE;
1228 cval = TREE_INT_CST_LOW (c);
1229 cval &= 0xff;
1230 cval |= cval << 8;
1231 cval |= cval << 16;
1232 cval |= (cval << 31) << 1;
1235 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1236 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1237 gimple_set_vuse (store, gimple_vuse (stmt));
1238 tree vdef = gimple_vdef (stmt);
1239 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1241 gimple_set_vdef (store, gimple_vdef (stmt));
1242 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1244 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1245 if (gimple_call_lhs (stmt))
1247 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1248 gsi_replace (gsi, asgn, false);
1250 else
1252 gimple_stmt_iterator gsi2 = *gsi;
1253 gsi_prev (gsi);
1254 gsi_remove (&gsi2, true);
1257 return true;
1261 /* Obtain the minimum and maximum string length or minimum and maximum
1262 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1263 If ARG is an SSA name variable, follow its use-def chains. When
1264 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1265 if we are unable to determine the length or value, return false.
1266 VISITED is a bitmap of visited variables.
1267 TYPE is 0 if string length should be obtained, 1 for maximum string
1268 length and 2 for maximum value ARG can have.
1269 When FUZZY is non-zero and the length of a string cannot be determined,
1270 the function instead considers as the maximum possible length the
1271 size of a character array it may refer to. If FUZZY is 2, it will handle
1272 PHIs and COND_EXPRs optimistically, if we can determine string length
1273 minimum and maximum, it will use the minimum from the ones where it
1274 can be determined.
1275 Set *FLEXP to true if the range of the string lengths has been
1276 obtained from the upper bound of an array at the end of a struct.
1277 Such an array may hold a string that's longer than its upper bound
1278 due to it being used as a poor-man's flexible array member.
1279 ELTSIZE is 1 for normal single byte character strings, and 2 or
1280 4 for wide characer strings. ELTSIZE is by default 1. */
1282 static bool
1283 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1284 int fuzzy, bool *flexp, unsigned eltsize = 1)
1286 tree var, val = NULL_TREE;
1287 gimple *def_stmt;
1289 /* The minimum and maximum length. */
1290 tree *const minlen = length;
1291 tree *const maxlen = length + 1;
1293 if (TREE_CODE (arg) != SSA_NAME)
1295 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1296 if (TREE_CODE (arg) == ADDR_EXPR
1297 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1299 tree op = TREE_OPERAND (arg, 0);
1300 if (integer_zerop (TREE_OPERAND (op, 1)))
1302 tree aop0 = TREE_OPERAND (op, 0);
1303 if (TREE_CODE (aop0) == INDIRECT_REF
1304 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1305 return get_range_strlen (TREE_OPERAND (aop0, 0), length,
1306 visited, type, fuzzy, flexp, eltsize);
1308 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1310 /* Fail if an array is the last member of a struct object
1311 since it could be treated as a (fake) flexible array
1312 member. */
1313 tree idx = TREE_OPERAND (op, 1);
1315 arg = TREE_OPERAND (op, 0);
1316 tree optype = TREE_TYPE (arg);
1317 if (tree dom = TYPE_DOMAIN (optype))
1318 if (tree bound = TYPE_MAX_VALUE (dom))
1319 if (TREE_CODE (bound) == INTEGER_CST
1320 && TREE_CODE (idx) == INTEGER_CST
1321 && tree_int_cst_lt (bound, idx))
1322 return false;
1326 if (type == 2)
1328 val = arg;
1329 if (TREE_CODE (val) != INTEGER_CST
1330 || tree_int_cst_sgn (val) < 0)
1331 return false;
1333 else
1334 val = c_strlen (arg, 1, eltsize);
1336 if (!val && fuzzy)
1338 if (TREE_CODE (arg) == ADDR_EXPR)
1339 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1340 visited, type, fuzzy, flexp, eltsize);
1342 if (TREE_CODE (arg) == ARRAY_REF)
1344 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1346 /* Determine the "innermost" array type. */
1347 while (TREE_CODE (type) == ARRAY_TYPE
1348 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1349 type = TREE_TYPE (type);
1351 /* Avoid arrays of pointers. */
1352 tree eltype = TREE_TYPE (type);
1353 if (TREE_CODE (type) != ARRAY_TYPE
1354 || !INTEGRAL_TYPE_P (eltype))
1355 return false;
1357 val = TYPE_SIZE_UNIT (type);
1358 if (!val || integer_zerop (val))
1359 return false;
1361 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1362 integer_one_node);
1363 /* Set the minimum size to zero since the string in
1364 the array could have zero length. */
1365 *minlen = ssize_int (0);
1367 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1368 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1369 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1370 *flexp = true;
1372 else if (TREE_CODE (arg) == COMPONENT_REF
1373 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1374 == ARRAY_TYPE))
1376 /* Use the type of the member array to determine the upper
1377 bound on the length of the array. This may be overly
1378 optimistic if the array itself isn't NUL-terminated and
1379 the caller relies on the subsequent member to contain
1380 the NUL but that would only be considered valid if
1381 the array were the last member of a struct.
1382 Set *FLEXP to true if the array whose bound is being
1383 used is at the end of a struct. */
1384 if (array_at_struct_end_p (arg))
1385 *flexp = true;
1387 arg = TREE_OPERAND (arg, 1);
1389 tree type = TREE_TYPE (arg);
1391 while (TREE_CODE (type) == ARRAY_TYPE
1392 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1393 type = TREE_TYPE (type);
1395 /* Fail when the array bound is unknown or zero. */
1396 val = TYPE_SIZE_UNIT (type);
1397 if (!val || integer_zerop (val))
1398 return false;
1399 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1400 integer_one_node);
1401 /* Set the minimum size to zero since the string in
1402 the array could have zero length. */
1403 *minlen = ssize_int (0);
1406 if (VAR_P (arg))
1408 tree type = TREE_TYPE (arg);
1409 if (POINTER_TYPE_P (type))
1410 type = TREE_TYPE (type);
1412 if (TREE_CODE (type) == ARRAY_TYPE)
1414 val = TYPE_SIZE_UNIT (type);
1415 if (!val
1416 || TREE_CODE (val) != INTEGER_CST
1417 || integer_zerop (val))
1418 return false;
1419 val = wide_int_to_tree (TREE_TYPE (val),
1420 wi::sub (wi::to_wide (val), 1));
1421 /* Set the minimum size to zero since the string in
1422 the array could have zero length. */
1423 *minlen = ssize_int (0);
1428 if (!val)
1429 return false;
1431 if (!*minlen
1432 || (type > 0
1433 && TREE_CODE (*minlen) == INTEGER_CST
1434 && TREE_CODE (val) == INTEGER_CST
1435 && tree_int_cst_lt (val, *minlen)))
1436 *minlen = val;
1438 if (*maxlen)
1440 if (type > 0)
1442 if (TREE_CODE (*maxlen) != INTEGER_CST
1443 || TREE_CODE (val) != INTEGER_CST)
1444 return false;
1446 if (tree_int_cst_lt (*maxlen, val))
1447 *maxlen = val;
1448 return true;
1450 else if (simple_cst_equal (val, *maxlen) != 1)
1451 return false;
1454 *maxlen = val;
1455 return true;
1458 /* If ARG is registered for SSA update we cannot look at its defining
1459 statement. */
1460 if (name_registered_for_update_p (arg))
1461 return false;
1463 /* If we were already here, break the infinite cycle. */
1464 if (!*visited)
1465 *visited = BITMAP_ALLOC (NULL);
1466 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1467 return true;
1469 var = arg;
1470 def_stmt = SSA_NAME_DEF_STMT (var);
1472 switch (gimple_code (def_stmt))
1474 case GIMPLE_ASSIGN:
1475 /* The RHS of the statement defining VAR must either have a
1476 constant length or come from another SSA_NAME with a constant
1477 length. */
1478 if (gimple_assign_single_p (def_stmt)
1479 || gimple_assign_unary_nop_p (def_stmt))
1481 tree rhs = gimple_assign_rhs1 (def_stmt);
1482 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
1483 eltsize);
1485 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1487 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1488 gimple_assign_rhs3 (def_stmt) };
1490 for (unsigned int i = 0; i < 2; i++)
1491 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1492 flexp, eltsize))
1494 if (fuzzy == 2)
1495 *maxlen = build_all_ones_cst (size_type_node);
1496 else
1497 return false;
1499 return true;
1501 return false;
1503 case GIMPLE_PHI:
1504 /* All the arguments of the PHI node must have the same constant
1505 length. */
1506 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1508 tree arg = gimple_phi_arg (def_stmt, i)->def;
1510 /* If this PHI has itself as an argument, we cannot
1511 determine the string length of this argument. However,
1512 if we can find a constant string length for the other
1513 PHI args then we can still be sure that this is a
1514 constant string length. So be optimistic and just
1515 continue with the next argument. */
1516 if (arg == gimple_phi_result (def_stmt))
1517 continue;
1519 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
1520 eltsize))
1522 if (fuzzy == 2)
1523 *maxlen = build_all_ones_cst (size_type_node);
1524 else
1525 return false;
1528 return true;
1530 default:
1531 return false;
1535 /* Determine the minimum and maximum value or string length that ARG
1536 refers to and store each in the first two elements of MINMAXLEN.
1537 For expressions that point to strings of unknown lengths that are
1538 character arrays, use the upper bound of the array as the maximum
1539 length. For example, given an expression like 'x ? array : "xyz"'
1540 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1541 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1542 stored in array.
1543 Return true if the range of the string lengths has been obtained
1544 from the upper bound of an array at the end of a struct. Such
1545 an array may hold a string that's longer than its upper bound
1546 due to it being used as a poor-man's flexible array member.
1548 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1549 and false if PHIs and COND_EXPRs are to be handled optimistically,
1550 if we can determine string length minimum and maximum; it will use
1551 the minimum from the ones where it can be determined.
1552 STRICT false should be only used for warning code.
1554 ELTSIZE is 1 for normal single byte character strings, and 2 or
1555 4 for wide characer strings. ELTSIZE is by default 1. */
1557 bool
1558 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize, bool strict)
1560 bitmap visited = NULL;
1562 minmaxlen[0] = NULL_TREE;
1563 minmaxlen[1] = NULL_TREE;
1565 bool flexarray = false;
1566 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1567 &flexarray, eltsize))
1569 minmaxlen[0] = NULL_TREE;
1570 minmaxlen[1] = NULL_TREE;
1573 if (visited)
1574 BITMAP_FREE (visited);
1576 return flexarray;
1579 tree
1580 get_maxval_strlen (tree arg, int type)
1582 bitmap visited = NULL;
1583 tree len[2] = { NULL_TREE, NULL_TREE };
1585 bool dummy;
1586 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1587 len[1] = NULL_TREE;
1588 if (visited)
1589 BITMAP_FREE (visited);
1591 return len[1];
1595 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1596 If LEN is not NULL, it represents the length of the string to be
1597 copied. Return NULL_TREE if no simplification can be made. */
1599 static bool
1600 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1601 tree dest, tree src)
1603 gimple *stmt = gsi_stmt (*gsi);
1604 location_t loc = gimple_location (stmt);
1605 tree fn;
1607 /* If SRC and DEST are the same (and not volatile), return DEST. */
1608 if (operand_equal_p (src, dest, 0))
1610 /* Issue -Wrestrict unless the pointers are null (those do
1611 not point to objects and so do not indicate an overlap;
1612 such calls could be the result of sanitization and jump
1613 threading). */
1614 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1616 tree func = gimple_call_fndecl (stmt);
1618 warning_at (loc, OPT_Wrestrict,
1619 "%qD source argument is the same as destination",
1620 func);
1623 replace_call_with_value (gsi, dest);
1624 return true;
1627 if (optimize_function_for_size_p (cfun))
1628 return false;
1630 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1631 if (!fn)
1632 return false;
1634 tree len = get_maxval_strlen (src, 0);
1635 if (!len)
1636 return false;
1638 len = fold_convert_loc (loc, size_type_node, len);
1639 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1640 len = force_gimple_operand_gsi (gsi, len, true,
1641 NULL_TREE, true, GSI_SAME_STMT);
1642 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1643 replace_call_with_call_and_fold (gsi, repl);
1644 return true;
1647 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1648 If SLEN is not NULL, it represents the length of the source string.
1649 Return NULL_TREE if no simplification can be made. */
1651 static bool
1652 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1653 tree dest, tree src, tree len)
1655 gimple *stmt = gsi_stmt (*gsi);
1656 location_t loc = gimple_location (stmt);
1657 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1659 /* If the LEN parameter is zero, return DEST. */
1660 if (integer_zerop (len))
1662 /* Avoid warning if the destination refers to a an array/pointer
1663 decorate with attribute nonstring. */
1664 if (!nonstring)
1666 tree fndecl = gimple_call_fndecl (stmt);
1668 /* Warn about the lack of nul termination: the result is not
1669 a (nul-terminated) string. */
1670 tree slen = get_maxval_strlen (src, 0);
1671 if (slen && !integer_zerop (slen))
1672 warning_at (loc, OPT_Wstringop_truncation,
1673 "%G%qD destination unchanged after copying no bytes "
1674 "from a string of length %E",
1675 stmt, fndecl, slen);
1676 else
1677 warning_at (loc, OPT_Wstringop_truncation,
1678 "%G%qD destination unchanged after copying no bytes",
1679 stmt, fndecl);
1682 replace_call_with_value (gsi, dest);
1683 return true;
1686 /* We can't compare slen with len as constants below if len is not a
1687 constant. */
1688 if (TREE_CODE (len) != INTEGER_CST)
1689 return false;
1691 /* Now, we must be passed a constant src ptr parameter. */
1692 tree slen = get_maxval_strlen (src, 0);
1693 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1694 return false;
1696 /* The size of the source string including the terminating nul. */
1697 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1699 /* We do not support simplification of this case, though we do
1700 support it when expanding trees into RTL. */
1701 /* FIXME: generate a call to __builtin_memset. */
1702 if (tree_int_cst_lt (ssize, len))
1703 return false;
1705 /* Diagnose truncation that leaves the copy unterminated. */
1706 maybe_diag_stxncpy_trunc (*gsi, src, len);
1708 /* OK transform into builtin memcpy. */
1709 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1710 if (!fn)
1711 return false;
1713 len = fold_convert_loc (loc, size_type_node, len);
1714 len = force_gimple_operand_gsi (gsi, len, true,
1715 NULL_TREE, true, GSI_SAME_STMT);
1716 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1717 replace_call_with_call_and_fold (gsi, repl);
1719 return true;
1722 /* Fold function call to builtin strchr or strrchr.
1723 If both arguments are constant, evaluate and fold the result,
1724 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1725 In general strlen is significantly faster than strchr
1726 due to being a simpler operation. */
1727 static bool
1728 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1730 gimple *stmt = gsi_stmt (*gsi);
1731 tree str = gimple_call_arg (stmt, 0);
1732 tree c = gimple_call_arg (stmt, 1);
1733 location_t loc = gimple_location (stmt);
1734 const char *p;
1735 char ch;
1737 if (!gimple_call_lhs (stmt))
1738 return false;
1740 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1742 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1744 if (p1 == NULL)
1746 replace_call_with_value (gsi, integer_zero_node);
1747 return true;
1750 tree len = build_int_cst (size_type_node, p1 - p);
1751 gimple_seq stmts = NULL;
1752 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1753 POINTER_PLUS_EXPR, str, len);
1754 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1755 gsi_replace_with_seq_vops (gsi, stmts);
1756 return true;
1759 if (!integer_zerop (c))
1760 return false;
1762 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1763 if (is_strrchr && optimize_function_for_size_p (cfun))
1765 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1767 if (strchr_fn)
1769 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1770 replace_call_with_call_and_fold (gsi, repl);
1771 return true;
1774 return false;
1777 tree len;
1778 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1780 if (!strlen_fn)
1781 return false;
1783 /* Create newstr = strlen (str). */
1784 gimple_seq stmts = NULL;
1785 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1786 gimple_set_location (new_stmt, loc);
1787 len = create_tmp_reg_or_ssa_name (size_type_node);
1788 gimple_call_set_lhs (new_stmt, len);
1789 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1791 /* Create (str p+ strlen (str)). */
1792 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1793 POINTER_PLUS_EXPR, str, len);
1794 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1795 gsi_replace_with_seq_vops (gsi, stmts);
1796 /* gsi now points at the assignment to the lhs, get a
1797 stmt iterator to the strlen.
1798 ??? We can't use gsi_for_stmt as that doesn't work when the
1799 CFG isn't built yet. */
1800 gimple_stmt_iterator gsi2 = *gsi;
1801 gsi_prev (&gsi2);
1802 fold_stmt (&gsi2);
1803 return true;
1806 /* Fold function call to builtin strstr.
1807 If both arguments are constant, evaluate and fold the result,
1808 additionally fold strstr (x, "") into x and strstr (x, "c")
1809 into strchr (x, 'c'). */
1810 static bool
1811 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1813 gimple *stmt = gsi_stmt (*gsi);
1814 tree haystack = gimple_call_arg (stmt, 0);
1815 tree needle = gimple_call_arg (stmt, 1);
1816 const char *p, *q;
1818 if (!gimple_call_lhs (stmt))
1819 return false;
1821 q = c_getstr (needle);
1822 if (q == NULL)
1823 return false;
1825 if ((p = c_getstr (haystack)))
1827 const char *r = strstr (p, q);
1829 if (r == NULL)
1831 replace_call_with_value (gsi, integer_zero_node);
1832 return true;
1835 tree len = build_int_cst (size_type_node, r - p);
1836 gimple_seq stmts = NULL;
1837 gimple *new_stmt
1838 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1839 haystack, len);
1840 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1841 gsi_replace_with_seq_vops (gsi, stmts);
1842 return true;
1845 /* For strstr (x, "") return x. */
1846 if (q[0] == '\0')
1848 replace_call_with_value (gsi, haystack);
1849 return true;
1852 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1853 if (q[1] == '\0')
1855 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1856 if (strchr_fn)
1858 tree c = build_int_cst (integer_type_node, q[0]);
1859 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1860 replace_call_with_call_and_fold (gsi, repl);
1861 return true;
1865 return false;
1868 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1869 to the call.
1871 Return NULL_TREE if no simplification was possible, otherwise return the
1872 simplified form of the call as a tree.
1874 The simplified form may be a constant or other expression which
1875 computes the same value, but in a more efficient manner (including
1876 calls to other builtin functions).
1878 The call may contain arguments which need to be evaluated, but
1879 which are not useful to determine the result of the call. In
1880 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1881 COMPOUND_EXPR will be an argument which must be evaluated.
1882 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1883 COMPOUND_EXPR in the chain will contain the tree for the simplified
1884 form of the builtin function call. */
1886 static bool
1887 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1889 gimple *stmt = gsi_stmt (*gsi);
1890 location_t loc = gimple_location (stmt);
1892 const char *p = c_getstr (src);
1894 /* If the string length is zero, return the dst parameter. */
1895 if (p && *p == '\0')
1897 replace_call_with_value (gsi, dst);
1898 return true;
1901 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1902 return false;
1904 /* See if we can store by pieces into (dst + strlen(dst)). */
1905 tree newdst;
1906 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1907 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1909 if (!strlen_fn || !memcpy_fn)
1910 return false;
1912 /* If the length of the source string isn't computable don't
1913 split strcat into strlen and memcpy. */
1914 tree len = get_maxval_strlen (src, 0);
1915 if (! len)
1916 return false;
1918 /* Create strlen (dst). */
1919 gimple_seq stmts = NULL, stmts2;
1920 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1921 gimple_set_location (repl, loc);
1922 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1923 gimple_call_set_lhs (repl, newdst);
1924 gimple_seq_add_stmt_without_update (&stmts, repl);
1926 /* Create (dst p+ strlen (dst)). */
1927 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1928 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1929 gimple_seq_add_seq_without_update (&stmts, stmts2);
1931 len = fold_convert_loc (loc, size_type_node, len);
1932 len = size_binop_loc (loc, PLUS_EXPR, len,
1933 build_int_cst (size_type_node, 1));
1934 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1935 gimple_seq_add_seq_without_update (&stmts, stmts2);
1937 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1938 gimple_seq_add_stmt_without_update (&stmts, repl);
1939 if (gimple_call_lhs (stmt))
1941 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1942 gimple_seq_add_stmt_without_update (&stmts, repl);
1943 gsi_replace_with_seq_vops (gsi, stmts);
1944 /* gsi now points at the assignment to the lhs, get a
1945 stmt iterator to the memcpy call.
1946 ??? We can't use gsi_for_stmt as that doesn't work when the
1947 CFG isn't built yet. */
1948 gimple_stmt_iterator gsi2 = *gsi;
1949 gsi_prev (&gsi2);
1950 fold_stmt (&gsi2);
1952 else
1954 gsi_replace_with_seq_vops (gsi, stmts);
1955 fold_stmt (gsi);
1957 return true;
1960 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1961 are the arguments to the call. */
1963 static bool
1964 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1966 gimple *stmt = gsi_stmt (*gsi);
1967 tree dest = gimple_call_arg (stmt, 0);
1968 tree src = gimple_call_arg (stmt, 1);
1969 tree size = gimple_call_arg (stmt, 2);
1970 tree fn;
1971 const char *p;
1974 p = c_getstr (src);
1975 /* If the SRC parameter is "", return DEST. */
1976 if (p && *p == '\0')
1978 replace_call_with_value (gsi, dest);
1979 return true;
1982 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1983 return false;
1985 /* If __builtin_strcat_chk is used, assume strcat is available. */
1986 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1987 if (!fn)
1988 return false;
1990 gimple *repl = gimple_build_call (fn, 2, dest, src);
1991 replace_call_with_call_and_fold (gsi, repl);
1992 return true;
1995 /* Simplify a call to the strncat builtin. */
1997 static bool
1998 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2000 gimple *stmt = gsi_stmt (*gsi);
2001 tree dst = gimple_call_arg (stmt, 0);
2002 tree src = gimple_call_arg (stmt, 1);
2003 tree len = gimple_call_arg (stmt, 2);
2005 const char *p = c_getstr (src);
2007 /* If the requested length is zero, or the src parameter string
2008 length is zero, return the dst parameter. */
2009 if (integer_zerop (len) || (p && *p == '\0'))
2011 replace_call_with_value (gsi, dst);
2012 return true;
2015 if (TREE_CODE (len) != INTEGER_CST || !p)
2016 return false;
2018 unsigned srclen = strlen (p);
2020 int cmpsrc = compare_tree_int (len, srclen);
2022 /* Return early if the requested len is less than the string length.
2023 Warnings will be issued elsewhere later. */
2024 if (cmpsrc < 0)
2025 return false;
2027 unsigned HOST_WIDE_INT dstsize;
2029 bool nowarn = gimple_no_warning_p (stmt);
2031 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2033 int cmpdst = compare_tree_int (len, dstsize);
2035 if (cmpdst >= 0)
2037 tree fndecl = gimple_call_fndecl (stmt);
2039 /* Strncat copies (at most) LEN bytes and always appends
2040 the terminating NUL so the specified bound should never
2041 be equal to (or greater than) the size of the destination.
2042 If it is, the copy could overflow. */
2043 location_t loc = gimple_location (stmt);
2044 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2045 cmpdst == 0
2046 ? G_("%G%qD specified bound %E equals "
2047 "destination size")
2048 : G_("%G%qD specified bound %E exceeds "
2049 "destination size %wu"),
2050 stmt, fndecl, len, dstsize);
2051 if (nowarn)
2052 gimple_set_no_warning (stmt, true);
2056 if (!nowarn && cmpsrc == 0)
2058 tree fndecl = gimple_call_fndecl (stmt);
2059 location_t loc = gimple_location (stmt);
2061 /* To avoid possible overflow the specified bound should also
2062 not be equal to the length of the source, even when the size
2063 of the destination is unknown (it's not an uncommon mistake
2064 to specify as the bound to strncpy the length of the source). */
2065 if (warning_at (loc, OPT_Wstringop_overflow_,
2066 "%G%qD specified bound %E equals source length",
2067 stmt, fndecl, len))
2068 gimple_set_no_warning (stmt, true);
2071 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2073 /* If the replacement _DECL isn't initialized, don't do the
2074 transformation. */
2075 if (!fn)
2076 return false;
2078 /* Otherwise, emit a call to strcat. */
2079 gcall *repl = gimple_build_call (fn, 2, dst, src);
2080 replace_call_with_call_and_fold (gsi, repl);
2081 return true;
2084 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2085 LEN, and SIZE. */
2087 static bool
2088 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2090 gimple *stmt = gsi_stmt (*gsi);
2091 tree dest = gimple_call_arg (stmt, 0);
2092 tree src = gimple_call_arg (stmt, 1);
2093 tree len = gimple_call_arg (stmt, 2);
2094 tree size = gimple_call_arg (stmt, 3);
2095 tree fn;
2096 const char *p;
2098 p = c_getstr (src);
2099 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2100 if ((p && *p == '\0')
2101 || integer_zerop (len))
2103 replace_call_with_value (gsi, dest);
2104 return true;
2107 if (! tree_fits_uhwi_p (size))
2108 return false;
2110 if (! integer_all_onesp (size))
2112 tree src_len = c_strlen (src, 1);
2113 if (src_len
2114 && tree_fits_uhwi_p (src_len)
2115 && tree_fits_uhwi_p (len)
2116 && ! tree_int_cst_lt (len, src_len))
2118 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2119 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2120 if (!fn)
2121 return false;
2123 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2124 replace_call_with_call_and_fold (gsi, repl);
2125 return true;
2127 return false;
2130 /* If __builtin_strncat_chk is used, assume strncat is available. */
2131 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2132 if (!fn)
2133 return false;
2135 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2136 replace_call_with_call_and_fold (gsi, repl);
2137 return true;
2140 /* Build and append gimple statements to STMTS that would load a first
2141 character of a memory location identified by STR. LOC is location
2142 of the statement. */
2144 static tree
2145 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2147 tree var;
2149 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2150 tree cst_uchar_ptr_node
2151 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2152 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2154 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2155 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2156 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2158 gimple_assign_set_lhs (stmt, var);
2159 gimple_seq_add_stmt_without_update (stmts, stmt);
2161 return var;
2164 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2165 FCODE is the name of the builtin. */
2167 static bool
2168 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2170 gimple *stmt = gsi_stmt (*gsi);
2171 tree callee = gimple_call_fndecl (stmt);
2172 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2174 tree type = integer_type_node;
2175 tree str1 = gimple_call_arg (stmt, 0);
2176 tree str2 = gimple_call_arg (stmt, 1);
2177 tree lhs = gimple_call_lhs (stmt);
2178 HOST_WIDE_INT length = -1;
2180 /* Handle strncmp and strncasecmp functions. */
2181 if (gimple_call_num_args (stmt) == 3)
2183 tree len = gimple_call_arg (stmt, 2);
2184 if (tree_fits_uhwi_p (len))
2185 length = tree_to_uhwi (len);
2188 /* If the LEN parameter is zero, return zero. */
2189 if (length == 0)
2191 replace_call_with_value (gsi, integer_zero_node);
2192 return true;
2195 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2196 if (operand_equal_p (str1, str2, 0))
2198 replace_call_with_value (gsi, integer_zero_node);
2199 return true;
2202 const char *p1 = c_getstr (str1);
2203 const char *p2 = c_getstr (str2);
2205 /* For known strings, return an immediate value. */
2206 if (p1 && p2)
2208 int r = 0;
2209 bool known_result = false;
2211 switch (fcode)
2213 case BUILT_IN_STRCMP:
2214 case BUILT_IN_STRCMP_EQ:
2216 r = strcmp (p1, p2);
2217 known_result = true;
2218 break;
2220 case BUILT_IN_STRNCMP:
2221 case BUILT_IN_STRNCMP_EQ:
2223 if (length == -1)
2224 break;
2225 r = strncmp (p1, p2, length);
2226 known_result = true;
2227 break;
2229 /* Only handleable situation is where the string are equal (result 0),
2230 which is already handled by operand_equal_p case. */
2231 case BUILT_IN_STRCASECMP:
2232 break;
2233 case BUILT_IN_STRNCASECMP:
2235 if (length == -1)
2236 break;
2237 r = strncmp (p1, p2, length);
2238 if (r == 0)
2239 known_result = true;
2240 break;
2242 default:
2243 gcc_unreachable ();
2246 if (known_result)
2248 replace_call_with_value (gsi, build_cmp_result (type, r));
2249 return true;
2253 bool nonzero_length = length >= 1
2254 || fcode == BUILT_IN_STRCMP
2255 || fcode == BUILT_IN_STRCMP_EQ
2256 || fcode == BUILT_IN_STRCASECMP;
2258 location_t loc = gimple_location (stmt);
2260 /* If the second arg is "", return *(const unsigned char*)arg1. */
2261 if (p2 && *p2 == '\0' && nonzero_length)
2263 gimple_seq stmts = NULL;
2264 tree var = gimple_load_first_char (loc, str1, &stmts);
2265 if (lhs)
2267 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2268 gimple_seq_add_stmt_without_update (&stmts, stmt);
2271 gsi_replace_with_seq_vops (gsi, stmts);
2272 return true;
2275 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2276 if (p1 && *p1 == '\0' && nonzero_length)
2278 gimple_seq stmts = NULL;
2279 tree var = gimple_load_first_char (loc, str2, &stmts);
2281 if (lhs)
2283 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2284 stmt = gimple_build_assign (c, NOP_EXPR, var);
2285 gimple_seq_add_stmt_without_update (&stmts, stmt);
2287 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2288 gimple_seq_add_stmt_without_update (&stmts, stmt);
2291 gsi_replace_with_seq_vops (gsi, stmts);
2292 return true;
2295 /* If len parameter is one, return an expression corresponding to
2296 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2297 if (fcode == BUILT_IN_STRNCMP && length == 1)
2299 gimple_seq stmts = NULL;
2300 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2301 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2303 if (lhs)
2305 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2306 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2307 gimple_seq_add_stmt_without_update (&stmts, convert1);
2309 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2310 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2311 gimple_seq_add_stmt_without_update (&stmts, convert2);
2313 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2314 gimple_seq_add_stmt_without_update (&stmts, stmt);
2317 gsi_replace_with_seq_vops (gsi, stmts);
2318 return true;
2321 /* If length is larger than the length of one constant string,
2322 replace strncmp with corresponding strcmp */
2323 if (fcode == BUILT_IN_STRNCMP
2324 && length > 0
2325 && ((p2 && (size_t) length > strlen (p2))
2326 || (p1 && (size_t) length > strlen (p1))))
2328 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2329 if (!fn)
2330 return false;
2331 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2332 replace_call_with_call_and_fold (gsi, repl);
2333 return true;
2336 return false;
2339 /* Fold a call to the memchr pointed by GSI iterator. */
2341 static bool
2342 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2344 gimple *stmt = gsi_stmt (*gsi);
2345 tree lhs = gimple_call_lhs (stmt);
2346 tree arg1 = gimple_call_arg (stmt, 0);
2347 tree arg2 = gimple_call_arg (stmt, 1);
2348 tree len = gimple_call_arg (stmt, 2);
2350 /* If the LEN parameter is zero, return zero. */
2351 if (integer_zerop (len))
2353 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2354 return true;
2357 char c;
2358 if (TREE_CODE (arg2) != INTEGER_CST
2359 || !tree_fits_uhwi_p (len)
2360 || !target_char_cst_p (arg2, &c))
2361 return false;
2363 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2364 unsigned HOST_WIDE_INT string_length;
2365 const char *p1 = c_getstr (arg1, &string_length);
2367 if (p1)
2369 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2370 if (r == NULL)
2372 if (length <= string_length)
2374 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2375 return true;
2378 else
2380 unsigned HOST_WIDE_INT offset = r - p1;
2381 gimple_seq stmts = NULL;
2382 if (lhs != NULL_TREE)
2384 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2385 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2386 arg1, offset_cst);
2387 gimple_seq_add_stmt_without_update (&stmts, stmt);
2389 else
2390 gimple_seq_add_stmt_without_update (&stmts,
2391 gimple_build_nop ());
2393 gsi_replace_with_seq_vops (gsi, stmts);
2394 return true;
2398 return false;
2401 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2402 to the call. IGNORE is true if the value returned
2403 by the builtin will be ignored. UNLOCKED is true is true if this
2404 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2405 the known length of the string. Return NULL_TREE if no simplification
2406 was possible. */
2408 static bool
2409 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2410 tree arg0, tree arg1,
2411 bool unlocked)
2413 gimple *stmt = gsi_stmt (*gsi);
2415 /* If we're using an unlocked function, assume the other unlocked
2416 functions exist explicitly. */
2417 tree const fn_fputc = (unlocked
2418 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2419 : builtin_decl_implicit (BUILT_IN_FPUTC));
2420 tree const fn_fwrite = (unlocked
2421 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2422 : builtin_decl_implicit (BUILT_IN_FWRITE));
2424 /* If the return value is used, don't do the transformation. */
2425 if (gimple_call_lhs (stmt))
2426 return false;
2428 /* Get the length of the string passed to fputs. If the length
2429 can't be determined, punt. */
2430 tree len = get_maxval_strlen (arg0, 0);
2431 if (!len
2432 || TREE_CODE (len) != INTEGER_CST)
2433 return false;
2435 switch (compare_tree_int (len, 1))
2437 case -1: /* length is 0, delete the call entirely . */
2438 replace_call_with_value (gsi, integer_zero_node);
2439 return true;
2441 case 0: /* length is 1, call fputc. */
2443 const char *p = c_getstr (arg0);
2444 if (p != NULL)
2446 if (!fn_fputc)
2447 return false;
2449 gimple *repl = gimple_build_call (fn_fputc, 2,
2450 build_int_cst
2451 (integer_type_node, p[0]), arg1);
2452 replace_call_with_call_and_fold (gsi, repl);
2453 return true;
2456 /* FALLTHROUGH */
2457 case 1: /* length is greater than 1, call fwrite. */
2459 /* If optimizing for size keep fputs. */
2460 if (optimize_function_for_size_p (cfun))
2461 return false;
2462 /* New argument list transforming fputs(string, stream) to
2463 fwrite(string, 1, len, stream). */
2464 if (!fn_fwrite)
2465 return false;
2467 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2468 size_one_node, len, arg1);
2469 replace_call_with_call_and_fold (gsi, repl);
2470 return true;
2472 default:
2473 gcc_unreachable ();
2475 return false;
2478 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2479 DEST, SRC, LEN, and SIZE are the arguments to the call.
2480 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2481 code of the builtin. If MAXLEN is not NULL, it is maximum length
2482 passed as third argument. */
2484 static bool
2485 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2486 tree dest, tree src, tree len, tree size,
2487 enum built_in_function fcode)
2489 gimple *stmt = gsi_stmt (*gsi);
2490 location_t loc = gimple_location (stmt);
2491 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2492 tree fn;
2494 /* If SRC and DEST are the same (and not volatile), return DEST
2495 (resp. DEST+LEN for __mempcpy_chk). */
2496 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2498 if (fcode != BUILT_IN_MEMPCPY_CHK)
2500 replace_call_with_value (gsi, dest);
2501 return true;
2503 else
2505 gimple_seq stmts = NULL;
2506 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2507 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2508 TREE_TYPE (dest), dest, len);
2509 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2510 replace_call_with_value (gsi, temp);
2511 return true;
2515 if (! tree_fits_uhwi_p (size))
2516 return false;
2518 tree maxlen = get_maxval_strlen (len, 2);
2519 if (! integer_all_onesp (size))
2521 if (! tree_fits_uhwi_p (len))
2523 /* If LEN is not constant, try MAXLEN too.
2524 For MAXLEN only allow optimizing into non-_ocs function
2525 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2526 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2528 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2530 /* (void) __mempcpy_chk () can be optimized into
2531 (void) __memcpy_chk (). */
2532 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2533 if (!fn)
2534 return false;
2536 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2537 replace_call_with_call_and_fold (gsi, repl);
2538 return true;
2540 return false;
2543 else
2544 maxlen = len;
2546 if (tree_int_cst_lt (size, maxlen))
2547 return false;
2550 fn = NULL_TREE;
2551 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2552 mem{cpy,pcpy,move,set} is available. */
2553 switch (fcode)
2555 case BUILT_IN_MEMCPY_CHK:
2556 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2557 break;
2558 case BUILT_IN_MEMPCPY_CHK:
2559 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2560 break;
2561 case BUILT_IN_MEMMOVE_CHK:
2562 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2563 break;
2564 case BUILT_IN_MEMSET_CHK:
2565 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2566 break;
2567 default:
2568 break;
2571 if (!fn)
2572 return false;
2574 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2575 replace_call_with_call_and_fold (gsi, repl);
2576 return true;
2579 /* Fold a call to the __st[rp]cpy_chk builtin.
2580 DEST, SRC, and SIZE are the arguments to the call.
2581 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2582 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2583 strings passed as second argument. */
2585 static bool
2586 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2587 tree dest,
2588 tree src, tree size,
2589 enum built_in_function fcode)
2591 gimple *stmt = gsi_stmt (*gsi);
2592 location_t loc = gimple_location (stmt);
2593 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2594 tree len, fn;
2596 /* If SRC and DEST are the same (and not volatile), return DEST. */
2597 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2599 /* Issue -Wrestrict unless the pointers are null (those do
2600 not point to objects and so do not indicate an overlap;
2601 such calls could be the result of sanitization and jump
2602 threading). */
2603 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2605 tree func = gimple_call_fndecl (stmt);
2607 warning_at (loc, OPT_Wrestrict,
2608 "%qD source argument is the same as destination",
2609 func);
2612 replace_call_with_value (gsi, dest);
2613 return true;
2616 if (! tree_fits_uhwi_p (size))
2617 return false;
2619 tree maxlen = get_maxval_strlen (src, 1);
2620 if (! integer_all_onesp (size))
2622 len = c_strlen (src, 1);
2623 if (! len || ! tree_fits_uhwi_p (len))
2625 /* If LEN is not constant, try MAXLEN too.
2626 For MAXLEN only allow optimizing into non-_ocs function
2627 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2628 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2630 if (fcode == BUILT_IN_STPCPY_CHK)
2632 if (! ignore)
2633 return false;
2635 /* If return value of __stpcpy_chk is ignored,
2636 optimize into __strcpy_chk. */
2637 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2638 if (!fn)
2639 return false;
2641 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2642 replace_call_with_call_and_fold (gsi, repl);
2643 return true;
2646 if (! len || TREE_SIDE_EFFECTS (len))
2647 return false;
2649 /* If c_strlen returned something, but not a constant,
2650 transform __strcpy_chk into __memcpy_chk. */
2651 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2652 if (!fn)
2653 return false;
2655 gimple_seq stmts = NULL;
2656 len = gimple_convert (&stmts, loc, size_type_node, len);
2657 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2658 build_int_cst (size_type_node, 1));
2659 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2660 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2661 replace_call_with_call_and_fold (gsi, repl);
2662 return true;
2665 else
2666 maxlen = len;
2668 if (! tree_int_cst_lt (maxlen, size))
2669 return false;
2672 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2673 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2674 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2675 if (!fn)
2676 return false;
2678 gimple *repl = gimple_build_call (fn, 2, dest, src);
2679 replace_call_with_call_and_fold (gsi, repl);
2680 return true;
2683 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2684 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2685 length passed as third argument. IGNORE is true if return value can be
2686 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2688 static bool
2689 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2690 tree dest, tree src,
2691 tree len, tree size,
2692 enum built_in_function fcode)
2694 gimple *stmt = gsi_stmt (*gsi);
2695 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2696 tree fn;
2698 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2700 /* If return value of __stpncpy_chk is ignored,
2701 optimize into __strncpy_chk. */
2702 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2703 if (fn)
2705 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2706 replace_call_with_call_and_fold (gsi, repl);
2707 return true;
2711 if (! tree_fits_uhwi_p (size))
2712 return false;
2714 tree maxlen = get_maxval_strlen (len, 2);
2715 if (! integer_all_onesp (size))
2717 if (! tree_fits_uhwi_p (len))
2719 /* If LEN is not constant, try MAXLEN too.
2720 For MAXLEN only allow optimizing into non-_ocs function
2721 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2722 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2723 return false;
2725 else
2726 maxlen = len;
2728 if (tree_int_cst_lt (size, maxlen))
2729 return false;
2732 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2733 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2734 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2735 if (!fn)
2736 return false;
2738 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2739 replace_call_with_call_and_fold (gsi, repl);
2740 return true;
2743 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2744 Return NULL_TREE if no simplification can be made. */
2746 static bool
2747 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2749 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2750 location_t loc = gimple_location (stmt);
2751 tree dest = gimple_call_arg (stmt, 0);
2752 tree src = gimple_call_arg (stmt, 1);
2753 tree fn, len, lenp1;
2755 /* If the result is unused, replace stpcpy with strcpy. */
2756 if (gimple_call_lhs (stmt) == NULL_TREE)
2758 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2759 if (!fn)
2760 return false;
2761 gimple_call_set_fndecl (stmt, fn);
2762 fold_stmt (gsi);
2763 return true;
2766 len = c_strlen (src, 1);
2767 if (!len
2768 || TREE_CODE (len) != INTEGER_CST)
2769 return false;
2771 if (optimize_function_for_size_p (cfun)
2772 /* If length is zero it's small enough. */
2773 && !integer_zerop (len))
2774 return false;
2776 /* If the source has a known length replace stpcpy with memcpy. */
2777 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2778 if (!fn)
2779 return false;
2781 gimple_seq stmts = NULL;
2782 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2783 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2784 tem, build_int_cst (size_type_node, 1));
2785 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2786 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2787 gimple_set_vuse (repl, gimple_vuse (stmt));
2788 gimple_set_vdef (repl, gimple_vdef (stmt));
2789 if (gimple_vdef (repl)
2790 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2791 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2792 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2793 /* Replace the result with dest + len. */
2794 stmts = NULL;
2795 tem = gimple_convert (&stmts, loc, sizetype, len);
2796 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2797 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2798 POINTER_PLUS_EXPR, dest, tem);
2799 gsi_replace (gsi, ret, false);
2800 /* Finally fold the memcpy call. */
2801 gimple_stmt_iterator gsi2 = *gsi;
2802 gsi_prev (&gsi2);
2803 fold_stmt (&gsi2);
2804 return true;
2807 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2808 NULL_TREE if a normal call should be emitted rather than expanding
2809 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2810 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2811 passed as second argument. */
2813 static bool
2814 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2815 enum built_in_function fcode)
2817 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2818 tree dest, size, len, fn, fmt, flag;
2819 const char *fmt_str;
2821 /* Verify the required arguments in the original call. */
2822 if (gimple_call_num_args (stmt) < 5)
2823 return false;
2825 dest = gimple_call_arg (stmt, 0);
2826 len = gimple_call_arg (stmt, 1);
2827 flag = gimple_call_arg (stmt, 2);
2828 size = gimple_call_arg (stmt, 3);
2829 fmt = gimple_call_arg (stmt, 4);
2831 if (! tree_fits_uhwi_p (size))
2832 return false;
2834 if (! integer_all_onesp (size))
2836 tree maxlen = get_maxval_strlen (len, 2);
2837 if (! tree_fits_uhwi_p (len))
2839 /* If LEN is not constant, try MAXLEN too.
2840 For MAXLEN only allow optimizing into non-_ocs function
2841 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2842 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2843 return false;
2845 else
2846 maxlen = len;
2848 if (tree_int_cst_lt (size, maxlen))
2849 return false;
2852 if (!init_target_chars ())
2853 return false;
2855 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2856 or if format doesn't contain % chars or is "%s". */
2857 if (! integer_zerop (flag))
2859 fmt_str = c_getstr (fmt);
2860 if (fmt_str == NULL)
2861 return false;
2862 if (strchr (fmt_str, target_percent) != NULL
2863 && strcmp (fmt_str, target_percent_s))
2864 return false;
2867 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2868 available. */
2869 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2870 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2871 if (!fn)
2872 return false;
2874 /* Replace the called function and the first 5 argument by 3 retaining
2875 trailing varargs. */
2876 gimple_call_set_fndecl (stmt, fn);
2877 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2878 gimple_call_set_arg (stmt, 0, dest);
2879 gimple_call_set_arg (stmt, 1, len);
2880 gimple_call_set_arg (stmt, 2, fmt);
2881 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2882 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2883 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2884 fold_stmt (gsi);
2885 return true;
2888 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2889 Return NULL_TREE if a normal call should be emitted rather than
2890 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2891 or BUILT_IN_VSPRINTF_CHK. */
2893 static bool
2894 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2895 enum built_in_function fcode)
2897 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2898 tree dest, size, len, fn, fmt, flag;
2899 const char *fmt_str;
2900 unsigned nargs = gimple_call_num_args (stmt);
2902 /* Verify the required arguments in the original call. */
2903 if (nargs < 4)
2904 return false;
2905 dest = gimple_call_arg (stmt, 0);
2906 flag = gimple_call_arg (stmt, 1);
2907 size = gimple_call_arg (stmt, 2);
2908 fmt = gimple_call_arg (stmt, 3);
2910 if (! tree_fits_uhwi_p (size))
2911 return false;
2913 len = NULL_TREE;
2915 if (!init_target_chars ())
2916 return false;
2918 /* Check whether the format is a literal string constant. */
2919 fmt_str = c_getstr (fmt);
2920 if (fmt_str != NULL)
2922 /* If the format doesn't contain % args or %%, we know the size. */
2923 if (strchr (fmt_str, target_percent) == 0)
2925 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2926 len = build_int_cstu (size_type_node, strlen (fmt_str));
2928 /* If the format is "%s" and first ... argument is a string literal,
2929 we know the size too. */
2930 else if (fcode == BUILT_IN_SPRINTF_CHK
2931 && strcmp (fmt_str, target_percent_s) == 0)
2933 tree arg;
2935 if (nargs == 5)
2937 arg = gimple_call_arg (stmt, 4);
2938 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2940 len = c_strlen (arg, 1);
2941 if (! len || ! tree_fits_uhwi_p (len))
2942 len = NULL_TREE;
2948 if (! integer_all_onesp (size))
2950 if (! len || ! tree_int_cst_lt (len, size))
2951 return false;
2954 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2955 or if format doesn't contain % chars or is "%s". */
2956 if (! integer_zerop (flag))
2958 if (fmt_str == NULL)
2959 return false;
2960 if (strchr (fmt_str, target_percent) != NULL
2961 && strcmp (fmt_str, target_percent_s))
2962 return false;
2965 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2966 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2967 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2968 if (!fn)
2969 return false;
2971 /* Replace the called function and the first 4 argument by 2 retaining
2972 trailing varargs. */
2973 gimple_call_set_fndecl (stmt, fn);
2974 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2975 gimple_call_set_arg (stmt, 0, dest);
2976 gimple_call_set_arg (stmt, 1, fmt);
2977 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2978 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2979 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2980 fold_stmt (gsi);
2981 return true;
2984 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2985 ORIG may be null if this is a 2-argument call. We don't attempt to
2986 simplify calls with more than 3 arguments.
2988 Return true if simplification was possible, otherwise false. */
2990 bool
2991 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2993 gimple *stmt = gsi_stmt (*gsi);
2994 tree dest = gimple_call_arg (stmt, 0);
2995 tree fmt = gimple_call_arg (stmt, 1);
2996 tree orig = NULL_TREE;
2997 const char *fmt_str = NULL;
2999 /* Verify the required arguments in the original call. We deal with two
3000 types of sprintf() calls: 'sprintf (str, fmt)' and
3001 'sprintf (dest, "%s", orig)'. */
3002 if (gimple_call_num_args (stmt) > 3)
3003 return false;
3005 if (gimple_call_num_args (stmt) == 3)
3006 orig = gimple_call_arg (stmt, 2);
3008 /* Check whether the format is a literal string constant. */
3009 fmt_str = c_getstr (fmt);
3010 if (fmt_str == NULL)
3011 return false;
3013 if (!init_target_chars ())
3014 return false;
3016 /* If the format doesn't contain % args or %%, use strcpy. */
3017 if (strchr (fmt_str, target_percent) == NULL)
3019 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3021 if (!fn)
3022 return false;
3024 /* Don't optimize sprintf (buf, "abc", ptr++). */
3025 if (orig)
3026 return false;
3028 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3029 'format' is known to contain no % formats. */
3030 gimple_seq stmts = NULL;
3031 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3032 gimple_seq_add_stmt_without_update (&stmts, repl);
3033 if (gimple_call_lhs (stmt))
3035 repl = gimple_build_assign (gimple_call_lhs (stmt),
3036 build_int_cst (integer_type_node,
3037 strlen (fmt_str)));
3038 gimple_seq_add_stmt_without_update (&stmts, repl);
3039 gsi_replace_with_seq_vops (gsi, stmts);
3040 /* gsi now points at the assignment to the lhs, get a
3041 stmt iterator to the memcpy call.
3042 ??? We can't use gsi_for_stmt as that doesn't work when the
3043 CFG isn't built yet. */
3044 gimple_stmt_iterator gsi2 = *gsi;
3045 gsi_prev (&gsi2);
3046 fold_stmt (&gsi2);
3048 else
3050 gsi_replace_with_seq_vops (gsi, stmts);
3051 fold_stmt (gsi);
3053 return true;
3056 /* If the format is "%s", use strcpy if the result isn't used. */
3057 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3059 tree fn;
3060 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3062 if (!fn)
3063 return false;
3065 /* Don't crash on sprintf (str1, "%s"). */
3066 if (!orig)
3067 return false;
3069 tree orig_len = NULL_TREE;
3070 if (gimple_call_lhs (stmt))
3072 orig_len = get_maxval_strlen (orig, 0);
3073 if (!orig_len)
3074 return false;
3077 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3078 gimple_seq stmts = NULL;
3079 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3080 gimple_seq_add_stmt_without_update (&stmts, repl);
3081 if (gimple_call_lhs (stmt))
3083 if (!useless_type_conversion_p (integer_type_node,
3084 TREE_TYPE (orig_len)))
3085 orig_len = fold_convert (integer_type_node, orig_len);
3086 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3087 gimple_seq_add_stmt_without_update (&stmts, repl);
3088 gsi_replace_with_seq_vops (gsi, stmts);
3089 /* gsi now points at the assignment to the lhs, get a
3090 stmt iterator to the memcpy call.
3091 ??? We can't use gsi_for_stmt as that doesn't work when the
3092 CFG isn't built yet. */
3093 gimple_stmt_iterator gsi2 = *gsi;
3094 gsi_prev (&gsi2);
3095 fold_stmt (&gsi2);
3097 else
3099 gsi_replace_with_seq_vops (gsi, stmts);
3100 fold_stmt (gsi);
3102 return true;
3104 return false;
3107 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3108 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3109 attempt to simplify calls with more than 4 arguments.
3111 Return true if simplification was possible, otherwise false. */
3113 bool
3114 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3116 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3117 tree dest = gimple_call_arg (stmt, 0);
3118 tree destsize = gimple_call_arg (stmt, 1);
3119 tree fmt = gimple_call_arg (stmt, 2);
3120 tree orig = NULL_TREE;
3121 const char *fmt_str = NULL;
3123 if (gimple_call_num_args (stmt) > 4)
3124 return false;
3126 if (gimple_call_num_args (stmt) == 4)
3127 orig = gimple_call_arg (stmt, 3);
3129 if (!tree_fits_uhwi_p (destsize))
3130 return false;
3131 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3133 /* Check whether the format is a literal string constant. */
3134 fmt_str = c_getstr (fmt);
3135 if (fmt_str == NULL)
3136 return false;
3138 if (!init_target_chars ())
3139 return false;
3141 /* If the format doesn't contain % args or %%, use strcpy. */
3142 if (strchr (fmt_str, target_percent) == NULL)
3144 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3145 if (!fn)
3146 return false;
3148 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3149 if (orig)
3150 return false;
3152 /* We could expand this as
3153 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3154 or to
3155 memcpy (str, fmt_with_nul_at_cstm1, cst);
3156 but in the former case that might increase code size
3157 and in the latter case grow .rodata section too much.
3158 So punt for now. */
3159 size_t len = strlen (fmt_str);
3160 if (len >= destlen)
3161 return false;
3163 gimple_seq stmts = NULL;
3164 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3165 gimple_seq_add_stmt_without_update (&stmts, repl);
3166 if (gimple_call_lhs (stmt))
3168 repl = gimple_build_assign (gimple_call_lhs (stmt),
3169 build_int_cst (integer_type_node, len));
3170 gimple_seq_add_stmt_without_update (&stmts, repl);
3171 gsi_replace_with_seq_vops (gsi, stmts);
3172 /* gsi now points at the assignment to the lhs, get a
3173 stmt iterator to the memcpy call.
3174 ??? We can't use gsi_for_stmt as that doesn't work when the
3175 CFG isn't built yet. */
3176 gimple_stmt_iterator gsi2 = *gsi;
3177 gsi_prev (&gsi2);
3178 fold_stmt (&gsi2);
3180 else
3182 gsi_replace_with_seq_vops (gsi, stmts);
3183 fold_stmt (gsi);
3185 return true;
3188 /* If the format is "%s", use strcpy if the result isn't used. */
3189 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3191 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3192 if (!fn)
3193 return false;
3195 /* Don't crash on snprintf (str1, cst, "%s"). */
3196 if (!orig)
3197 return false;
3199 tree orig_len = get_maxval_strlen (orig, 0);
3200 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3201 return false;
3203 /* We could expand this as
3204 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3205 or to
3206 memcpy (str1, str2_with_nul_at_cstm1, cst);
3207 but in the former case that might increase code size
3208 and in the latter case grow .rodata section too much.
3209 So punt for now. */
3210 if (compare_tree_int (orig_len, destlen) >= 0)
3211 return false;
3213 /* Convert snprintf (str1, cst, "%s", str2) into
3214 strcpy (str1, str2) if strlen (str2) < cst. */
3215 gimple_seq stmts = NULL;
3216 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3217 gimple_seq_add_stmt_without_update (&stmts, repl);
3218 if (gimple_call_lhs (stmt))
3220 if (!useless_type_conversion_p (integer_type_node,
3221 TREE_TYPE (orig_len)))
3222 orig_len = fold_convert (integer_type_node, orig_len);
3223 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3224 gimple_seq_add_stmt_without_update (&stmts, repl);
3225 gsi_replace_with_seq_vops (gsi, stmts);
3226 /* gsi now points at the assignment to the lhs, get a
3227 stmt iterator to the memcpy call.
3228 ??? We can't use gsi_for_stmt as that doesn't work when the
3229 CFG isn't built yet. */
3230 gimple_stmt_iterator gsi2 = *gsi;
3231 gsi_prev (&gsi2);
3232 fold_stmt (&gsi2);
3234 else
3236 gsi_replace_with_seq_vops (gsi, stmts);
3237 fold_stmt (gsi);
3239 return true;
3241 return false;
3244 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3245 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3246 more than 3 arguments, and ARG may be null in the 2-argument case.
3248 Return NULL_TREE if no simplification was possible, otherwise return the
3249 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3250 code of the function to be simplified. */
3252 static bool
3253 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3254 tree fp, tree fmt, tree arg,
3255 enum built_in_function fcode)
3257 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3258 tree fn_fputc, fn_fputs;
3259 const char *fmt_str = NULL;
3261 /* If the return value is used, don't do the transformation. */
3262 if (gimple_call_lhs (stmt) != NULL_TREE)
3263 return false;
3265 /* Check whether the format is a literal string constant. */
3266 fmt_str = c_getstr (fmt);
3267 if (fmt_str == NULL)
3268 return false;
3270 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3272 /* If we're using an unlocked function, assume the other
3273 unlocked functions exist explicitly. */
3274 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3275 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3277 else
3279 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3280 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3283 if (!init_target_chars ())
3284 return false;
3286 /* If the format doesn't contain % args or %%, use strcpy. */
3287 if (strchr (fmt_str, target_percent) == NULL)
3289 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3290 && arg)
3291 return false;
3293 /* If the format specifier was "", fprintf does nothing. */
3294 if (fmt_str[0] == '\0')
3296 replace_call_with_value (gsi, NULL_TREE);
3297 return true;
3300 /* When "string" doesn't contain %, replace all cases of
3301 fprintf (fp, string) with fputs (string, fp). The fputs
3302 builtin will take care of special cases like length == 1. */
3303 if (fn_fputs)
3305 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3306 replace_call_with_call_and_fold (gsi, repl);
3307 return true;
3311 /* The other optimizations can be done only on the non-va_list variants. */
3312 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3313 return false;
3315 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3316 else if (strcmp (fmt_str, target_percent_s) == 0)
3318 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3319 return false;
3320 if (fn_fputs)
3322 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3323 replace_call_with_call_and_fold (gsi, repl);
3324 return true;
3328 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3329 else if (strcmp (fmt_str, target_percent_c) == 0)
3331 if (!arg
3332 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3333 return false;
3334 if (fn_fputc)
3336 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3337 replace_call_with_call_and_fold (gsi, repl);
3338 return true;
3342 return false;
3345 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3346 FMT and ARG are the arguments to the call; we don't fold cases with
3347 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3349 Return NULL_TREE if no simplification was possible, otherwise return the
3350 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3351 code of the function to be simplified. */
3353 static bool
3354 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3355 tree arg, enum built_in_function fcode)
3357 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3358 tree fn_putchar, fn_puts, newarg;
3359 const char *fmt_str = NULL;
3361 /* If the return value is used, don't do the transformation. */
3362 if (gimple_call_lhs (stmt) != NULL_TREE)
3363 return false;
3365 /* Check whether the format is a literal string constant. */
3366 fmt_str = c_getstr (fmt);
3367 if (fmt_str == NULL)
3368 return false;
3370 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3372 /* If we're using an unlocked function, assume the other
3373 unlocked functions exist explicitly. */
3374 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3375 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3377 else
3379 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3380 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3383 if (!init_target_chars ())
3384 return false;
3386 if (strcmp (fmt_str, target_percent_s) == 0
3387 || strchr (fmt_str, target_percent) == NULL)
3389 const char *str;
3391 if (strcmp (fmt_str, target_percent_s) == 0)
3393 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3394 return false;
3396 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3397 return false;
3399 str = c_getstr (arg);
3400 if (str == NULL)
3401 return false;
3403 else
3405 /* The format specifier doesn't contain any '%' characters. */
3406 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3407 && arg)
3408 return false;
3409 str = fmt_str;
3412 /* If the string was "", printf does nothing. */
3413 if (str[0] == '\0')
3415 replace_call_with_value (gsi, NULL_TREE);
3416 return true;
3419 /* If the string has length of 1, call putchar. */
3420 if (str[1] == '\0')
3422 /* Given printf("c"), (where c is any one character,)
3423 convert "c"[0] to an int and pass that to the replacement
3424 function. */
3425 newarg = build_int_cst (integer_type_node, str[0]);
3426 if (fn_putchar)
3428 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3429 replace_call_with_call_and_fold (gsi, repl);
3430 return true;
3433 else
3435 /* If the string was "string\n", call puts("string"). */
3436 size_t len = strlen (str);
3437 if ((unsigned char)str[len - 1] == target_newline
3438 && (size_t) (int) len == len
3439 && (int) len > 0)
3441 char *newstr;
3443 /* Create a NUL-terminated string that's one char shorter
3444 than the original, stripping off the trailing '\n'. */
3445 newstr = xstrdup (str);
3446 newstr[len - 1] = '\0';
3447 newarg = build_string_literal (len, newstr);
3448 free (newstr);
3449 if (fn_puts)
3451 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3452 replace_call_with_call_and_fold (gsi, repl);
3453 return true;
3456 else
3457 /* We'd like to arrange to call fputs(string,stdout) here,
3458 but we need stdout and don't have a way to get it yet. */
3459 return false;
3463 /* The other optimizations can be done only on the non-va_list variants. */
3464 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3465 return false;
3467 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3468 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3470 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3471 return false;
3472 if (fn_puts)
3474 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3475 replace_call_with_call_and_fold (gsi, repl);
3476 return true;
3480 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3481 else if (strcmp (fmt_str, target_percent_c) == 0)
3483 if (!arg || ! useless_type_conversion_p (integer_type_node,
3484 TREE_TYPE (arg)))
3485 return false;
3486 if (fn_putchar)
3488 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3489 replace_call_with_call_and_fold (gsi, repl);
3490 return true;
3494 return false;
3499 /* Fold a call to __builtin_strlen with known length LEN. */
3501 static bool
3502 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3504 gimple *stmt = gsi_stmt (*gsi);
3506 wide_int minlen;
3507 wide_int maxlen;
3509 tree lenrange[2];
3510 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, 1, true)
3511 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3512 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3514 /* The range of lengths refers to either a single constant
3515 string or to the longest and shortest constant string
3516 referenced by the argument of the strlen() call, or to
3517 the strings that can possibly be stored in the arrays
3518 the argument refers to. */
3519 minlen = wi::to_wide (lenrange[0]);
3520 maxlen = wi::to_wide (lenrange[1]);
3522 else
3524 unsigned prec = TYPE_PRECISION (sizetype);
3526 minlen = wi::shwi (0, prec);
3527 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3530 if (minlen == maxlen)
3532 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3533 true, GSI_SAME_STMT);
3534 replace_call_with_value (gsi, lenrange[0]);
3535 return true;
3538 if (tree lhs = gimple_call_lhs (stmt))
3539 if (TREE_CODE (lhs) == SSA_NAME
3540 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3541 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3543 return false;
3546 /* Fold a call to __builtin_acc_on_device. */
3548 static bool
3549 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3551 /* Defer folding until we know which compiler we're in. */
3552 if (symtab->state != EXPANSION)
3553 return false;
3555 unsigned val_host = GOMP_DEVICE_HOST;
3556 unsigned val_dev = GOMP_DEVICE_NONE;
3558 #ifdef ACCEL_COMPILER
3559 val_host = GOMP_DEVICE_NOT_HOST;
3560 val_dev = ACCEL_COMPILER_acc_device;
3561 #endif
3563 location_t loc = gimple_location (gsi_stmt (*gsi));
3565 tree host_eq = make_ssa_name (boolean_type_node);
3566 gimple *host_ass = gimple_build_assign
3567 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3568 gimple_set_location (host_ass, loc);
3569 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3571 tree dev_eq = make_ssa_name (boolean_type_node);
3572 gimple *dev_ass = gimple_build_assign
3573 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3574 gimple_set_location (dev_ass, loc);
3575 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3577 tree result = make_ssa_name (boolean_type_node);
3578 gimple *result_ass = gimple_build_assign
3579 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3580 gimple_set_location (result_ass, loc);
3581 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3583 replace_call_with_value (gsi, result);
3585 return true;
3588 /* Fold realloc (0, n) -> malloc (n). */
3590 static bool
3591 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3593 gimple *stmt = gsi_stmt (*gsi);
3594 tree arg = gimple_call_arg (stmt, 0);
3595 tree size = gimple_call_arg (stmt, 1);
3597 if (operand_equal_p (arg, null_pointer_node, 0))
3599 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3600 if (fn_malloc)
3602 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3603 replace_call_with_call_and_fold (gsi, repl);
3604 return true;
3607 return false;
3610 /* Fold the non-target builtin at *GSI and return whether any simplification
3611 was made. */
3613 static bool
3614 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3616 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3617 tree callee = gimple_call_fndecl (stmt);
3619 /* Give up for always_inline inline builtins until they are
3620 inlined. */
3621 if (avoid_folding_inline_builtin (callee))
3622 return false;
3624 unsigned n = gimple_call_num_args (stmt);
3625 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3626 switch (fcode)
3628 case BUILT_IN_BCMP:
3629 return gimple_fold_builtin_bcmp (gsi);
3630 case BUILT_IN_BCOPY:
3631 return gimple_fold_builtin_bcopy (gsi);
3632 case BUILT_IN_BZERO:
3633 return gimple_fold_builtin_bzero (gsi);
3635 case BUILT_IN_MEMSET:
3636 return gimple_fold_builtin_memset (gsi,
3637 gimple_call_arg (stmt, 1),
3638 gimple_call_arg (stmt, 2));
3639 case BUILT_IN_MEMCPY:
3640 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3641 gimple_call_arg (stmt, 1), 0);
3642 case BUILT_IN_MEMPCPY:
3643 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3644 gimple_call_arg (stmt, 1), 1);
3645 case BUILT_IN_MEMMOVE:
3646 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3647 gimple_call_arg (stmt, 1), 3);
3648 case BUILT_IN_SPRINTF_CHK:
3649 case BUILT_IN_VSPRINTF_CHK:
3650 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3651 case BUILT_IN_STRCAT_CHK:
3652 return gimple_fold_builtin_strcat_chk (gsi);
3653 case BUILT_IN_STRNCAT_CHK:
3654 return gimple_fold_builtin_strncat_chk (gsi);
3655 case BUILT_IN_STRLEN:
3656 return gimple_fold_builtin_strlen (gsi);
3657 case BUILT_IN_STRCPY:
3658 return gimple_fold_builtin_strcpy (gsi,
3659 gimple_call_arg (stmt, 0),
3660 gimple_call_arg (stmt, 1));
3661 case BUILT_IN_STRNCPY:
3662 return gimple_fold_builtin_strncpy (gsi,
3663 gimple_call_arg (stmt, 0),
3664 gimple_call_arg (stmt, 1),
3665 gimple_call_arg (stmt, 2));
3666 case BUILT_IN_STRCAT:
3667 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3668 gimple_call_arg (stmt, 1));
3669 case BUILT_IN_STRNCAT:
3670 return gimple_fold_builtin_strncat (gsi);
3671 case BUILT_IN_INDEX:
3672 case BUILT_IN_STRCHR:
3673 return gimple_fold_builtin_strchr (gsi, false);
3674 case BUILT_IN_RINDEX:
3675 case BUILT_IN_STRRCHR:
3676 return gimple_fold_builtin_strchr (gsi, true);
3677 case BUILT_IN_STRSTR:
3678 return gimple_fold_builtin_strstr (gsi);
3679 case BUILT_IN_STRCMP:
3680 case BUILT_IN_STRCMP_EQ:
3681 case BUILT_IN_STRCASECMP:
3682 case BUILT_IN_STRNCMP:
3683 case BUILT_IN_STRNCMP_EQ:
3684 case BUILT_IN_STRNCASECMP:
3685 return gimple_fold_builtin_string_compare (gsi);
3686 case BUILT_IN_MEMCHR:
3687 return gimple_fold_builtin_memchr (gsi);
3688 case BUILT_IN_FPUTS:
3689 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3690 gimple_call_arg (stmt, 1), false);
3691 case BUILT_IN_FPUTS_UNLOCKED:
3692 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3693 gimple_call_arg (stmt, 1), true);
3694 case BUILT_IN_MEMCPY_CHK:
3695 case BUILT_IN_MEMPCPY_CHK:
3696 case BUILT_IN_MEMMOVE_CHK:
3697 case BUILT_IN_MEMSET_CHK:
3698 return gimple_fold_builtin_memory_chk (gsi,
3699 gimple_call_arg (stmt, 0),
3700 gimple_call_arg (stmt, 1),
3701 gimple_call_arg (stmt, 2),
3702 gimple_call_arg (stmt, 3),
3703 fcode);
3704 case BUILT_IN_STPCPY:
3705 return gimple_fold_builtin_stpcpy (gsi);
3706 case BUILT_IN_STRCPY_CHK:
3707 case BUILT_IN_STPCPY_CHK:
3708 return gimple_fold_builtin_stxcpy_chk (gsi,
3709 gimple_call_arg (stmt, 0),
3710 gimple_call_arg (stmt, 1),
3711 gimple_call_arg (stmt, 2),
3712 fcode);
3713 case BUILT_IN_STRNCPY_CHK:
3714 case BUILT_IN_STPNCPY_CHK:
3715 return gimple_fold_builtin_stxncpy_chk (gsi,
3716 gimple_call_arg (stmt, 0),
3717 gimple_call_arg (stmt, 1),
3718 gimple_call_arg (stmt, 2),
3719 gimple_call_arg (stmt, 3),
3720 fcode);
3721 case BUILT_IN_SNPRINTF_CHK:
3722 case BUILT_IN_VSNPRINTF_CHK:
3723 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3725 case BUILT_IN_FPRINTF:
3726 case BUILT_IN_FPRINTF_UNLOCKED:
3727 case BUILT_IN_VFPRINTF:
3728 if (n == 2 || n == 3)
3729 return gimple_fold_builtin_fprintf (gsi,
3730 gimple_call_arg (stmt, 0),
3731 gimple_call_arg (stmt, 1),
3732 n == 3
3733 ? gimple_call_arg (stmt, 2)
3734 : NULL_TREE,
3735 fcode);
3736 break;
3737 case BUILT_IN_FPRINTF_CHK:
3738 case BUILT_IN_VFPRINTF_CHK:
3739 if (n == 3 || n == 4)
3740 return gimple_fold_builtin_fprintf (gsi,
3741 gimple_call_arg (stmt, 0),
3742 gimple_call_arg (stmt, 2),
3743 n == 4
3744 ? gimple_call_arg (stmt, 3)
3745 : NULL_TREE,
3746 fcode);
3747 break;
3748 case BUILT_IN_PRINTF:
3749 case BUILT_IN_PRINTF_UNLOCKED:
3750 case BUILT_IN_VPRINTF:
3751 if (n == 1 || n == 2)
3752 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3753 n == 2
3754 ? gimple_call_arg (stmt, 1)
3755 : NULL_TREE, fcode);
3756 break;
3757 case BUILT_IN_PRINTF_CHK:
3758 case BUILT_IN_VPRINTF_CHK:
3759 if (n == 2 || n == 3)
3760 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3761 n == 3
3762 ? gimple_call_arg (stmt, 2)
3763 : NULL_TREE, fcode);
3764 break;
3765 case BUILT_IN_ACC_ON_DEVICE:
3766 return gimple_fold_builtin_acc_on_device (gsi,
3767 gimple_call_arg (stmt, 0));
3768 case BUILT_IN_REALLOC:
3769 return gimple_fold_builtin_realloc (gsi);
3771 default:;
3774 /* Try the generic builtin folder. */
3775 bool ignore = (gimple_call_lhs (stmt) == NULL);
3776 tree result = fold_call_stmt (stmt, ignore);
3777 if (result)
3779 if (ignore)
3780 STRIP_NOPS (result);
3781 else
3782 result = fold_convert (gimple_call_return_type (stmt), result);
3783 if (!update_call_from_tree (gsi, result))
3784 gimplify_and_update_call_from_tree (gsi, result);
3785 return true;
3788 return false;
3791 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3792 function calls to constants, where possible. */
3794 static tree
3795 fold_internal_goacc_dim (const gimple *call)
3797 int axis = oacc_get_ifn_dim_arg (call);
3798 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3799 tree result = NULL_TREE;
3800 tree type = TREE_TYPE (gimple_call_lhs (call));
3802 switch (gimple_call_internal_fn (call))
3804 case IFN_GOACC_DIM_POS:
3805 /* If the size is 1, we know the answer. */
3806 if (size == 1)
3807 result = build_int_cst (type, 0);
3808 break;
3809 case IFN_GOACC_DIM_SIZE:
3810 /* If the size is not dynamic, we know the answer. */
3811 if (size)
3812 result = build_int_cst (type, size);
3813 break;
3814 default:
3815 break;
3818 return result;
3821 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3822 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3823 &var where var is only addressable because of such calls. */
3825 bool
3826 optimize_atomic_compare_exchange_p (gimple *stmt)
3828 if (gimple_call_num_args (stmt) != 6
3829 || !flag_inline_atomics
3830 || !optimize
3831 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3832 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3833 || !gimple_vdef (stmt)
3834 || !gimple_vuse (stmt))
3835 return false;
3837 tree fndecl = gimple_call_fndecl (stmt);
3838 switch (DECL_FUNCTION_CODE (fndecl))
3840 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3841 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3842 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3843 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3844 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3845 break;
3846 default:
3847 return false;
3850 tree expected = gimple_call_arg (stmt, 1);
3851 if (TREE_CODE (expected) != ADDR_EXPR
3852 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3853 return false;
3855 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3856 if (!is_gimple_reg_type (etype)
3857 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3858 || TREE_THIS_VOLATILE (etype)
3859 || VECTOR_TYPE_P (etype)
3860 || TREE_CODE (etype) == COMPLEX_TYPE
3861 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3862 might not preserve all the bits. See PR71716. */
3863 || SCALAR_FLOAT_TYPE_P (etype)
3864 || maybe_ne (TYPE_PRECISION (etype),
3865 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3866 return false;
3868 tree weak = gimple_call_arg (stmt, 3);
3869 if (!integer_zerop (weak) && !integer_onep (weak))
3870 return false;
3872 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3873 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3874 machine_mode mode = TYPE_MODE (itype);
3876 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3877 == CODE_FOR_nothing
3878 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3879 return false;
3881 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3882 return false;
3884 return true;
3887 /* Fold
3888 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3889 into
3890 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3891 i = IMAGPART_EXPR <t>;
3892 r = (_Bool) i;
3893 e = REALPART_EXPR <t>; */
3895 void
3896 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3898 gimple *stmt = gsi_stmt (*gsi);
3899 tree fndecl = gimple_call_fndecl (stmt);
3900 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3901 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3902 tree ctype = build_complex_type (itype);
3903 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3904 bool throws = false;
3905 edge e = NULL;
3906 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3907 expected);
3908 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3909 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3910 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3912 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3913 build1 (VIEW_CONVERT_EXPR, itype,
3914 gimple_assign_lhs (g)));
3915 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3917 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3918 + int_size_in_bytes (itype);
3919 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3920 gimple_call_arg (stmt, 0),
3921 gimple_assign_lhs (g),
3922 gimple_call_arg (stmt, 2),
3923 build_int_cst (integer_type_node, flag),
3924 gimple_call_arg (stmt, 4),
3925 gimple_call_arg (stmt, 5));
3926 tree lhs = make_ssa_name (ctype);
3927 gimple_call_set_lhs (g, lhs);
3928 gimple_set_vdef (g, gimple_vdef (stmt));
3929 gimple_set_vuse (g, gimple_vuse (stmt));
3930 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3931 tree oldlhs = gimple_call_lhs (stmt);
3932 if (stmt_can_throw_internal (stmt))
3934 throws = true;
3935 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3937 gimple_call_set_nothrow (as_a <gcall *> (g),
3938 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3939 gimple_call_set_lhs (stmt, NULL_TREE);
3940 gsi_replace (gsi, g, true);
3941 if (oldlhs)
3943 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3944 build1 (IMAGPART_EXPR, itype, lhs));
3945 if (throws)
3947 gsi_insert_on_edge_immediate (e, g);
3948 *gsi = gsi_for_stmt (g);
3950 else
3951 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3952 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3953 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3955 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3956 build1 (REALPART_EXPR, itype, lhs));
3957 if (throws && oldlhs == NULL_TREE)
3959 gsi_insert_on_edge_immediate (e, g);
3960 *gsi = gsi_for_stmt (g);
3962 else
3963 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3964 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3966 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3967 VIEW_CONVERT_EXPR,
3968 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3969 gimple_assign_lhs (g)));
3970 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3972 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3973 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3974 *gsi = gsiret;
3977 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3978 doesn't fit into TYPE. The test for overflow should be regardless of
3979 -fwrapv, and even for unsigned types. */
3981 bool
3982 arith_overflowed_p (enum tree_code code, const_tree type,
3983 const_tree arg0, const_tree arg1)
3985 widest2_int warg0 = widest2_int_cst (arg0);
3986 widest2_int warg1 = widest2_int_cst (arg1);
3987 widest2_int wres;
3988 switch (code)
3990 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3991 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3992 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3993 default: gcc_unreachable ();
3995 signop sign = TYPE_SIGN (type);
3996 if (sign == UNSIGNED && wi::neg_p (wres))
3997 return true;
3998 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4001 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4002 The statement may be replaced by another statement, e.g., if the call
4003 simplifies to a constant value. Return true if any changes were made.
4004 It is assumed that the operands have been previously folded. */
4006 static bool
4007 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4009 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4010 tree callee;
4011 bool changed = false;
4012 unsigned i;
4014 /* Fold *& in call arguments. */
4015 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4016 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4018 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4019 if (tmp)
4021 gimple_call_set_arg (stmt, i, tmp);
4022 changed = true;
4026 /* Check for virtual calls that became direct calls. */
4027 callee = gimple_call_fn (stmt);
4028 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4030 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4032 if (dump_file && virtual_method_call_p (callee)
4033 && !possible_polymorphic_call_target_p
4034 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4035 (OBJ_TYPE_REF_EXPR (callee)))))
4037 fprintf (dump_file,
4038 "Type inheritance inconsistent devirtualization of ");
4039 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4040 fprintf (dump_file, " to ");
4041 print_generic_expr (dump_file, callee, TDF_SLIM);
4042 fprintf (dump_file, "\n");
4045 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4046 changed = true;
4048 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4050 bool final;
4051 vec <cgraph_node *>targets
4052 = possible_polymorphic_call_targets (callee, stmt, &final);
4053 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4055 tree lhs = gimple_call_lhs (stmt);
4056 if (dump_enabled_p ())
4058 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4059 "folding virtual function call to %s\n",
4060 targets.length () == 1
4061 ? targets[0]->name ()
4062 : "__builtin_unreachable");
4064 if (targets.length () == 1)
4066 tree fndecl = targets[0]->decl;
4067 gimple_call_set_fndecl (stmt, fndecl);
4068 changed = true;
4069 /* If changing the call to __cxa_pure_virtual
4070 or similar noreturn function, adjust gimple_call_fntype
4071 too. */
4072 if (gimple_call_noreturn_p (stmt)
4073 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4074 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4075 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4076 == void_type_node))
4077 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4078 /* If the call becomes noreturn, remove the lhs. */
4079 if (lhs
4080 && gimple_call_noreturn_p (stmt)
4081 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4082 || should_remove_lhs_p (lhs)))
4084 if (TREE_CODE (lhs) == SSA_NAME)
4086 tree var = create_tmp_var (TREE_TYPE (lhs));
4087 tree def = get_or_create_ssa_default_def (cfun, var);
4088 gimple *new_stmt = gimple_build_assign (lhs, def);
4089 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4091 gimple_call_set_lhs (stmt, NULL_TREE);
4093 maybe_remove_unused_call_args (cfun, stmt);
4095 else
4097 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4098 gimple *new_stmt = gimple_build_call (fndecl, 0);
4099 gimple_set_location (new_stmt, gimple_location (stmt));
4100 /* If the call had a SSA name as lhs morph that into
4101 an uninitialized value. */
4102 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4104 tree var = create_tmp_var (TREE_TYPE (lhs));
4105 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4106 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4107 set_ssa_default_def (cfun, var, lhs);
4109 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4110 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4111 gsi_replace (gsi, new_stmt, false);
4112 return true;
4118 /* Check for indirect calls that became direct calls, and then
4119 no longer require a static chain. */
4120 if (gimple_call_chain (stmt))
4122 tree fn = gimple_call_fndecl (stmt);
4123 if (fn && !DECL_STATIC_CHAIN (fn))
4125 gimple_call_set_chain (stmt, NULL);
4126 changed = true;
4128 else
4130 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4131 if (tmp)
4133 gimple_call_set_chain (stmt, tmp);
4134 changed = true;
4139 if (inplace)
4140 return changed;
4142 /* Check for builtins that CCP can handle using information not
4143 available in the generic fold routines. */
4144 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4146 if (gimple_fold_builtin (gsi))
4147 changed = true;
4149 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4151 changed |= targetm.gimple_fold_builtin (gsi);
4153 else if (gimple_call_internal_p (stmt))
4155 enum tree_code subcode = ERROR_MARK;
4156 tree result = NULL_TREE;
4157 bool cplx_result = false;
4158 tree overflow = NULL_TREE;
4159 switch (gimple_call_internal_fn (stmt))
4161 case IFN_BUILTIN_EXPECT:
4162 result = fold_builtin_expect (gimple_location (stmt),
4163 gimple_call_arg (stmt, 0),
4164 gimple_call_arg (stmt, 1),
4165 gimple_call_arg (stmt, 2),
4166 NULL_TREE);
4167 break;
4168 case IFN_UBSAN_OBJECT_SIZE:
4170 tree offset = gimple_call_arg (stmt, 1);
4171 tree objsize = gimple_call_arg (stmt, 2);
4172 if (integer_all_onesp (objsize)
4173 || (TREE_CODE (offset) == INTEGER_CST
4174 && TREE_CODE (objsize) == INTEGER_CST
4175 && tree_int_cst_le (offset, objsize)))
4177 replace_call_with_value (gsi, NULL_TREE);
4178 return true;
4181 break;
4182 case IFN_UBSAN_PTR:
4183 if (integer_zerop (gimple_call_arg (stmt, 1)))
4185 replace_call_with_value (gsi, NULL_TREE);
4186 return true;
4188 break;
4189 case IFN_UBSAN_BOUNDS:
4191 tree index = gimple_call_arg (stmt, 1);
4192 tree bound = gimple_call_arg (stmt, 2);
4193 if (TREE_CODE (index) == INTEGER_CST
4194 && TREE_CODE (bound) == INTEGER_CST)
4196 index = fold_convert (TREE_TYPE (bound), index);
4197 if (TREE_CODE (index) == INTEGER_CST
4198 && tree_int_cst_le (index, bound))
4200 replace_call_with_value (gsi, NULL_TREE);
4201 return true;
4205 break;
4206 case IFN_GOACC_DIM_SIZE:
4207 case IFN_GOACC_DIM_POS:
4208 result = fold_internal_goacc_dim (stmt);
4209 break;
4210 case IFN_UBSAN_CHECK_ADD:
4211 subcode = PLUS_EXPR;
4212 break;
4213 case IFN_UBSAN_CHECK_SUB:
4214 subcode = MINUS_EXPR;
4215 break;
4216 case IFN_UBSAN_CHECK_MUL:
4217 subcode = MULT_EXPR;
4218 break;
4219 case IFN_ADD_OVERFLOW:
4220 subcode = PLUS_EXPR;
4221 cplx_result = true;
4222 break;
4223 case IFN_SUB_OVERFLOW:
4224 subcode = MINUS_EXPR;
4225 cplx_result = true;
4226 break;
4227 case IFN_MUL_OVERFLOW:
4228 subcode = MULT_EXPR;
4229 cplx_result = true;
4230 break;
4231 default:
4232 break;
4234 if (subcode != ERROR_MARK)
4236 tree arg0 = gimple_call_arg (stmt, 0);
4237 tree arg1 = gimple_call_arg (stmt, 1);
4238 tree type = TREE_TYPE (arg0);
4239 if (cplx_result)
4241 tree lhs = gimple_call_lhs (stmt);
4242 if (lhs == NULL_TREE)
4243 type = NULL_TREE;
4244 else
4245 type = TREE_TYPE (TREE_TYPE (lhs));
4247 if (type == NULL_TREE)
4249 /* x = y + 0; x = y - 0; x = y * 0; */
4250 else if (integer_zerop (arg1))
4251 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4252 /* x = 0 + y; x = 0 * y; */
4253 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4254 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4255 /* x = y - y; */
4256 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4257 result = integer_zero_node;
4258 /* x = y * 1; x = 1 * y; */
4259 else if (subcode == MULT_EXPR && integer_onep (arg1))
4260 result = arg0;
4261 else if (subcode == MULT_EXPR && integer_onep (arg0))
4262 result = arg1;
4263 else if (TREE_CODE (arg0) == INTEGER_CST
4264 && TREE_CODE (arg1) == INTEGER_CST)
4266 if (cplx_result)
4267 result = int_const_binop (subcode, fold_convert (type, arg0),
4268 fold_convert (type, arg1));
4269 else
4270 result = int_const_binop (subcode, arg0, arg1);
4271 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4273 if (cplx_result)
4274 overflow = build_one_cst (type);
4275 else
4276 result = NULL_TREE;
4279 if (result)
4281 if (result == integer_zero_node)
4282 result = build_zero_cst (type);
4283 else if (cplx_result && TREE_TYPE (result) != type)
4285 if (TREE_CODE (result) == INTEGER_CST)
4287 if (arith_overflowed_p (PLUS_EXPR, type, result,
4288 integer_zero_node))
4289 overflow = build_one_cst (type);
4291 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4292 && TYPE_UNSIGNED (type))
4293 || (TYPE_PRECISION (type)
4294 < (TYPE_PRECISION (TREE_TYPE (result))
4295 + (TYPE_UNSIGNED (TREE_TYPE (result))
4296 && !TYPE_UNSIGNED (type)))))
4297 result = NULL_TREE;
4298 if (result)
4299 result = fold_convert (type, result);
4304 if (result)
4306 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4307 result = drop_tree_overflow (result);
4308 if (cplx_result)
4310 if (overflow == NULL_TREE)
4311 overflow = build_zero_cst (TREE_TYPE (result));
4312 tree ctype = build_complex_type (TREE_TYPE (result));
4313 if (TREE_CODE (result) == INTEGER_CST
4314 && TREE_CODE (overflow) == INTEGER_CST)
4315 result = build_complex (ctype, result, overflow);
4316 else
4317 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4318 ctype, result, overflow);
4320 if (!update_call_from_tree (gsi, result))
4321 gimplify_and_update_call_from_tree (gsi, result);
4322 changed = true;
4326 return changed;
4330 /* Return true whether NAME has a use on STMT. */
4332 static bool
4333 has_use_on_stmt (tree name, gimple *stmt)
4335 imm_use_iterator iter;
4336 use_operand_p use_p;
4337 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4338 if (USE_STMT (use_p) == stmt)
4339 return true;
4340 return false;
4343 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4344 gimple_simplify.
4346 Replaces *GSI with the simplification result in RCODE and OPS
4347 and the associated statements in *SEQ. Does the replacement
4348 according to INPLACE and returns true if the operation succeeded. */
4350 static bool
4351 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4352 gimple_match_op *res_op,
4353 gimple_seq *seq, bool inplace)
4355 gimple *stmt = gsi_stmt (*gsi);
4356 tree *ops = res_op->ops;
4357 unsigned int num_ops = res_op->num_ops;
4359 /* Play safe and do not allow abnormals to be mentioned in
4360 newly created statements. See also maybe_push_res_to_seq.
4361 As an exception allow such uses if there was a use of the
4362 same SSA name on the old stmt. */
4363 for (unsigned int i = 0; i < num_ops; ++i)
4364 if (TREE_CODE (ops[i]) == SSA_NAME
4365 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4366 && !has_use_on_stmt (ops[i], stmt))
4367 return false;
4369 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4370 for (unsigned int i = 0; i < 2; ++i)
4371 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4372 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4373 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4374 return false;
4376 /* Don't insert new statements when INPLACE is true, even if we could
4377 reuse STMT for the final statement. */
4378 if (inplace && !gimple_seq_empty_p (*seq))
4379 return false;
4381 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4383 gcc_assert (res_op->code.is_tree_code ());
4384 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4385 /* GIMPLE_CONDs condition may not throw. */
4386 && (!flag_exceptions
4387 || !cfun->can_throw_non_call_exceptions
4388 || !operation_could_trap_p (res_op->code,
4389 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4390 false, NULL_TREE)))
4391 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4392 else if (res_op->code == SSA_NAME)
4393 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4394 build_zero_cst (TREE_TYPE (ops[0])));
4395 else if (res_op->code == INTEGER_CST)
4397 if (integer_zerop (ops[0]))
4398 gimple_cond_make_false (cond_stmt);
4399 else
4400 gimple_cond_make_true (cond_stmt);
4402 else if (!inplace)
4404 tree res = maybe_push_res_to_seq (res_op, seq);
4405 if (!res)
4406 return false;
4407 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4408 build_zero_cst (TREE_TYPE (res)));
4410 else
4411 return false;
4412 if (dump_file && (dump_flags & TDF_DETAILS))
4414 fprintf (dump_file, "gimple_simplified to ");
4415 if (!gimple_seq_empty_p (*seq))
4416 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4417 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4418 0, TDF_SLIM);
4420 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4421 return true;
4423 else if (is_gimple_assign (stmt)
4424 && res_op->code.is_tree_code ())
4426 if (!inplace
4427 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4429 maybe_build_generic_op (res_op);
4430 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4431 res_op->op_or_null (0),
4432 res_op->op_or_null (1),
4433 res_op->op_or_null (2));
4434 if (dump_file && (dump_flags & TDF_DETAILS))
4436 fprintf (dump_file, "gimple_simplified to ");
4437 if (!gimple_seq_empty_p (*seq))
4438 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4439 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4440 0, TDF_SLIM);
4442 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4443 return true;
4446 else if (res_op->code.is_fn_code ()
4447 && gimple_call_combined_fn (stmt) == res_op->code)
4449 gcc_assert (num_ops == gimple_call_num_args (stmt));
4450 for (unsigned int i = 0; i < num_ops; ++i)
4451 gimple_call_set_arg (stmt, i, ops[i]);
4452 if (dump_file && (dump_flags & TDF_DETAILS))
4454 fprintf (dump_file, "gimple_simplified to ");
4455 if (!gimple_seq_empty_p (*seq))
4456 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4457 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4459 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4460 return true;
4462 else if (!inplace)
4464 if (gimple_has_lhs (stmt))
4466 tree lhs = gimple_get_lhs (stmt);
4467 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4468 return false;
4469 if (dump_file && (dump_flags & TDF_DETAILS))
4471 fprintf (dump_file, "gimple_simplified to ");
4472 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4474 gsi_replace_with_seq_vops (gsi, *seq);
4475 return true;
4477 else
4478 gcc_unreachable ();
4481 return false;
4484 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4486 static bool
4487 maybe_canonicalize_mem_ref_addr (tree *t)
4489 bool res = false;
4491 if (TREE_CODE (*t) == ADDR_EXPR)
4492 t = &TREE_OPERAND (*t, 0);
4494 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4495 generic vector extension. The actual vector referenced is
4496 view-converted to an array type for this purpose. If the index
4497 is constant the canonical representation in the middle-end is a
4498 BIT_FIELD_REF so re-write the former to the latter here. */
4499 if (TREE_CODE (*t) == ARRAY_REF
4500 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4501 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4502 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4504 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4505 if (VECTOR_TYPE_P (vtype))
4507 tree low = array_ref_low_bound (*t);
4508 if (TREE_CODE (low) == INTEGER_CST)
4510 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4512 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4513 wi::to_widest (low));
4514 idx = wi::mul (idx, wi::to_widest
4515 (TYPE_SIZE (TREE_TYPE (*t))));
4516 widest_int ext
4517 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4518 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4520 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4521 TREE_TYPE (*t),
4522 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4523 TYPE_SIZE (TREE_TYPE (*t)),
4524 wide_int_to_tree (bitsizetype, idx));
4525 res = true;
4532 while (handled_component_p (*t))
4533 t = &TREE_OPERAND (*t, 0);
4535 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4536 of invariant addresses into a SSA name MEM_REF address. */
4537 if (TREE_CODE (*t) == MEM_REF
4538 || TREE_CODE (*t) == TARGET_MEM_REF)
4540 tree addr = TREE_OPERAND (*t, 0);
4541 if (TREE_CODE (addr) == ADDR_EXPR
4542 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4543 || handled_component_p (TREE_OPERAND (addr, 0))))
4545 tree base;
4546 poly_int64 coffset;
4547 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4548 &coffset);
4549 if (!base)
4550 gcc_unreachable ();
4552 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4553 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4554 TREE_OPERAND (*t, 1),
4555 size_int (coffset));
4556 res = true;
4558 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4559 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4562 /* Canonicalize back MEM_REFs to plain reference trees if the object
4563 accessed is a decl that has the same access semantics as the MEM_REF. */
4564 if (TREE_CODE (*t) == MEM_REF
4565 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4566 && integer_zerop (TREE_OPERAND (*t, 1))
4567 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4569 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4570 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4571 if (/* Same volatile qualification. */
4572 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4573 /* Same TBAA behavior with -fstrict-aliasing. */
4574 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4575 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4576 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4577 /* Same alignment. */
4578 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4579 /* We have to look out here to not drop a required conversion
4580 from the rhs to the lhs if *t appears on the lhs or vice-versa
4581 if it appears on the rhs. Thus require strict type
4582 compatibility. */
4583 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4585 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4586 res = true;
4590 /* Canonicalize TARGET_MEM_REF in particular with respect to
4591 the indexes becoming constant. */
4592 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4594 tree tem = maybe_fold_tmr (*t);
4595 if (tem)
4597 *t = tem;
4598 res = true;
4602 return res;
4605 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4606 distinguishes both cases. */
4608 static bool
4609 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4611 bool changed = false;
4612 gimple *stmt = gsi_stmt (*gsi);
4613 bool nowarning = gimple_no_warning_p (stmt);
4614 unsigned i;
4615 fold_defer_overflow_warnings ();
4617 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4618 after propagation.
4619 ??? This shouldn't be done in generic folding but in the
4620 propagation helpers which also know whether an address was
4621 propagated.
4622 Also canonicalize operand order. */
4623 switch (gimple_code (stmt))
4625 case GIMPLE_ASSIGN:
4626 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4628 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4629 if ((REFERENCE_CLASS_P (*rhs)
4630 || TREE_CODE (*rhs) == ADDR_EXPR)
4631 && maybe_canonicalize_mem_ref_addr (rhs))
4632 changed = true;
4633 tree *lhs = gimple_assign_lhs_ptr (stmt);
4634 if (REFERENCE_CLASS_P (*lhs)
4635 && maybe_canonicalize_mem_ref_addr (lhs))
4636 changed = true;
4638 else
4640 /* Canonicalize operand order. */
4641 enum tree_code code = gimple_assign_rhs_code (stmt);
4642 if (TREE_CODE_CLASS (code) == tcc_comparison
4643 || commutative_tree_code (code)
4644 || commutative_ternary_tree_code (code))
4646 tree rhs1 = gimple_assign_rhs1 (stmt);
4647 tree rhs2 = gimple_assign_rhs2 (stmt);
4648 if (tree_swap_operands_p (rhs1, rhs2))
4650 gimple_assign_set_rhs1 (stmt, rhs2);
4651 gimple_assign_set_rhs2 (stmt, rhs1);
4652 if (TREE_CODE_CLASS (code) == tcc_comparison)
4653 gimple_assign_set_rhs_code (stmt,
4654 swap_tree_comparison (code));
4655 changed = true;
4659 break;
4660 case GIMPLE_CALL:
4662 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4664 tree *arg = gimple_call_arg_ptr (stmt, i);
4665 if (REFERENCE_CLASS_P (*arg)
4666 && maybe_canonicalize_mem_ref_addr (arg))
4667 changed = true;
4669 tree *lhs = gimple_call_lhs_ptr (stmt);
4670 if (*lhs
4671 && REFERENCE_CLASS_P (*lhs)
4672 && maybe_canonicalize_mem_ref_addr (lhs))
4673 changed = true;
4674 break;
4676 case GIMPLE_ASM:
4678 gasm *asm_stmt = as_a <gasm *> (stmt);
4679 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4681 tree link = gimple_asm_output_op (asm_stmt, i);
4682 tree op = TREE_VALUE (link);
4683 if (REFERENCE_CLASS_P (op)
4684 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4685 changed = true;
4687 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4689 tree link = gimple_asm_input_op (asm_stmt, i);
4690 tree op = TREE_VALUE (link);
4691 if ((REFERENCE_CLASS_P (op)
4692 || TREE_CODE (op) == ADDR_EXPR)
4693 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4694 changed = true;
4697 break;
4698 case GIMPLE_DEBUG:
4699 if (gimple_debug_bind_p (stmt))
4701 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4702 if (*val
4703 && (REFERENCE_CLASS_P (*val)
4704 || TREE_CODE (*val) == ADDR_EXPR)
4705 && maybe_canonicalize_mem_ref_addr (val))
4706 changed = true;
4708 break;
4709 case GIMPLE_COND:
4711 /* Canonicalize operand order. */
4712 tree lhs = gimple_cond_lhs (stmt);
4713 tree rhs = gimple_cond_rhs (stmt);
4714 if (tree_swap_operands_p (lhs, rhs))
4716 gcond *gc = as_a <gcond *> (stmt);
4717 gimple_cond_set_lhs (gc, rhs);
4718 gimple_cond_set_rhs (gc, lhs);
4719 gimple_cond_set_code (gc,
4720 swap_tree_comparison (gimple_cond_code (gc)));
4721 changed = true;
4724 default:;
4727 /* Dispatch to pattern-based folding. */
4728 if (!inplace
4729 || is_gimple_assign (stmt)
4730 || gimple_code (stmt) == GIMPLE_COND)
4732 gimple_seq seq = NULL;
4733 gimple_match_op res_op;
4734 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4735 valueize, valueize))
4737 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4738 changed = true;
4739 else
4740 gimple_seq_discard (seq);
4744 stmt = gsi_stmt (*gsi);
4746 /* Fold the main computation performed by the statement. */
4747 switch (gimple_code (stmt))
4749 case GIMPLE_ASSIGN:
4751 /* Try to canonicalize for boolean-typed X the comparisons
4752 X == 0, X == 1, X != 0, and X != 1. */
4753 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4754 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4756 tree lhs = gimple_assign_lhs (stmt);
4757 tree op1 = gimple_assign_rhs1 (stmt);
4758 tree op2 = gimple_assign_rhs2 (stmt);
4759 tree type = TREE_TYPE (op1);
4761 /* Check whether the comparison operands are of the same boolean
4762 type as the result type is.
4763 Check that second operand is an integer-constant with value
4764 one or zero. */
4765 if (TREE_CODE (op2) == INTEGER_CST
4766 && (integer_zerop (op2) || integer_onep (op2))
4767 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4769 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4770 bool is_logical_not = false;
4772 /* X == 0 and X != 1 is a logical-not.of X
4773 X == 1 and X != 0 is X */
4774 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4775 || (cmp_code == NE_EXPR && integer_onep (op2)))
4776 is_logical_not = true;
4778 if (is_logical_not == false)
4779 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4780 /* Only for one-bit precision typed X the transformation
4781 !X -> ~X is valied. */
4782 else if (TYPE_PRECISION (type) == 1)
4783 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4784 /* Otherwise we use !X -> X ^ 1. */
4785 else
4786 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4787 build_int_cst (type, 1));
4788 changed = true;
4789 break;
4793 unsigned old_num_ops = gimple_num_ops (stmt);
4794 tree lhs = gimple_assign_lhs (stmt);
4795 tree new_rhs = fold_gimple_assign (gsi);
4796 if (new_rhs
4797 && !useless_type_conversion_p (TREE_TYPE (lhs),
4798 TREE_TYPE (new_rhs)))
4799 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4800 if (new_rhs
4801 && (!inplace
4802 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4804 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4805 changed = true;
4807 break;
4810 case GIMPLE_CALL:
4811 changed |= gimple_fold_call (gsi, inplace);
4812 break;
4814 case GIMPLE_ASM:
4815 /* Fold *& in asm operands. */
4817 gasm *asm_stmt = as_a <gasm *> (stmt);
4818 size_t noutputs;
4819 const char **oconstraints;
4820 const char *constraint;
4821 bool allows_mem, allows_reg;
4823 noutputs = gimple_asm_noutputs (asm_stmt);
4824 oconstraints = XALLOCAVEC (const char *, noutputs);
4826 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4828 tree link = gimple_asm_output_op (asm_stmt, i);
4829 tree op = TREE_VALUE (link);
4830 oconstraints[i]
4831 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4832 if (REFERENCE_CLASS_P (op)
4833 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4835 TREE_VALUE (link) = op;
4836 changed = true;
4839 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4841 tree link = gimple_asm_input_op (asm_stmt, i);
4842 tree op = TREE_VALUE (link);
4843 constraint
4844 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4845 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4846 oconstraints, &allows_mem, &allows_reg);
4847 if (REFERENCE_CLASS_P (op)
4848 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4849 != NULL_TREE)
4851 TREE_VALUE (link) = op;
4852 changed = true;
4856 break;
4858 case GIMPLE_DEBUG:
4859 if (gimple_debug_bind_p (stmt))
4861 tree val = gimple_debug_bind_get_value (stmt);
4862 if (val
4863 && REFERENCE_CLASS_P (val))
4865 tree tem = maybe_fold_reference (val, false);
4866 if (tem)
4868 gimple_debug_bind_set_value (stmt, tem);
4869 changed = true;
4872 else if (val
4873 && TREE_CODE (val) == ADDR_EXPR)
4875 tree ref = TREE_OPERAND (val, 0);
4876 tree tem = maybe_fold_reference (ref, false);
4877 if (tem)
4879 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4880 gimple_debug_bind_set_value (stmt, tem);
4881 changed = true;
4885 break;
4887 case GIMPLE_RETURN:
4889 greturn *ret_stmt = as_a<greturn *> (stmt);
4890 tree ret = gimple_return_retval(ret_stmt);
4892 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4894 tree val = valueize (ret);
4895 if (val && val != ret
4896 && may_propagate_copy (ret, val))
4898 gimple_return_set_retval (ret_stmt, val);
4899 changed = true;
4903 break;
4905 default:;
4908 stmt = gsi_stmt (*gsi);
4910 /* Fold *& on the lhs. */
4911 if (gimple_has_lhs (stmt))
4913 tree lhs = gimple_get_lhs (stmt);
4914 if (lhs && REFERENCE_CLASS_P (lhs))
4916 tree new_lhs = maybe_fold_reference (lhs, true);
4917 if (new_lhs)
4919 gimple_set_lhs (stmt, new_lhs);
4920 changed = true;
4925 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4926 return changed;
4929 /* Valueziation callback that ends up not following SSA edges. */
4931 tree
4932 no_follow_ssa_edges (tree)
4934 return NULL_TREE;
4937 /* Valueization callback that ends up following single-use SSA edges only. */
4939 tree
4940 follow_single_use_edges (tree val)
4942 if (TREE_CODE (val) == SSA_NAME
4943 && !has_single_use (val))
4944 return NULL_TREE;
4945 return val;
4948 /* Valueization callback that follows all SSA edges. */
4950 tree
4951 follow_all_ssa_edges (tree val)
4953 return val;
4956 /* Fold the statement pointed to by GSI. In some cases, this function may
4957 replace the whole statement with a new one. Returns true iff folding
4958 makes any changes.
4959 The statement pointed to by GSI should be in valid gimple form but may
4960 be in unfolded state as resulting from for example constant propagation
4961 which can produce *&x = 0. */
4963 bool
4964 fold_stmt (gimple_stmt_iterator *gsi)
4966 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4969 bool
4970 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4972 return fold_stmt_1 (gsi, false, valueize);
4975 /* Perform the minimal folding on statement *GSI. Only operations like
4976 *&x created by constant propagation are handled. The statement cannot
4977 be replaced with a new one. Return true if the statement was
4978 changed, false otherwise.
4979 The statement *GSI should be in valid gimple form but may
4980 be in unfolded state as resulting from for example constant propagation
4981 which can produce *&x = 0. */
4983 bool
4984 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4986 gimple *stmt = gsi_stmt (*gsi);
4987 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4988 gcc_assert (gsi_stmt (*gsi) == stmt);
4989 return changed;
4992 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4993 if EXPR is null or we don't know how.
4994 If non-null, the result always has boolean type. */
4996 static tree
4997 canonicalize_bool (tree expr, bool invert)
4999 if (!expr)
5000 return NULL_TREE;
5001 else if (invert)
5003 if (integer_nonzerop (expr))
5004 return boolean_false_node;
5005 else if (integer_zerop (expr))
5006 return boolean_true_node;
5007 else if (TREE_CODE (expr) == SSA_NAME)
5008 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5009 build_int_cst (TREE_TYPE (expr), 0));
5010 else if (COMPARISON_CLASS_P (expr))
5011 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5012 boolean_type_node,
5013 TREE_OPERAND (expr, 0),
5014 TREE_OPERAND (expr, 1));
5015 else
5016 return NULL_TREE;
5018 else
5020 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5021 return expr;
5022 if (integer_nonzerop (expr))
5023 return boolean_true_node;
5024 else if (integer_zerop (expr))
5025 return boolean_false_node;
5026 else if (TREE_CODE (expr) == SSA_NAME)
5027 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5028 build_int_cst (TREE_TYPE (expr), 0));
5029 else if (COMPARISON_CLASS_P (expr))
5030 return fold_build2 (TREE_CODE (expr),
5031 boolean_type_node,
5032 TREE_OPERAND (expr, 0),
5033 TREE_OPERAND (expr, 1));
5034 else
5035 return NULL_TREE;
5039 /* Check to see if a boolean expression EXPR is logically equivalent to the
5040 comparison (OP1 CODE OP2). Check for various identities involving
5041 SSA_NAMEs. */
5043 static bool
5044 same_bool_comparison_p (const_tree expr, enum tree_code code,
5045 const_tree op1, const_tree op2)
5047 gimple *s;
5049 /* The obvious case. */
5050 if (TREE_CODE (expr) == code
5051 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5052 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5053 return true;
5055 /* Check for comparing (name, name != 0) and the case where expr
5056 is an SSA_NAME with a definition matching the comparison. */
5057 if (TREE_CODE (expr) == SSA_NAME
5058 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5060 if (operand_equal_p (expr, op1, 0))
5061 return ((code == NE_EXPR && integer_zerop (op2))
5062 || (code == EQ_EXPR && integer_nonzerop (op2)));
5063 s = SSA_NAME_DEF_STMT (expr);
5064 if (is_gimple_assign (s)
5065 && gimple_assign_rhs_code (s) == code
5066 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5067 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5068 return true;
5071 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5072 of name is a comparison, recurse. */
5073 if (TREE_CODE (op1) == SSA_NAME
5074 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5076 s = SSA_NAME_DEF_STMT (op1);
5077 if (is_gimple_assign (s)
5078 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5080 enum tree_code c = gimple_assign_rhs_code (s);
5081 if ((c == NE_EXPR && integer_zerop (op2))
5082 || (c == EQ_EXPR && integer_nonzerop (op2)))
5083 return same_bool_comparison_p (expr, c,
5084 gimple_assign_rhs1 (s),
5085 gimple_assign_rhs2 (s));
5086 if ((c == EQ_EXPR && integer_zerop (op2))
5087 || (c == NE_EXPR && integer_nonzerop (op2)))
5088 return same_bool_comparison_p (expr,
5089 invert_tree_comparison (c, false),
5090 gimple_assign_rhs1 (s),
5091 gimple_assign_rhs2 (s));
5094 return false;
5097 /* Check to see if two boolean expressions OP1 and OP2 are logically
5098 equivalent. */
5100 static bool
5101 same_bool_result_p (const_tree op1, const_tree op2)
5103 /* Simple cases first. */
5104 if (operand_equal_p (op1, op2, 0))
5105 return true;
5107 /* Check the cases where at least one of the operands is a comparison.
5108 These are a bit smarter than operand_equal_p in that they apply some
5109 identifies on SSA_NAMEs. */
5110 if (COMPARISON_CLASS_P (op2)
5111 && same_bool_comparison_p (op1, TREE_CODE (op2),
5112 TREE_OPERAND (op2, 0),
5113 TREE_OPERAND (op2, 1)))
5114 return true;
5115 if (COMPARISON_CLASS_P (op1)
5116 && same_bool_comparison_p (op2, TREE_CODE (op1),
5117 TREE_OPERAND (op1, 0),
5118 TREE_OPERAND (op1, 1)))
5119 return true;
5121 /* Default case. */
5122 return false;
5125 /* Forward declarations for some mutually recursive functions. */
5127 static tree
5128 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5129 enum tree_code code2, tree op2a, tree op2b);
5130 static tree
5131 and_var_with_comparison (tree var, bool invert,
5132 enum tree_code code2, tree op2a, tree op2b);
5133 static tree
5134 and_var_with_comparison_1 (gimple *stmt,
5135 enum tree_code code2, tree op2a, tree op2b);
5136 static tree
5137 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5138 enum tree_code code2, tree op2a, tree op2b);
5139 static tree
5140 or_var_with_comparison (tree var, bool invert,
5141 enum tree_code code2, tree op2a, tree op2b);
5142 static tree
5143 or_var_with_comparison_1 (gimple *stmt,
5144 enum tree_code code2, tree op2a, tree op2b);
5146 /* Helper function for and_comparisons_1: try to simplify the AND of the
5147 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5148 If INVERT is true, invert the value of the VAR before doing the AND.
5149 Return NULL_EXPR if we can't simplify this to a single expression. */
5151 static tree
5152 and_var_with_comparison (tree var, bool invert,
5153 enum tree_code code2, tree op2a, tree op2b)
5155 tree t;
5156 gimple *stmt = SSA_NAME_DEF_STMT (var);
5158 /* We can only deal with variables whose definitions are assignments. */
5159 if (!is_gimple_assign (stmt))
5160 return NULL_TREE;
5162 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5163 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5164 Then we only have to consider the simpler non-inverted cases. */
5165 if (invert)
5166 t = or_var_with_comparison_1 (stmt,
5167 invert_tree_comparison (code2, false),
5168 op2a, op2b);
5169 else
5170 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5171 return canonicalize_bool (t, invert);
5174 /* Try to simplify the AND of the ssa variable defined by the assignment
5175 STMT with the comparison specified by (OP2A CODE2 OP2B).
5176 Return NULL_EXPR if we can't simplify this to a single expression. */
5178 static tree
5179 and_var_with_comparison_1 (gimple *stmt,
5180 enum tree_code code2, tree op2a, tree op2b)
5182 tree var = gimple_assign_lhs (stmt);
5183 tree true_test_var = NULL_TREE;
5184 tree false_test_var = NULL_TREE;
5185 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5187 /* Check for identities like (var AND (var == 0)) => false. */
5188 if (TREE_CODE (op2a) == SSA_NAME
5189 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5191 if ((code2 == NE_EXPR && integer_zerop (op2b))
5192 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5194 true_test_var = op2a;
5195 if (var == true_test_var)
5196 return var;
5198 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5199 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5201 false_test_var = op2a;
5202 if (var == false_test_var)
5203 return boolean_false_node;
5207 /* If the definition is a comparison, recurse on it. */
5208 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5210 tree t = and_comparisons_1 (innercode,
5211 gimple_assign_rhs1 (stmt),
5212 gimple_assign_rhs2 (stmt),
5213 code2,
5214 op2a,
5215 op2b);
5216 if (t)
5217 return t;
5220 /* If the definition is an AND or OR expression, we may be able to
5221 simplify by reassociating. */
5222 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5223 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5225 tree inner1 = gimple_assign_rhs1 (stmt);
5226 tree inner2 = gimple_assign_rhs2 (stmt);
5227 gimple *s;
5228 tree t;
5229 tree partial = NULL_TREE;
5230 bool is_and = (innercode == BIT_AND_EXPR);
5232 /* Check for boolean identities that don't require recursive examination
5233 of inner1/inner2:
5234 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5235 inner1 AND (inner1 OR inner2) => inner1
5236 !inner1 AND (inner1 AND inner2) => false
5237 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5238 Likewise for similar cases involving inner2. */
5239 if (inner1 == true_test_var)
5240 return (is_and ? var : inner1);
5241 else if (inner2 == true_test_var)
5242 return (is_and ? var : inner2);
5243 else if (inner1 == false_test_var)
5244 return (is_and
5245 ? boolean_false_node
5246 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5247 else if (inner2 == false_test_var)
5248 return (is_and
5249 ? boolean_false_node
5250 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5252 /* Next, redistribute/reassociate the AND across the inner tests.
5253 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5254 if (TREE_CODE (inner1) == SSA_NAME
5255 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5256 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5257 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5258 gimple_assign_rhs1 (s),
5259 gimple_assign_rhs2 (s),
5260 code2, op2a, op2b)))
5262 /* Handle the AND case, where we are reassociating:
5263 (inner1 AND inner2) AND (op2a code2 op2b)
5264 => (t AND inner2)
5265 If the partial result t is a constant, we win. Otherwise
5266 continue on to try reassociating with the other inner test. */
5267 if (is_and)
5269 if (integer_onep (t))
5270 return inner2;
5271 else if (integer_zerop (t))
5272 return boolean_false_node;
5275 /* Handle the OR case, where we are redistributing:
5276 (inner1 OR inner2) AND (op2a code2 op2b)
5277 => (t OR (inner2 AND (op2a code2 op2b))) */
5278 else if (integer_onep (t))
5279 return boolean_true_node;
5281 /* Save partial result for later. */
5282 partial = t;
5285 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5286 if (TREE_CODE (inner2) == SSA_NAME
5287 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5288 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5289 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5290 gimple_assign_rhs1 (s),
5291 gimple_assign_rhs2 (s),
5292 code2, op2a, op2b)))
5294 /* Handle the AND case, where we are reassociating:
5295 (inner1 AND inner2) AND (op2a code2 op2b)
5296 => (inner1 AND t) */
5297 if (is_and)
5299 if (integer_onep (t))
5300 return inner1;
5301 else if (integer_zerop (t))
5302 return boolean_false_node;
5303 /* If both are the same, we can apply the identity
5304 (x AND x) == x. */
5305 else if (partial && same_bool_result_p (t, partial))
5306 return t;
5309 /* Handle the OR case. where we are redistributing:
5310 (inner1 OR inner2) AND (op2a code2 op2b)
5311 => (t OR (inner1 AND (op2a code2 op2b)))
5312 => (t OR partial) */
5313 else
5315 if (integer_onep (t))
5316 return boolean_true_node;
5317 else if (partial)
5319 /* We already got a simplification for the other
5320 operand to the redistributed OR expression. The
5321 interesting case is when at least one is false.
5322 Or, if both are the same, we can apply the identity
5323 (x OR x) == x. */
5324 if (integer_zerop (partial))
5325 return t;
5326 else if (integer_zerop (t))
5327 return partial;
5328 else if (same_bool_result_p (t, partial))
5329 return t;
5334 return NULL_TREE;
5337 /* Try to simplify the AND of two comparisons defined by
5338 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5339 If this can be done without constructing an intermediate value,
5340 return the resulting tree; otherwise NULL_TREE is returned.
5341 This function is deliberately asymmetric as it recurses on SSA_DEFs
5342 in the first comparison but not the second. */
5344 static tree
5345 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5346 enum tree_code code2, tree op2a, tree op2b)
5348 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5350 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5351 if (operand_equal_p (op1a, op2a, 0)
5352 && operand_equal_p (op1b, op2b, 0))
5354 /* Result will be either NULL_TREE, or a combined comparison. */
5355 tree t = combine_comparisons (UNKNOWN_LOCATION,
5356 TRUTH_ANDIF_EXPR, code1, code2,
5357 truth_type, op1a, op1b);
5358 if (t)
5359 return t;
5362 /* Likewise the swapped case of the above. */
5363 if (operand_equal_p (op1a, op2b, 0)
5364 && operand_equal_p (op1b, op2a, 0))
5366 /* Result will be either NULL_TREE, or a combined comparison. */
5367 tree t = combine_comparisons (UNKNOWN_LOCATION,
5368 TRUTH_ANDIF_EXPR, code1,
5369 swap_tree_comparison (code2),
5370 truth_type, op1a, op1b);
5371 if (t)
5372 return t;
5375 /* If both comparisons are of the same value against constants, we might
5376 be able to merge them. */
5377 if (operand_equal_p (op1a, op2a, 0)
5378 && TREE_CODE (op1b) == INTEGER_CST
5379 && TREE_CODE (op2b) == INTEGER_CST)
5381 int cmp = tree_int_cst_compare (op1b, op2b);
5383 /* If we have (op1a == op1b), we should either be able to
5384 return that or FALSE, depending on whether the constant op1b
5385 also satisfies the other comparison against op2b. */
5386 if (code1 == EQ_EXPR)
5388 bool done = true;
5389 bool val;
5390 switch (code2)
5392 case EQ_EXPR: val = (cmp == 0); break;
5393 case NE_EXPR: val = (cmp != 0); break;
5394 case LT_EXPR: val = (cmp < 0); break;
5395 case GT_EXPR: val = (cmp > 0); break;
5396 case LE_EXPR: val = (cmp <= 0); break;
5397 case GE_EXPR: val = (cmp >= 0); break;
5398 default: done = false;
5400 if (done)
5402 if (val)
5403 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5404 else
5405 return boolean_false_node;
5408 /* Likewise if the second comparison is an == comparison. */
5409 else if (code2 == EQ_EXPR)
5411 bool done = true;
5412 bool val;
5413 switch (code1)
5415 case EQ_EXPR: val = (cmp == 0); break;
5416 case NE_EXPR: val = (cmp != 0); break;
5417 case LT_EXPR: val = (cmp > 0); break;
5418 case GT_EXPR: val = (cmp < 0); break;
5419 case LE_EXPR: val = (cmp >= 0); break;
5420 case GE_EXPR: val = (cmp <= 0); break;
5421 default: done = false;
5423 if (done)
5425 if (val)
5426 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5427 else
5428 return boolean_false_node;
5432 /* Same business with inequality tests. */
5433 else if (code1 == NE_EXPR)
5435 bool val;
5436 switch (code2)
5438 case EQ_EXPR: val = (cmp != 0); break;
5439 case NE_EXPR: val = (cmp == 0); break;
5440 case LT_EXPR: val = (cmp >= 0); break;
5441 case GT_EXPR: val = (cmp <= 0); break;
5442 case LE_EXPR: val = (cmp > 0); break;
5443 case GE_EXPR: val = (cmp < 0); break;
5444 default:
5445 val = false;
5447 if (val)
5448 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5450 else if (code2 == NE_EXPR)
5452 bool val;
5453 switch (code1)
5455 case EQ_EXPR: val = (cmp == 0); break;
5456 case NE_EXPR: val = (cmp != 0); break;
5457 case LT_EXPR: val = (cmp <= 0); break;
5458 case GT_EXPR: val = (cmp >= 0); break;
5459 case LE_EXPR: val = (cmp < 0); break;
5460 case GE_EXPR: val = (cmp > 0); break;
5461 default:
5462 val = false;
5464 if (val)
5465 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5468 /* Chose the more restrictive of two < or <= comparisons. */
5469 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5470 && (code2 == LT_EXPR || code2 == LE_EXPR))
5472 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5473 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5474 else
5475 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5478 /* Likewise chose the more restrictive of two > or >= comparisons. */
5479 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5480 && (code2 == GT_EXPR || code2 == GE_EXPR))
5482 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5483 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5484 else
5485 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5488 /* Check for singleton ranges. */
5489 else if (cmp == 0
5490 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5491 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5492 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5494 /* Check for disjoint ranges. */
5495 else if (cmp <= 0
5496 && (code1 == LT_EXPR || code1 == LE_EXPR)
5497 && (code2 == GT_EXPR || code2 == GE_EXPR))
5498 return boolean_false_node;
5499 else if (cmp >= 0
5500 && (code1 == GT_EXPR || code1 == GE_EXPR)
5501 && (code2 == LT_EXPR || code2 == LE_EXPR))
5502 return boolean_false_node;
5505 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5506 NAME's definition is a truth value. See if there are any simplifications
5507 that can be done against the NAME's definition. */
5508 if (TREE_CODE (op1a) == SSA_NAME
5509 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5510 && (integer_zerop (op1b) || integer_onep (op1b)))
5512 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5513 || (code1 == NE_EXPR && integer_onep (op1b)));
5514 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5515 switch (gimple_code (stmt))
5517 case GIMPLE_ASSIGN:
5518 /* Try to simplify by copy-propagating the definition. */
5519 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5521 case GIMPLE_PHI:
5522 /* If every argument to the PHI produces the same result when
5523 ANDed with the second comparison, we win.
5524 Do not do this unless the type is bool since we need a bool
5525 result here anyway. */
5526 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5528 tree result = NULL_TREE;
5529 unsigned i;
5530 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5532 tree arg = gimple_phi_arg_def (stmt, i);
5534 /* If this PHI has itself as an argument, ignore it.
5535 If all the other args produce the same result,
5536 we're still OK. */
5537 if (arg == gimple_phi_result (stmt))
5538 continue;
5539 else if (TREE_CODE (arg) == INTEGER_CST)
5541 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5543 if (!result)
5544 result = boolean_false_node;
5545 else if (!integer_zerop (result))
5546 return NULL_TREE;
5548 else if (!result)
5549 result = fold_build2 (code2, boolean_type_node,
5550 op2a, op2b);
5551 else if (!same_bool_comparison_p (result,
5552 code2, op2a, op2b))
5553 return NULL_TREE;
5555 else if (TREE_CODE (arg) == SSA_NAME
5556 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5558 tree temp;
5559 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5560 /* In simple cases we can look through PHI nodes,
5561 but we have to be careful with loops.
5562 See PR49073. */
5563 if (! dom_info_available_p (CDI_DOMINATORS)
5564 || gimple_bb (def_stmt) == gimple_bb (stmt)
5565 || dominated_by_p (CDI_DOMINATORS,
5566 gimple_bb (def_stmt),
5567 gimple_bb (stmt)))
5568 return NULL_TREE;
5569 temp = and_var_with_comparison (arg, invert, code2,
5570 op2a, op2b);
5571 if (!temp)
5572 return NULL_TREE;
5573 else if (!result)
5574 result = temp;
5575 else if (!same_bool_result_p (result, temp))
5576 return NULL_TREE;
5578 else
5579 return NULL_TREE;
5581 return result;
5584 default:
5585 break;
5588 return NULL_TREE;
5591 /* Try to simplify the AND of two comparisons, specified by
5592 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5593 If this can be simplified to a single expression (without requiring
5594 introducing more SSA variables to hold intermediate values),
5595 return the resulting tree. Otherwise return NULL_TREE.
5596 If the result expression is non-null, it has boolean type. */
5598 tree
5599 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5600 enum tree_code code2, tree op2a, tree op2b)
5602 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5603 if (t)
5604 return t;
5605 else
5606 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5609 /* Helper function for or_comparisons_1: try to simplify the OR of the
5610 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5611 If INVERT is true, invert the value of VAR before doing the OR.
5612 Return NULL_EXPR if we can't simplify this to a single expression. */
5614 static tree
5615 or_var_with_comparison (tree var, bool invert,
5616 enum tree_code code2, tree op2a, tree op2b)
5618 tree t;
5619 gimple *stmt = SSA_NAME_DEF_STMT (var);
5621 /* We can only deal with variables whose definitions are assignments. */
5622 if (!is_gimple_assign (stmt))
5623 return NULL_TREE;
5625 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5626 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5627 Then we only have to consider the simpler non-inverted cases. */
5628 if (invert)
5629 t = and_var_with_comparison_1 (stmt,
5630 invert_tree_comparison (code2, false),
5631 op2a, op2b);
5632 else
5633 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5634 return canonicalize_bool (t, invert);
5637 /* Try to simplify the OR of the ssa variable defined by the assignment
5638 STMT with the comparison specified by (OP2A CODE2 OP2B).
5639 Return NULL_EXPR if we can't simplify this to a single expression. */
5641 static tree
5642 or_var_with_comparison_1 (gimple *stmt,
5643 enum tree_code code2, tree op2a, tree op2b)
5645 tree var = gimple_assign_lhs (stmt);
5646 tree true_test_var = NULL_TREE;
5647 tree false_test_var = NULL_TREE;
5648 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5650 /* Check for identities like (var OR (var != 0)) => true . */
5651 if (TREE_CODE (op2a) == SSA_NAME
5652 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5654 if ((code2 == NE_EXPR && integer_zerop (op2b))
5655 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5657 true_test_var = op2a;
5658 if (var == true_test_var)
5659 return var;
5661 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5662 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5664 false_test_var = op2a;
5665 if (var == false_test_var)
5666 return boolean_true_node;
5670 /* If the definition is a comparison, recurse on it. */
5671 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5673 tree t = or_comparisons_1 (innercode,
5674 gimple_assign_rhs1 (stmt),
5675 gimple_assign_rhs2 (stmt),
5676 code2,
5677 op2a,
5678 op2b);
5679 if (t)
5680 return t;
5683 /* If the definition is an AND or OR expression, we may be able to
5684 simplify by reassociating. */
5685 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5686 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5688 tree inner1 = gimple_assign_rhs1 (stmt);
5689 tree inner2 = gimple_assign_rhs2 (stmt);
5690 gimple *s;
5691 tree t;
5692 tree partial = NULL_TREE;
5693 bool is_or = (innercode == BIT_IOR_EXPR);
5695 /* Check for boolean identities that don't require recursive examination
5696 of inner1/inner2:
5697 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5698 inner1 OR (inner1 AND inner2) => inner1
5699 !inner1 OR (inner1 OR inner2) => true
5700 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5702 if (inner1 == true_test_var)
5703 return (is_or ? var : inner1);
5704 else if (inner2 == true_test_var)
5705 return (is_or ? var : inner2);
5706 else if (inner1 == false_test_var)
5707 return (is_or
5708 ? boolean_true_node
5709 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5710 else if (inner2 == false_test_var)
5711 return (is_or
5712 ? boolean_true_node
5713 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5715 /* Next, redistribute/reassociate the OR across the inner tests.
5716 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5717 if (TREE_CODE (inner1) == SSA_NAME
5718 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5719 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5720 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5721 gimple_assign_rhs1 (s),
5722 gimple_assign_rhs2 (s),
5723 code2, op2a, op2b)))
5725 /* Handle the OR case, where we are reassociating:
5726 (inner1 OR inner2) OR (op2a code2 op2b)
5727 => (t OR inner2)
5728 If the partial result t is a constant, we win. Otherwise
5729 continue on to try reassociating with the other inner test. */
5730 if (is_or)
5732 if (integer_onep (t))
5733 return boolean_true_node;
5734 else if (integer_zerop (t))
5735 return inner2;
5738 /* Handle the AND case, where we are redistributing:
5739 (inner1 AND inner2) OR (op2a code2 op2b)
5740 => (t AND (inner2 OR (op2a code op2b))) */
5741 else if (integer_zerop (t))
5742 return boolean_false_node;
5744 /* Save partial result for later. */
5745 partial = t;
5748 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5749 if (TREE_CODE (inner2) == SSA_NAME
5750 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5751 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5752 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5753 gimple_assign_rhs1 (s),
5754 gimple_assign_rhs2 (s),
5755 code2, op2a, op2b)))
5757 /* Handle the OR case, where we are reassociating:
5758 (inner1 OR inner2) OR (op2a code2 op2b)
5759 => (inner1 OR t)
5760 => (t OR partial) */
5761 if (is_or)
5763 if (integer_zerop (t))
5764 return inner1;
5765 else if (integer_onep (t))
5766 return boolean_true_node;
5767 /* If both are the same, we can apply the identity
5768 (x OR x) == x. */
5769 else if (partial && same_bool_result_p (t, partial))
5770 return t;
5773 /* Handle the AND case, where we are redistributing:
5774 (inner1 AND inner2) OR (op2a code2 op2b)
5775 => (t AND (inner1 OR (op2a code2 op2b)))
5776 => (t AND partial) */
5777 else
5779 if (integer_zerop (t))
5780 return boolean_false_node;
5781 else if (partial)
5783 /* We already got a simplification for the other
5784 operand to the redistributed AND expression. The
5785 interesting case is when at least one is true.
5786 Or, if both are the same, we can apply the identity
5787 (x AND x) == x. */
5788 if (integer_onep (partial))
5789 return t;
5790 else if (integer_onep (t))
5791 return partial;
5792 else if (same_bool_result_p (t, partial))
5793 return t;
5798 return NULL_TREE;
5801 /* Try to simplify the OR of two comparisons defined by
5802 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5803 If this can be done without constructing an intermediate value,
5804 return the resulting tree; otherwise NULL_TREE is returned.
5805 This function is deliberately asymmetric as it recurses on SSA_DEFs
5806 in the first comparison but not the second. */
5808 static tree
5809 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5810 enum tree_code code2, tree op2a, tree op2b)
5812 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5814 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5815 if (operand_equal_p (op1a, op2a, 0)
5816 && operand_equal_p (op1b, op2b, 0))
5818 /* Result will be either NULL_TREE, or a combined comparison. */
5819 tree t = combine_comparisons (UNKNOWN_LOCATION,
5820 TRUTH_ORIF_EXPR, code1, code2,
5821 truth_type, op1a, op1b);
5822 if (t)
5823 return t;
5826 /* Likewise the swapped case of the above. */
5827 if (operand_equal_p (op1a, op2b, 0)
5828 && operand_equal_p (op1b, op2a, 0))
5830 /* Result will be either NULL_TREE, or a combined comparison. */
5831 tree t = combine_comparisons (UNKNOWN_LOCATION,
5832 TRUTH_ORIF_EXPR, code1,
5833 swap_tree_comparison (code2),
5834 truth_type, op1a, op1b);
5835 if (t)
5836 return t;
5839 /* If both comparisons are of the same value against constants, we might
5840 be able to merge them. */
5841 if (operand_equal_p (op1a, op2a, 0)
5842 && TREE_CODE (op1b) == INTEGER_CST
5843 && TREE_CODE (op2b) == INTEGER_CST)
5845 int cmp = tree_int_cst_compare (op1b, op2b);
5847 /* If we have (op1a != op1b), we should either be able to
5848 return that or TRUE, depending on whether the constant op1b
5849 also satisfies the other comparison against op2b. */
5850 if (code1 == NE_EXPR)
5852 bool done = true;
5853 bool val;
5854 switch (code2)
5856 case EQ_EXPR: val = (cmp == 0); break;
5857 case NE_EXPR: val = (cmp != 0); break;
5858 case LT_EXPR: val = (cmp < 0); break;
5859 case GT_EXPR: val = (cmp > 0); break;
5860 case LE_EXPR: val = (cmp <= 0); break;
5861 case GE_EXPR: val = (cmp >= 0); break;
5862 default: done = false;
5864 if (done)
5866 if (val)
5867 return boolean_true_node;
5868 else
5869 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5872 /* Likewise if the second comparison is a != comparison. */
5873 else if (code2 == NE_EXPR)
5875 bool done = true;
5876 bool val;
5877 switch (code1)
5879 case EQ_EXPR: val = (cmp == 0); break;
5880 case NE_EXPR: val = (cmp != 0); break;
5881 case LT_EXPR: val = (cmp > 0); break;
5882 case GT_EXPR: val = (cmp < 0); break;
5883 case LE_EXPR: val = (cmp >= 0); break;
5884 case GE_EXPR: val = (cmp <= 0); break;
5885 default: done = false;
5887 if (done)
5889 if (val)
5890 return boolean_true_node;
5891 else
5892 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5896 /* See if an equality test is redundant with the other comparison. */
5897 else if (code1 == EQ_EXPR)
5899 bool val;
5900 switch (code2)
5902 case EQ_EXPR: val = (cmp == 0); break;
5903 case NE_EXPR: val = (cmp != 0); break;
5904 case LT_EXPR: val = (cmp < 0); break;
5905 case GT_EXPR: val = (cmp > 0); break;
5906 case LE_EXPR: val = (cmp <= 0); break;
5907 case GE_EXPR: val = (cmp >= 0); break;
5908 default:
5909 val = false;
5911 if (val)
5912 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5914 else if (code2 == EQ_EXPR)
5916 bool val;
5917 switch (code1)
5919 case EQ_EXPR: val = (cmp == 0); break;
5920 case NE_EXPR: val = (cmp != 0); break;
5921 case LT_EXPR: val = (cmp > 0); break;
5922 case GT_EXPR: val = (cmp < 0); break;
5923 case LE_EXPR: val = (cmp >= 0); break;
5924 case GE_EXPR: val = (cmp <= 0); break;
5925 default:
5926 val = false;
5928 if (val)
5929 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5932 /* Chose the less restrictive of two < or <= comparisons. */
5933 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5934 && (code2 == LT_EXPR || code2 == LE_EXPR))
5936 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5937 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5938 else
5939 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5942 /* Likewise chose the less restrictive of two > or >= comparisons. */
5943 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5944 && (code2 == GT_EXPR || code2 == GE_EXPR))
5946 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5947 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5948 else
5949 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5952 /* Check for singleton ranges. */
5953 else if (cmp == 0
5954 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5955 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5956 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5958 /* Check for less/greater pairs that don't restrict the range at all. */
5959 else if (cmp >= 0
5960 && (code1 == LT_EXPR || code1 == LE_EXPR)
5961 && (code2 == GT_EXPR || code2 == GE_EXPR))
5962 return boolean_true_node;
5963 else if (cmp <= 0
5964 && (code1 == GT_EXPR || code1 == GE_EXPR)
5965 && (code2 == LT_EXPR || code2 == LE_EXPR))
5966 return boolean_true_node;
5969 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5970 NAME's definition is a truth value. See if there are any simplifications
5971 that can be done against the NAME's definition. */
5972 if (TREE_CODE (op1a) == SSA_NAME
5973 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5974 && (integer_zerop (op1b) || integer_onep (op1b)))
5976 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5977 || (code1 == NE_EXPR && integer_onep (op1b)));
5978 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5979 switch (gimple_code (stmt))
5981 case GIMPLE_ASSIGN:
5982 /* Try to simplify by copy-propagating the definition. */
5983 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5985 case GIMPLE_PHI:
5986 /* If every argument to the PHI produces the same result when
5987 ORed with the second comparison, we win.
5988 Do not do this unless the type is bool since we need a bool
5989 result here anyway. */
5990 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5992 tree result = NULL_TREE;
5993 unsigned i;
5994 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5996 tree arg = gimple_phi_arg_def (stmt, i);
5998 /* If this PHI has itself as an argument, ignore it.
5999 If all the other args produce the same result,
6000 we're still OK. */
6001 if (arg == gimple_phi_result (stmt))
6002 continue;
6003 else if (TREE_CODE (arg) == INTEGER_CST)
6005 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6007 if (!result)
6008 result = boolean_true_node;
6009 else if (!integer_onep (result))
6010 return NULL_TREE;
6012 else if (!result)
6013 result = fold_build2 (code2, boolean_type_node,
6014 op2a, op2b);
6015 else if (!same_bool_comparison_p (result,
6016 code2, op2a, op2b))
6017 return NULL_TREE;
6019 else if (TREE_CODE (arg) == SSA_NAME
6020 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6022 tree temp;
6023 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6024 /* In simple cases we can look through PHI nodes,
6025 but we have to be careful with loops.
6026 See PR49073. */
6027 if (! dom_info_available_p (CDI_DOMINATORS)
6028 || gimple_bb (def_stmt) == gimple_bb (stmt)
6029 || dominated_by_p (CDI_DOMINATORS,
6030 gimple_bb (def_stmt),
6031 gimple_bb (stmt)))
6032 return NULL_TREE;
6033 temp = or_var_with_comparison (arg, invert, code2,
6034 op2a, op2b);
6035 if (!temp)
6036 return NULL_TREE;
6037 else if (!result)
6038 result = temp;
6039 else if (!same_bool_result_p (result, temp))
6040 return NULL_TREE;
6042 else
6043 return NULL_TREE;
6045 return result;
6048 default:
6049 break;
6052 return NULL_TREE;
6055 /* Try to simplify the OR of two comparisons, specified by
6056 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6057 If this can be simplified to a single expression (without requiring
6058 introducing more SSA variables to hold intermediate values),
6059 return the resulting tree. Otherwise return NULL_TREE.
6060 If the result expression is non-null, it has boolean type. */
6062 tree
6063 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6064 enum tree_code code2, tree op2a, tree op2b)
6066 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6067 if (t)
6068 return t;
6069 else
6070 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6074 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6076 Either NULL_TREE, a simplified but non-constant or a constant
6077 is returned.
6079 ??? This should go into a gimple-fold-inline.h file to be eventually
6080 privatized with the single valueize function used in the various TUs
6081 to avoid the indirect function call overhead. */
6083 tree
6084 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6085 tree (*gvalueize) (tree))
6087 gimple_match_op res_op;
6088 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6089 edges if there are intermediate VARYING defs. For this reason
6090 do not follow SSA edges here even though SCCVN can technically
6091 just deal fine with that. */
6092 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6094 tree res = NULL_TREE;
6095 if (gimple_simplified_result_is_gimple_val (&res_op))
6096 res = res_op.ops[0];
6097 else if (mprts_hook)
6098 res = mprts_hook (&res_op);
6099 if (res)
6101 if (dump_file && dump_flags & TDF_DETAILS)
6103 fprintf (dump_file, "Match-and-simplified ");
6104 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6105 fprintf (dump_file, " to ");
6106 print_generic_expr (dump_file, res);
6107 fprintf (dump_file, "\n");
6109 return res;
6113 location_t loc = gimple_location (stmt);
6114 switch (gimple_code (stmt))
6116 case GIMPLE_ASSIGN:
6118 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6120 switch (get_gimple_rhs_class (subcode))
6122 case GIMPLE_SINGLE_RHS:
6124 tree rhs = gimple_assign_rhs1 (stmt);
6125 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6127 if (TREE_CODE (rhs) == SSA_NAME)
6129 /* If the RHS is an SSA_NAME, return its known constant value,
6130 if any. */
6131 return (*valueize) (rhs);
6133 /* Handle propagating invariant addresses into address
6134 operations. */
6135 else if (TREE_CODE (rhs) == ADDR_EXPR
6136 && !is_gimple_min_invariant (rhs))
6138 poly_int64 offset = 0;
6139 tree base;
6140 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6141 &offset,
6142 valueize);
6143 if (base
6144 && (CONSTANT_CLASS_P (base)
6145 || decl_address_invariant_p (base)))
6146 return build_invariant_address (TREE_TYPE (rhs),
6147 base, offset);
6149 else if (TREE_CODE (rhs) == CONSTRUCTOR
6150 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6151 && known_eq (CONSTRUCTOR_NELTS (rhs),
6152 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6154 unsigned i, nelts;
6155 tree val;
6157 nelts = CONSTRUCTOR_NELTS (rhs);
6158 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6159 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6161 val = (*valueize) (val);
6162 if (TREE_CODE (val) == INTEGER_CST
6163 || TREE_CODE (val) == REAL_CST
6164 || TREE_CODE (val) == FIXED_CST)
6165 vec.quick_push (val);
6166 else
6167 return NULL_TREE;
6170 return vec.build ();
6172 if (subcode == OBJ_TYPE_REF)
6174 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6175 /* If callee is constant, we can fold away the wrapper. */
6176 if (is_gimple_min_invariant (val))
6177 return val;
6180 if (kind == tcc_reference)
6182 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6183 || TREE_CODE (rhs) == REALPART_EXPR
6184 || TREE_CODE (rhs) == IMAGPART_EXPR)
6185 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6187 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6188 return fold_unary_loc (EXPR_LOCATION (rhs),
6189 TREE_CODE (rhs),
6190 TREE_TYPE (rhs), val);
6192 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6193 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6195 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6196 return fold_ternary_loc (EXPR_LOCATION (rhs),
6197 TREE_CODE (rhs),
6198 TREE_TYPE (rhs), val,
6199 TREE_OPERAND (rhs, 1),
6200 TREE_OPERAND (rhs, 2));
6202 else if (TREE_CODE (rhs) == MEM_REF
6203 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6205 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6206 if (TREE_CODE (val) == ADDR_EXPR
6207 && is_gimple_min_invariant (val))
6209 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6210 unshare_expr (val),
6211 TREE_OPERAND (rhs, 1));
6212 if (tem)
6213 rhs = tem;
6216 return fold_const_aggregate_ref_1 (rhs, valueize);
6218 else if (kind == tcc_declaration)
6219 return get_symbol_constant_value (rhs);
6220 return rhs;
6223 case GIMPLE_UNARY_RHS:
6224 return NULL_TREE;
6226 case GIMPLE_BINARY_RHS:
6227 /* Translate &x + CST into an invariant form suitable for
6228 further propagation. */
6229 if (subcode == POINTER_PLUS_EXPR)
6231 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6232 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6233 if (TREE_CODE (op0) == ADDR_EXPR
6234 && TREE_CODE (op1) == INTEGER_CST)
6236 tree off = fold_convert (ptr_type_node, op1);
6237 return build_fold_addr_expr_loc
6238 (loc,
6239 fold_build2 (MEM_REF,
6240 TREE_TYPE (TREE_TYPE (op0)),
6241 unshare_expr (op0), off));
6244 /* Canonicalize bool != 0 and bool == 0 appearing after
6245 valueization. While gimple_simplify handles this
6246 it can get confused by the ~X == 1 -> X == 0 transform
6247 which we cant reduce to a SSA name or a constant
6248 (and we have no way to tell gimple_simplify to not
6249 consider those transforms in the first place). */
6250 else if (subcode == EQ_EXPR
6251 || subcode == NE_EXPR)
6253 tree lhs = gimple_assign_lhs (stmt);
6254 tree op0 = gimple_assign_rhs1 (stmt);
6255 if (useless_type_conversion_p (TREE_TYPE (lhs),
6256 TREE_TYPE (op0)))
6258 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6259 op0 = (*valueize) (op0);
6260 if (TREE_CODE (op0) == INTEGER_CST)
6261 std::swap (op0, op1);
6262 if (TREE_CODE (op1) == INTEGER_CST
6263 && ((subcode == NE_EXPR && integer_zerop (op1))
6264 || (subcode == EQ_EXPR && integer_onep (op1))))
6265 return op0;
6268 return NULL_TREE;
6270 case GIMPLE_TERNARY_RHS:
6272 /* Handle ternary operators that can appear in GIMPLE form. */
6273 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6274 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6275 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6276 return fold_ternary_loc (loc, subcode,
6277 gimple_expr_type (stmt), op0, op1, op2);
6280 default:
6281 gcc_unreachable ();
6285 case GIMPLE_CALL:
6287 tree fn;
6288 gcall *call_stmt = as_a <gcall *> (stmt);
6290 if (gimple_call_internal_p (stmt))
6292 enum tree_code subcode = ERROR_MARK;
6293 switch (gimple_call_internal_fn (stmt))
6295 case IFN_UBSAN_CHECK_ADD:
6296 subcode = PLUS_EXPR;
6297 break;
6298 case IFN_UBSAN_CHECK_SUB:
6299 subcode = MINUS_EXPR;
6300 break;
6301 case IFN_UBSAN_CHECK_MUL:
6302 subcode = MULT_EXPR;
6303 break;
6304 case IFN_BUILTIN_EXPECT:
6306 tree arg0 = gimple_call_arg (stmt, 0);
6307 tree op0 = (*valueize) (arg0);
6308 if (TREE_CODE (op0) == INTEGER_CST)
6309 return op0;
6310 return NULL_TREE;
6312 default:
6313 return NULL_TREE;
6315 tree arg0 = gimple_call_arg (stmt, 0);
6316 tree arg1 = gimple_call_arg (stmt, 1);
6317 tree op0 = (*valueize) (arg0);
6318 tree op1 = (*valueize) (arg1);
6320 if (TREE_CODE (op0) != INTEGER_CST
6321 || TREE_CODE (op1) != INTEGER_CST)
6323 switch (subcode)
6325 case MULT_EXPR:
6326 /* x * 0 = 0 * x = 0 without overflow. */
6327 if (integer_zerop (op0) || integer_zerop (op1))
6328 return build_zero_cst (TREE_TYPE (arg0));
6329 break;
6330 case MINUS_EXPR:
6331 /* y - y = 0 without overflow. */
6332 if (operand_equal_p (op0, op1, 0))
6333 return build_zero_cst (TREE_TYPE (arg0));
6334 break;
6335 default:
6336 break;
6339 tree res
6340 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6341 if (res
6342 && TREE_CODE (res) == INTEGER_CST
6343 && !TREE_OVERFLOW (res))
6344 return res;
6345 return NULL_TREE;
6348 fn = (*valueize) (gimple_call_fn (stmt));
6349 if (TREE_CODE (fn) == ADDR_EXPR
6350 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6351 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6352 && gimple_builtin_call_types_compatible_p (stmt,
6353 TREE_OPERAND (fn, 0)))
6355 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6356 tree retval;
6357 unsigned i;
6358 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6359 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6360 retval = fold_builtin_call_array (loc,
6361 gimple_call_return_type (call_stmt),
6362 fn, gimple_call_num_args (stmt), args);
6363 if (retval)
6365 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6366 STRIP_NOPS (retval);
6367 retval = fold_convert (gimple_call_return_type (call_stmt),
6368 retval);
6370 return retval;
6372 return NULL_TREE;
6375 default:
6376 return NULL_TREE;
6380 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6381 Returns NULL_TREE if folding to a constant is not possible, otherwise
6382 returns a constant according to is_gimple_min_invariant. */
6384 tree
6385 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6387 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6388 if (res && is_gimple_min_invariant (res))
6389 return res;
6390 return NULL_TREE;
6394 /* The following set of functions are supposed to fold references using
6395 their constant initializers. */
6397 /* See if we can find constructor defining value of BASE.
6398 When we know the consructor with constant offset (such as
6399 base is array[40] and we do know constructor of array), then
6400 BIT_OFFSET is adjusted accordingly.
6402 As a special case, return error_mark_node when constructor
6403 is not explicitly available, but it is known to be zero
6404 such as 'static const int a;'. */
6405 static tree
6406 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6407 tree (*valueize)(tree))
6409 poly_int64 bit_offset2, size, max_size;
6410 bool reverse;
6412 if (TREE_CODE (base) == MEM_REF)
6414 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6415 if (!boff.to_shwi (bit_offset))
6416 return NULL_TREE;
6418 if (valueize
6419 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6420 base = valueize (TREE_OPERAND (base, 0));
6421 if (!base || TREE_CODE (base) != ADDR_EXPR)
6422 return NULL_TREE;
6423 base = TREE_OPERAND (base, 0);
6425 else if (valueize
6426 && TREE_CODE (base) == SSA_NAME)
6427 base = valueize (base);
6429 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6430 DECL_INITIAL. If BASE is a nested reference into another
6431 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6432 the inner reference. */
6433 switch (TREE_CODE (base))
6435 case VAR_DECL:
6436 case CONST_DECL:
6438 tree init = ctor_for_folding (base);
6440 /* Our semantic is exact opposite of ctor_for_folding;
6441 NULL means unknown, while error_mark_node is 0. */
6442 if (init == error_mark_node)
6443 return NULL_TREE;
6444 if (!init)
6445 return error_mark_node;
6446 return init;
6449 case VIEW_CONVERT_EXPR:
6450 return get_base_constructor (TREE_OPERAND (base, 0),
6451 bit_offset, valueize);
6453 case ARRAY_REF:
6454 case COMPONENT_REF:
6455 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6456 &reverse);
6457 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6458 return NULL_TREE;
6459 *bit_offset += bit_offset2;
6460 return get_base_constructor (base, bit_offset, valueize);
6462 case CONSTRUCTOR:
6463 return base;
6465 default:
6466 if (CONSTANT_CLASS_P (base))
6467 return base;
6469 return NULL_TREE;
6473 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6474 to the memory at bit OFFSET. When non-null, TYPE is the expected
6475 type of the reference; otherwise the type of the referenced element
6476 is used instead. When SIZE is zero, attempt to fold a reference to
6477 the entire element which OFFSET refers to. Increment *SUBOFF by
6478 the bit offset of the accessed element. */
6480 static tree
6481 fold_array_ctor_reference (tree type, tree ctor,
6482 unsigned HOST_WIDE_INT offset,
6483 unsigned HOST_WIDE_INT size,
6484 tree from_decl,
6485 unsigned HOST_WIDE_INT *suboff)
6487 offset_int low_bound;
6488 offset_int elt_size;
6489 offset_int access_index;
6490 tree domain_type = NULL_TREE;
6491 HOST_WIDE_INT inner_offset;
6493 /* Compute low bound and elt size. */
6494 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6495 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6496 if (domain_type && TYPE_MIN_VALUE (domain_type))
6498 /* Static constructors for variably sized objects makes no sense. */
6499 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6500 return NULL_TREE;
6501 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6503 else
6504 low_bound = 0;
6505 /* Static constructors for variably sized objects makes no sense. */
6506 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6507 return NULL_TREE;
6508 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6510 /* When TYPE is non-null, verify that it specifies a constant-sized
6511 accessed not larger than size of array element. */
6512 if (type
6513 && (!TYPE_SIZE_UNIT (type)
6514 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6515 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6516 || elt_size == 0))
6517 return NULL_TREE;
6519 /* Compute the array index we look for. */
6520 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6521 elt_size);
6522 access_index += low_bound;
6524 /* And offset within the access. */
6525 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6527 /* See if the array field is large enough to span whole access. We do not
6528 care to fold accesses spanning multiple array indexes. */
6529 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6530 return NULL_TREE;
6531 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6533 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6535 /* For the final reference to the entire accessed element
6536 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6537 may be null) in favor of the type of the element, and set
6538 SIZE to the size of the accessed element. */
6539 inner_offset = 0;
6540 type = TREE_TYPE (val);
6541 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6544 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6545 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6546 suboff);
6549 /* Memory not explicitly mentioned in constructor is 0 (or
6550 the reference is out of range). */
6551 return type ? build_zero_cst (type) : NULL_TREE;
6554 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6555 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6556 is the expected type of the reference; otherwise the type of
6557 the referenced member is used instead. When SIZE is zero,
6558 attempt to fold a reference to the entire member which OFFSET
6559 refers to; in this case. Increment *SUBOFF by the bit offset
6560 of the accessed member. */
6562 static tree
6563 fold_nonarray_ctor_reference (tree type, tree ctor,
6564 unsigned HOST_WIDE_INT offset,
6565 unsigned HOST_WIDE_INT size,
6566 tree from_decl,
6567 unsigned HOST_WIDE_INT *suboff)
6569 unsigned HOST_WIDE_INT cnt;
6570 tree cfield, cval;
6572 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6573 cval)
6575 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6576 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6577 tree field_size = DECL_SIZE (cfield);
6579 if (!field_size)
6581 /* Determine the size of the flexible array member from
6582 the size of the initializer provided for it. */
6583 field_size = TYPE_SIZE (TREE_TYPE (cval));
6586 /* Variable sized objects in static constructors makes no sense,
6587 but field_size can be NULL for flexible array members. */
6588 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6589 && TREE_CODE (byte_offset) == INTEGER_CST
6590 && (field_size != NULL_TREE
6591 ? TREE_CODE (field_size) == INTEGER_CST
6592 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6594 /* Compute bit offset of the field. */
6595 offset_int bitoffset
6596 = (wi::to_offset (field_offset)
6597 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6598 /* Compute bit offset where the field ends. */
6599 offset_int bitoffset_end;
6600 if (field_size != NULL_TREE)
6601 bitoffset_end = bitoffset + wi::to_offset (field_size);
6602 else
6603 bitoffset_end = 0;
6605 /* Compute the bit offset of the end of the desired access.
6606 As a special case, if the size of the desired access is
6607 zero, assume the access is to the entire field (and let
6608 the caller make any necessary adjustments by storing
6609 the actual bounds of the field in FIELDBOUNDS). */
6610 offset_int access_end = offset_int (offset);
6611 if (size)
6612 access_end += size;
6613 else
6614 access_end = bitoffset_end;
6616 /* Is there any overlap between the desired access at
6617 [OFFSET, OFFSET+SIZE) and the offset of the field within
6618 the object at [BITOFFSET, BITOFFSET_END)? */
6619 if (wi::cmps (access_end, bitoffset) > 0
6620 && (field_size == NULL_TREE
6621 || wi::lts_p (offset, bitoffset_end)))
6623 *suboff += bitoffset.to_uhwi ();
6625 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6627 /* For the final reference to the entire accessed member
6628 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6629 be null) in favor of the type of the member, and set
6630 SIZE to the size of the accessed member. */
6631 offset = bitoffset.to_uhwi ();
6632 type = TREE_TYPE (cval);
6633 size = (bitoffset_end - bitoffset).to_uhwi ();
6636 /* We do have overlap. Now see if the field is large enough
6637 to cover the access. Give up for accesses that extend
6638 beyond the end of the object or that span multiple fields. */
6639 if (wi::cmps (access_end, bitoffset_end) > 0)
6640 return NULL_TREE;
6641 if (offset < bitoffset)
6642 return NULL_TREE;
6644 offset_int inner_offset = offset_int (offset) - bitoffset;
6645 return fold_ctor_reference (type, cval,
6646 inner_offset.to_uhwi (), size,
6647 from_decl, suboff);
6650 /* Memory not explicitly mentioned in constructor is 0. */
6651 return type ? build_zero_cst (type) : NULL_TREE;
6654 /* CTOR is value initializing memory. Fold a reference of TYPE and
6655 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6656 is zero, attempt to fold a reference to the entire subobject
6657 which OFFSET refers to. This is used when folding accesses to
6658 string members of aggregates. When non-null, set *SUBOFF to
6659 the bit offset of the accessed subobject. */
6661 tree
6662 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6663 const poly_uint64 &poly_size, tree from_decl,
6664 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6666 tree ret;
6668 /* We found the field with exact match. */
6669 if (type
6670 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6671 && known_eq (poly_offset, 0U))
6672 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6674 /* The remaining optimizations need a constant size and offset. */
6675 unsigned HOST_WIDE_INT size, offset;
6676 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6677 return NULL_TREE;
6679 /* We are at the end of walk, see if we can view convert the
6680 result. */
6681 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6682 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6683 && !compare_tree_int (TYPE_SIZE (type), size)
6684 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6686 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6687 if (ret)
6689 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6690 if (ret)
6691 STRIP_USELESS_TYPE_CONVERSION (ret);
6693 return ret;
6695 /* For constants and byte-aligned/sized reads try to go through
6696 native_encode/interpret. */
6697 if (CONSTANT_CLASS_P (ctor)
6698 && BITS_PER_UNIT == 8
6699 && offset % BITS_PER_UNIT == 0
6700 && size % BITS_PER_UNIT == 0
6701 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6703 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6704 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6705 offset / BITS_PER_UNIT);
6706 if (len > 0)
6707 return native_interpret_expr (type, buf, len);
6709 if (TREE_CODE (ctor) == CONSTRUCTOR)
6711 unsigned HOST_WIDE_INT dummy = 0;
6712 if (!suboff)
6713 suboff = &dummy;
6715 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6716 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6717 return fold_array_ctor_reference (type, ctor, offset, size,
6718 from_decl, suboff);
6720 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6721 from_decl, suboff);
6724 return NULL_TREE;
6727 /* Return the tree representing the element referenced by T if T is an
6728 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6729 names using VALUEIZE. Return NULL_TREE otherwise. */
6731 tree
6732 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6734 tree ctor, idx, base;
6735 poly_int64 offset, size, max_size;
6736 tree tem;
6737 bool reverse;
6739 if (TREE_THIS_VOLATILE (t))
6740 return NULL_TREE;
6742 if (DECL_P (t))
6743 return get_symbol_constant_value (t);
6745 tem = fold_read_from_constant_string (t);
6746 if (tem)
6747 return tem;
6749 switch (TREE_CODE (t))
6751 case ARRAY_REF:
6752 case ARRAY_RANGE_REF:
6753 /* Constant indexes are handled well by get_base_constructor.
6754 Only special case variable offsets.
6755 FIXME: This code can't handle nested references with variable indexes
6756 (they will be handled only by iteration of ccp). Perhaps we can bring
6757 get_ref_base_and_extent here and make it use a valueize callback. */
6758 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6759 && valueize
6760 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6761 && poly_int_tree_p (idx))
6763 tree low_bound, unit_size;
6765 /* If the resulting bit-offset is constant, track it. */
6766 if ((low_bound = array_ref_low_bound (t),
6767 poly_int_tree_p (low_bound))
6768 && (unit_size = array_ref_element_size (t),
6769 tree_fits_uhwi_p (unit_size)))
6771 poly_offset_int woffset
6772 = wi::sext (wi::to_poly_offset (idx)
6773 - wi::to_poly_offset (low_bound),
6774 TYPE_PRECISION (TREE_TYPE (idx)));
6776 if (woffset.to_shwi (&offset))
6778 /* TODO: This code seems wrong, multiply then check
6779 to see if it fits. */
6780 offset *= tree_to_uhwi (unit_size);
6781 offset *= BITS_PER_UNIT;
6783 base = TREE_OPERAND (t, 0);
6784 ctor = get_base_constructor (base, &offset, valueize);
6785 /* Empty constructor. Always fold to 0. */
6786 if (ctor == error_mark_node)
6787 return build_zero_cst (TREE_TYPE (t));
6788 /* Out of bound array access. Value is undefined,
6789 but don't fold. */
6790 if (maybe_lt (offset, 0))
6791 return NULL_TREE;
6792 /* We can not determine ctor. */
6793 if (!ctor)
6794 return NULL_TREE;
6795 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6796 tree_to_uhwi (unit_size)
6797 * BITS_PER_UNIT,
6798 base);
6802 /* Fallthru. */
6804 case COMPONENT_REF:
6805 case BIT_FIELD_REF:
6806 case TARGET_MEM_REF:
6807 case MEM_REF:
6808 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6809 ctor = get_base_constructor (base, &offset, valueize);
6811 /* Empty constructor. Always fold to 0. */
6812 if (ctor == error_mark_node)
6813 return build_zero_cst (TREE_TYPE (t));
6814 /* We do not know precise address. */
6815 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6816 return NULL_TREE;
6817 /* We can not determine ctor. */
6818 if (!ctor)
6819 return NULL_TREE;
6821 /* Out of bound array access. Value is undefined, but don't fold. */
6822 if (maybe_lt (offset, 0))
6823 return NULL_TREE;
6825 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6826 base);
6828 case REALPART_EXPR:
6829 case IMAGPART_EXPR:
6831 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6832 if (c && TREE_CODE (c) == COMPLEX_CST)
6833 return fold_build1_loc (EXPR_LOCATION (t),
6834 TREE_CODE (t), TREE_TYPE (t), c);
6835 break;
6838 default:
6839 break;
6842 return NULL_TREE;
6845 tree
6846 fold_const_aggregate_ref (tree t)
6848 return fold_const_aggregate_ref_1 (t, NULL);
6851 /* Lookup virtual method with index TOKEN in a virtual table V
6852 at OFFSET.
6853 Set CAN_REFER if non-NULL to false if method
6854 is not referable or if the virtual table is ill-formed (such as rewriten
6855 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6857 tree
6858 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6859 tree v,
6860 unsigned HOST_WIDE_INT offset,
6861 bool *can_refer)
6863 tree vtable = v, init, fn;
6864 unsigned HOST_WIDE_INT size;
6865 unsigned HOST_WIDE_INT elt_size, access_index;
6866 tree domain_type;
6868 if (can_refer)
6869 *can_refer = true;
6871 /* First of all double check we have virtual table. */
6872 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6874 /* Pass down that we lost track of the target. */
6875 if (can_refer)
6876 *can_refer = false;
6877 return NULL_TREE;
6880 init = ctor_for_folding (v);
6882 /* The virtual tables should always be born with constructors
6883 and we always should assume that they are avaialble for
6884 folding. At the moment we do not stream them in all cases,
6885 but it should never happen that ctor seem unreachable. */
6886 gcc_assert (init);
6887 if (init == error_mark_node)
6889 /* Pass down that we lost track of the target. */
6890 if (can_refer)
6891 *can_refer = false;
6892 return NULL_TREE;
6894 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6895 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6896 offset *= BITS_PER_UNIT;
6897 offset += token * size;
6899 /* Lookup the value in the constructor that is assumed to be array.
6900 This is equivalent to
6901 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6902 offset, size, NULL);
6903 but in a constant time. We expect that frontend produced a simple
6904 array without indexed initializers. */
6906 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6907 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6908 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6909 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6911 access_index = offset / BITS_PER_UNIT / elt_size;
6912 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6914 /* This code makes an assumption that there are no
6915 indexed fileds produced by C++ FE, so we can directly index the array. */
6916 if (access_index < CONSTRUCTOR_NELTS (init))
6918 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6919 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6920 STRIP_NOPS (fn);
6922 else
6923 fn = NULL;
6925 /* For type inconsistent program we may end up looking up virtual method
6926 in virtual table that does not contain TOKEN entries. We may overrun
6927 the virtual table and pick up a constant or RTTI info pointer.
6928 In any case the call is undefined. */
6929 if (!fn
6930 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6931 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6932 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6933 else
6935 fn = TREE_OPERAND (fn, 0);
6937 /* When cgraph node is missing and function is not public, we cannot
6938 devirtualize. This can happen in WHOPR when the actual method
6939 ends up in other partition, because we found devirtualization
6940 possibility too late. */
6941 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6943 if (can_refer)
6945 *can_refer = false;
6946 return fn;
6948 return NULL_TREE;
6952 /* Make sure we create a cgraph node for functions we'll reference.
6953 They can be non-existent if the reference comes from an entry
6954 of an external vtable for example. */
6955 cgraph_node::get_create (fn);
6957 return fn;
6960 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6961 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6962 KNOWN_BINFO carries the binfo describing the true type of
6963 OBJ_TYPE_REF_OBJECT(REF).
6964 Set CAN_REFER if non-NULL to false if method
6965 is not referable or if the virtual table is ill-formed (such as rewriten
6966 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6968 tree
6969 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6970 bool *can_refer)
6972 unsigned HOST_WIDE_INT offset;
6973 tree v;
6975 v = BINFO_VTABLE (known_binfo);
6976 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6977 if (!v)
6978 return NULL_TREE;
6980 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6982 if (can_refer)
6983 *can_refer = false;
6984 return NULL_TREE;
6986 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6989 /* Given a pointer value T, return a simplified version of an
6990 indirection through T, or NULL_TREE if no simplification is
6991 possible. Note that the resulting type may be different from
6992 the type pointed to in the sense that it is still compatible
6993 from the langhooks point of view. */
6995 tree
6996 gimple_fold_indirect_ref (tree t)
6998 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6999 tree sub = t;
7000 tree subtype;
7002 STRIP_NOPS (sub);
7003 subtype = TREE_TYPE (sub);
7004 if (!POINTER_TYPE_P (subtype)
7005 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7006 return NULL_TREE;
7008 if (TREE_CODE (sub) == ADDR_EXPR)
7010 tree op = TREE_OPERAND (sub, 0);
7011 tree optype = TREE_TYPE (op);
7012 /* *&p => p */
7013 if (useless_type_conversion_p (type, optype))
7014 return op;
7016 /* *(foo *)&fooarray => fooarray[0] */
7017 if (TREE_CODE (optype) == ARRAY_TYPE
7018 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7019 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7021 tree type_domain = TYPE_DOMAIN (optype);
7022 tree min_val = size_zero_node;
7023 if (type_domain && TYPE_MIN_VALUE (type_domain))
7024 min_val = TYPE_MIN_VALUE (type_domain);
7025 if (TREE_CODE (min_val) == INTEGER_CST)
7026 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7028 /* *(foo *)&complexfoo => __real__ complexfoo */
7029 else if (TREE_CODE (optype) == COMPLEX_TYPE
7030 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7031 return fold_build1 (REALPART_EXPR, type, op);
7032 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7033 else if (TREE_CODE (optype) == VECTOR_TYPE
7034 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7036 tree part_width = TYPE_SIZE (type);
7037 tree index = bitsize_int (0);
7038 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7042 /* *(p + CST) -> ... */
7043 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7044 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7046 tree addr = TREE_OPERAND (sub, 0);
7047 tree off = TREE_OPERAND (sub, 1);
7048 tree addrtype;
7050 STRIP_NOPS (addr);
7051 addrtype = TREE_TYPE (addr);
7053 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7054 if (TREE_CODE (addr) == ADDR_EXPR
7055 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7056 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7057 && tree_fits_uhwi_p (off))
7059 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7060 tree part_width = TYPE_SIZE (type);
7061 unsigned HOST_WIDE_INT part_widthi
7062 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7063 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7064 tree index = bitsize_int (indexi);
7065 if (known_lt (offset / part_widthi,
7066 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7067 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7068 part_width, index);
7071 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7072 if (TREE_CODE (addr) == ADDR_EXPR
7073 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7074 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7076 tree size = TYPE_SIZE_UNIT (type);
7077 if (tree_int_cst_equal (size, off))
7078 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7081 /* *(p + CST) -> MEM_REF <p, CST>. */
7082 if (TREE_CODE (addr) != ADDR_EXPR
7083 || DECL_P (TREE_OPERAND (addr, 0)))
7084 return fold_build2 (MEM_REF, type,
7085 addr,
7086 wide_int_to_tree (ptype, wi::to_wide (off)));
7089 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7090 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7091 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7092 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7094 tree type_domain;
7095 tree min_val = size_zero_node;
7096 tree osub = sub;
7097 sub = gimple_fold_indirect_ref (sub);
7098 if (! sub)
7099 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7100 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7101 if (type_domain && TYPE_MIN_VALUE (type_domain))
7102 min_val = TYPE_MIN_VALUE (type_domain);
7103 if (TREE_CODE (min_val) == INTEGER_CST)
7104 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7107 return NULL_TREE;
7110 /* Return true if CODE is an operation that when operating on signed
7111 integer types involves undefined behavior on overflow and the
7112 operation can be expressed with unsigned arithmetic. */
7114 bool
7115 arith_code_with_undefined_signed_overflow (tree_code code)
7117 switch (code)
7119 case PLUS_EXPR:
7120 case MINUS_EXPR:
7121 case MULT_EXPR:
7122 case NEGATE_EXPR:
7123 case POINTER_PLUS_EXPR:
7124 return true;
7125 default:
7126 return false;
7130 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7131 operation that can be transformed to unsigned arithmetic by converting
7132 its operand, carrying out the operation in the corresponding unsigned
7133 type and converting the result back to the original type.
7135 Returns a sequence of statements that replace STMT and also contain
7136 a modified form of STMT itself. */
7138 gimple_seq
7139 rewrite_to_defined_overflow (gimple *stmt)
7141 if (dump_file && (dump_flags & TDF_DETAILS))
7143 fprintf (dump_file, "rewriting stmt with undefined signed "
7144 "overflow ");
7145 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7148 tree lhs = gimple_assign_lhs (stmt);
7149 tree type = unsigned_type_for (TREE_TYPE (lhs));
7150 gimple_seq stmts = NULL;
7151 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7153 tree op = gimple_op (stmt, i);
7154 op = gimple_convert (&stmts, type, op);
7155 gimple_set_op (stmt, i, op);
7157 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7158 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7159 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7160 gimple_seq_add_stmt (&stmts, stmt);
7161 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7162 gimple_seq_add_stmt (&stmts, cvt);
7164 return stmts;
7168 /* The valueization hook we use for the gimple_build API simplification.
7169 This makes us match fold_buildN behavior by only combining with
7170 statements in the sequence(s) we are currently building. */
7172 static tree
7173 gimple_build_valueize (tree op)
7175 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7176 return op;
7177 return NULL_TREE;
7180 /* Build the expression CODE OP0 of type TYPE with location LOC,
7181 simplifying it first if possible. Returns the built
7182 expression value and appends statements possibly defining it
7183 to SEQ. */
7185 tree
7186 gimple_build (gimple_seq *seq, location_t loc,
7187 enum tree_code code, tree type, tree op0)
7189 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7190 if (!res)
7192 res = create_tmp_reg_or_ssa_name (type);
7193 gimple *stmt;
7194 if (code == REALPART_EXPR
7195 || code == IMAGPART_EXPR
7196 || code == VIEW_CONVERT_EXPR)
7197 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7198 else
7199 stmt = gimple_build_assign (res, code, op0);
7200 gimple_set_location (stmt, loc);
7201 gimple_seq_add_stmt_without_update (seq, stmt);
7203 return res;
7206 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7207 simplifying it first if possible. Returns the built
7208 expression value and appends statements possibly defining it
7209 to SEQ. */
7211 tree
7212 gimple_build (gimple_seq *seq, location_t loc,
7213 enum tree_code code, tree type, tree op0, tree op1)
7215 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7216 if (!res)
7218 res = create_tmp_reg_or_ssa_name (type);
7219 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7220 gimple_set_location (stmt, loc);
7221 gimple_seq_add_stmt_without_update (seq, stmt);
7223 return res;
7226 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7227 simplifying it first if possible. Returns the built
7228 expression value and appends statements possibly defining it
7229 to SEQ. */
7231 tree
7232 gimple_build (gimple_seq *seq, location_t loc,
7233 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7235 tree res = gimple_simplify (code, type, op0, op1, op2,
7236 seq, gimple_build_valueize);
7237 if (!res)
7239 res = create_tmp_reg_or_ssa_name (type);
7240 gimple *stmt;
7241 if (code == BIT_FIELD_REF)
7242 stmt = gimple_build_assign (res, code,
7243 build3 (code, type, op0, op1, op2));
7244 else
7245 stmt = gimple_build_assign (res, code, op0, op1, op2);
7246 gimple_set_location (stmt, loc);
7247 gimple_seq_add_stmt_without_update (seq, stmt);
7249 return res;
7252 /* Build the call FN (ARG0) with a result of type TYPE
7253 (or no result if TYPE is void) with location LOC,
7254 simplifying it first if possible. Returns the built
7255 expression value (or NULL_TREE if TYPE is void) and appends
7256 statements possibly defining it to SEQ. */
7258 tree
7259 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7260 tree type, tree arg0)
7262 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7263 if (!res)
7265 gcall *stmt;
7266 if (internal_fn_p (fn))
7267 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7268 else
7270 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7271 stmt = gimple_build_call (decl, 1, arg0);
7273 if (!VOID_TYPE_P (type))
7275 res = create_tmp_reg_or_ssa_name (type);
7276 gimple_call_set_lhs (stmt, res);
7278 gimple_set_location (stmt, loc);
7279 gimple_seq_add_stmt_without_update (seq, stmt);
7281 return res;
7284 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7285 (or no result if TYPE is void) with location LOC,
7286 simplifying it first if possible. Returns the built
7287 expression value (or NULL_TREE if TYPE is void) and appends
7288 statements possibly defining it to SEQ. */
7290 tree
7291 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7292 tree type, tree arg0, tree arg1)
7294 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7295 if (!res)
7297 gcall *stmt;
7298 if (internal_fn_p (fn))
7299 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7300 else
7302 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7303 stmt = gimple_build_call (decl, 2, arg0, arg1);
7305 if (!VOID_TYPE_P (type))
7307 res = create_tmp_reg_or_ssa_name (type);
7308 gimple_call_set_lhs (stmt, res);
7310 gimple_set_location (stmt, loc);
7311 gimple_seq_add_stmt_without_update (seq, stmt);
7313 return res;
7316 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7317 (or no result if TYPE is void) with location LOC,
7318 simplifying it first if possible. Returns the built
7319 expression value (or NULL_TREE if TYPE is void) and appends
7320 statements possibly defining it to SEQ. */
7322 tree
7323 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7324 tree type, tree arg0, tree arg1, tree arg2)
7326 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7327 seq, gimple_build_valueize);
7328 if (!res)
7330 gcall *stmt;
7331 if (internal_fn_p (fn))
7332 stmt = gimple_build_call_internal (as_internal_fn (fn),
7333 3, arg0, arg1, arg2);
7334 else
7336 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7337 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7339 if (!VOID_TYPE_P (type))
7341 res = create_tmp_reg_or_ssa_name (type);
7342 gimple_call_set_lhs (stmt, res);
7344 gimple_set_location (stmt, loc);
7345 gimple_seq_add_stmt_without_update (seq, stmt);
7347 return res;
7350 /* Build the conversion (TYPE) OP with a result of type TYPE
7351 with location LOC if such conversion is neccesary in GIMPLE,
7352 simplifying it first.
7353 Returns the built expression value and appends
7354 statements possibly defining it to SEQ. */
7356 tree
7357 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7359 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7360 return op;
7361 return gimple_build (seq, loc, NOP_EXPR, type, op);
7364 /* Build the conversion (ptrofftype) OP with a result of a type
7365 compatible with ptrofftype with location LOC if such conversion
7366 is neccesary in GIMPLE, simplifying it first.
7367 Returns the built expression value and appends
7368 statements possibly defining it to SEQ. */
7370 tree
7371 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7373 if (ptrofftype_p (TREE_TYPE (op)))
7374 return op;
7375 return gimple_convert (seq, loc, sizetype, op);
7378 /* Build a vector of type TYPE in which each element has the value OP.
7379 Return a gimple value for the result, appending any new statements
7380 to SEQ. */
7382 tree
7383 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7384 tree op)
7386 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7387 && !CONSTANT_CLASS_P (op))
7388 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7390 tree res, vec = build_vector_from_val (type, op);
7391 if (is_gimple_val (vec))
7392 return vec;
7393 if (gimple_in_ssa_p (cfun))
7394 res = make_ssa_name (type);
7395 else
7396 res = create_tmp_reg (type);
7397 gimple *stmt = gimple_build_assign (res, vec);
7398 gimple_set_location (stmt, loc);
7399 gimple_seq_add_stmt_without_update (seq, stmt);
7400 return res;
7403 /* Build a vector from BUILDER, handling the case in which some elements
7404 are non-constant. Return a gimple value for the result, appending any
7405 new instructions to SEQ.
7407 BUILDER must not have a stepped encoding on entry. This is because
7408 the function is not geared up to handle the arithmetic that would
7409 be needed in the variable case, and any code building a vector that
7410 is known to be constant should use BUILDER->build () directly. */
7412 tree
7413 gimple_build_vector (gimple_seq *seq, location_t loc,
7414 tree_vector_builder *builder)
7416 gcc_assert (builder->nelts_per_pattern () <= 2);
7417 unsigned int encoded_nelts = builder->encoded_nelts ();
7418 for (unsigned int i = 0; i < encoded_nelts; ++i)
7419 if (!TREE_CONSTANT ((*builder)[i]))
7421 tree type = builder->type ();
7422 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7423 vec<constructor_elt, va_gc> *v;
7424 vec_alloc (v, nelts);
7425 for (i = 0; i < nelts; ++i)
7426 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7428 tree res;
7429 if (gimple_in_ssa_p (cfun))
7430 res = make_ssa_name (type);
7431 else
7432 res = create_tmp_reg (type);
7433 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7434 gimple_set_location (stmt, loc);
7435 gimple_seq_add_stmt_without_update (seq, stmt);
7436 return res;
7438 return builder->build ();
7441 /* Return true if the result of assignment STMT is known to be non-negative.
7442 If the return value is based on the assumption that signed overflow is
7443 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7444 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7446 static bool
7447 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7448 int depth)
7450 enum tree_code code = gimple_assign_rhs_code (stmt);
7451 switch (get_gimple_rhs_class (code))
7453 case GIMPLE_UNARY_RHS:
7454 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7455 gimple_expr_type (stmt),
7456 gimple_assign_rhs1 (stmt),
7457 strict_overflow_p, depth);
7458 case GIMPLE_BINARY_RHS:
7459 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7460 gimple_expr_type (stmt),
7461 gimple_assign_rhs1 (stmt),
7462 gimple_assign_rhs2 (stmt),
7463 strict_overflow_p, depth);
7464 case GIMPLE_TERNARY_RHS:
7465 return false;
7466 case GIMPLE_SINGLE_RHS:
7467 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7468 strict_overflow_p, depth);
7469 case GIMPLE_INVALID_RHS:
7470 break;
7472 gcc_unreachable ();
7475 /* Return true if return value of call STMT is known to be non-negative.
7476 If the return value is based on the assumption that signed overflow is
7477 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7478 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7480 static bool
7481 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7482 int depth)
7484 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7485 gimple_call_arg (stmt, 0) : NULL_TREE;
7486 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7487 gimple_call_arg (stmt, 1) : NULL_TREE;
7489 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7490 gimple_call_combined_fn (stmt),
7491 arg0,
7492 arg1,
7493 strict_overflow_p, depth);
7496 /* Return true if return value of call STMT is known to be non-negative.
7497 If the return value is based on the assumption that signed overflow is
7498 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7499 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7501 static bool
7502 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7503 int depth)
7505 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7507 tree arg = gimple_phi_arg_def (stmt, i);
7508 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7509 return false;
7511 return true;
7514 /* Return true if STMT is known to compute a non-negative value.
7515 If the return value is based on the assumption that signed overflow is
7516 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7517 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7519 bool
7520 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7521 int depth)
7523 switch (gimple_code (stmt))
7525 case GIMPLE_ASSIGN:
7526 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7527 depth);
7528 case GIMPLE_CALL:
7529 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7530 depth);
7531 case GIMPLE_PHI:
7532 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7533 depth);
7534 default:
7535 return false;
7539 /* Return true if the floating-point value computed by assignment STMT
7540 is known to have an integer value. We also allow +Inf, -Inf and NaN
7541 to be considered integer values. Return false for signaling NaN.
7543 DEPTH is the current nesting depth of the query. */
7545 static bool
7546 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7548 enum tree_code code = gimple_assign_rhs_code (stmt);
7549 switch (get_gimple_rhs_class (code))
7551 case GIMPLE_UNARY_RHS:
7552 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7553 gimple_assign_rhs1 (stmt), depth);
7554 case GIMPLE_BINARY_RHS:
7555 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7556 gimple_assign_rhs1 (stmt),
7557 gimple_assign_rhs2 (stmt), depth);
7558 case GIMPLE_TERNARY_RHS:
7559 return false;
7560 case GIMPLE_SINGLE_RHS:
7561 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7562 case GIMPLE_INVALID_RHS:
7563 break;
7565 gcc_unreachable ();
7568 /* Return true if the floating-point value computed by call STMT is known
7569 to have an integer value. We also allow +Inf, -Inf and NaN to be
7570 considered integer values. Return false for signaling NaN.
7572 DEPTH is the current nesting depth of the query. */
7574 static bool
7575 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7577 tree arg0 = (gimple_call_num_args (stmt) > 0
7578 ? gimple_call_arg (stmt, 0)
7579 : NULL_TREE);
7580 tree arg1 = (gimple_call_num_args (stmt) > 1
7581 ? gimple_call_arg (stmt, 1)
7582 : NULL_TREE);
7583 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7584 arg0, arg1, depth);
7587 /* Return true if the floating-point result of phi STMT is known to have
7588 an integer value. We also allow +Inf, -Inf and NaN to be considered
7589 integer values. Return false for signaling NaN.
7591 DEPTH is the current nesting depth of the query. */
7593 static bool
7594 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7596 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7598 tree arg = gimple_phi_arg_def (stmt, i);
7599 if (!integer_valued_real_single_p (arg, depth + 1))
7600 return false;
7602 return true;
7605 /* Return true if the floating-point value computed by STMT is known
7606 to have an integer value. We also allow +Inf, -Inf and NaN to be
7607 considered integer values. Return false for signaling NaN.
7609 DEPTH is the current nesting depth of the query. */
7611 bool
7612 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7614 switch (gimple_code (stmt))
7616 case GIMPLE_ASSIGN:
7617 return gimple_assign_integer_valued_real_p (stmt, depth);
7618 case GIMPLE_CALL:
7619 return gimple_call_integer_valued_real_p (stmt, depth);
7620 case GIMPLE_PHI:
7621 return gimple_phi_integer_valued_real_p (stmt, depth);
7622 default:
7623 return false;