/* gcc/gimple-fold.c
   (extracted from official-gcc.git, blob 506a296732c52e26d045b4ba2743231a331dccca)  */
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
702 gsi_replace (gsi, repl, false);
703 return true;
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
721 goto done;
723 else
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
729 /* Build accesses at offset zero with a ref-all character type. */
730 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
731 ptr_mode, true), 0);
733 /* If we can perform the copy efficiently with first doing all loads
734 and then all stores inline it that way. Currently efficiently
735 means that we can load all the memory into a single integer
736 register which is what MOVE_MAX gives us. */
737 src_align = get_pointer_alignment (src);
738 dest_align = get_pointer_alignment (dest);
739 if (tree_fits_uhwi_p (len)
740 && compare_tree_int (len, MOVE_MAX) <= 0
741 /* ??? Don't transform copies from strings with known length this
742 confuses the tree-ssa-strlen.c. This doesn't handle
743 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
744 reason. */
745 && !c_strlen (src, 2))
747 unsigned ilen = tree_to_uhwi (len);
748 if (pow2p_hwi (ilen))
750 /* Detect invalid bounds and overlapping copies and issue
751 either -Warray-bounds or -Wrestrict. */
752 if (!nowarn
753 && check_bounds_or_overlap (as_a <gcall *>(stmt),
754 dest, src, len, len))
755 gimple_set_no_warning (stmt, true);
757 scalar_int_mode mode;
758 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
759 if (type
760 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
761 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
762 /* If the destination pointer is not aligned we must be able
763 to emit an unaligned store. */
764 && (dest_align >= GET_MODE_ALIGNMENT (mode)
765 || !targetm.slow_unaligned_access (mode, dest_align)
766 || (optab_handler (movmisalign_optab, mode)
767 != CODE_FOR_nothing)))
769 tree srctype = type;
770 tree desttype = type;
771 if (src_align < GET_MODE_ALIGNMENT (mode))
772 srctype = build_aligned_type (type, src_align);
773 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
774 tree tem = fold_const_aggregate_ref (srcmem);
775 if (tem)
776 srcmem = tem;
777 else if (src_align < GET_MODE_ALIGNMENT (mode)
778 && targetm.slow_unaligned_access (mode, src_align)
779 && (optab_handler (movmisalign_optab, mode)
780 == CODE_FOR_nothing))
781 srcmem = NULL_TREE;
782 if (srcmem)
784 gimple *new_stmt;
785 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
787 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
788 srcmem
789 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
790 new_stmt);
791 gimple_assign_set_lhs (new_stmt, srcmem);
792 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
793 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
795 if (dest_align < GET_MODE_ALIGNMENT (mode))
796 desttype = build_aligned_type (type, dest_align);
797 new_stmt
798 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
799 dest, off0),
800 srcmem);
801 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
802 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
803 if (gimple_vdef (new_stmt)
804 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
805 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
806 if (!lhs)
808 gsi_replace (gsi, new_stmt, false);
809 return true;
811 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
812 goto done;
818 if (endp == 3)
820 /* Both DEST and SRC must be pointer types.
821 ??? This is what old code did. Is the testing for pointer types
822 really mandatory?
824 If either SRC is readonly or length is 1, we can use memcpy. */
825 if (!dest_align || !src_align)
826 return false;
827 if (readonly_data_expr (src)
828 || (tree_fits_uhwi_p (len)
829 && (MIN (src_align, dest_align) / BITS_PER_UNIT
830 >= tree_to_uhwi (len))))
832 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
833 if (!fn)
834 return false;
835 gimple_call_set_fndecl (stmt, fn);
836 gimple_call_set_arg (stmt, 0, dest);
837 gimple_call_set_arg (stmt, 1, src);
838 fold_stmt (gsi);
839 return true;
842 /* If *src and *dest can't overlap, optimize into memcpy as well. */
843 if (TREE_CODE (src) == ADDR_EXPR
844 && TREE_CODE (dest) == ADDR_EXPR)
846 tree src_base, dest_base, fn;
847 poly_int64 src_offset = 0, dest_offset = 0;
848 poly_uint64 maxsize;
850 srcvar = TREE_OPERAND (src, 0);
851 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
852 if (src_base == NULL)
853 src_base = srcvar;
854 destvar = TREE_OPERAND (dest, 0);
855 dest_base = get_addr_base_and_unit_offset (destvar,
856 &dest_offset);
857 if (dest_base == NULL)
858 dest_base = destvar;
859 if (!poly_int_tree_p (len, &maxsize))
860 maxsize = -1;
861 if (SSA_VAR_P (src_base)
862 && SSA_VAR_P (dest_base))
864 if (operand_equal_p (src_base, dest_base, 0)
865 && ranges_maybe_overlap_p (src_offset, maxsize,
866 dest_offset, maxsize))
867 return false;
869 else if (TREE_CODE (src_base) == MEM_REF
870 && TREE_CODE (dest_base) == MEM_REF)
872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
873 TREE_OPERAND (dest_base, 0), 0))
874 return false;
875 poly_offset_int full_src_offset
876 = mem_ref_offset (src_base) + src_offset;
877 poly_offset_int full_dest_offset
878 = mem_ref_offset (dest_base) + dest_offset;
879 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
880 full_dest_offset, maxsize))
881 return false;
883 else
884 return false;
886 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
887 if (!fn)
888 return false;
889 gimple_call_set_fndecl (stmt, fn);
890 gimple_call_set_arg (stmt, 0, dest);
891 gimple_call_set_arg (stmt, 1, src);
892 fold_stmt (gsi);
893 return true;
896 /* If the destination and source do not alias optimize into
897 memcpy as well. */
898 if ((is_gimple_min_invariant (dest)
899 || TREE_CODE (dest) == SSA_NAME)
900 && (is_gimple_min_invariant (src)
901 || TREE_CODE (src) == SSA_NAME))
903 ao_ref destr, srcr;
904 ao_ref_init_from_ptr_and_size (&destr, dest, len);
905 ao_ref_init_from_ptr_and_size (&srcr, src, len);
906 if (!refs_may_alias_p_1 (&destr, &srcr, false))
908 tree fn;
909 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
920 return false;
923 if (!tree_fits_shwi_p (len))
924 return false;
925 if (!POINTER_TYPE_P (TREE_TYPE (src))
926 || !POINTER_TYPE_P (TREE_TYPE (dest)))
927 return false;
928 /* In the following try to find a type that is most natural to be
929 used for the memcpy source and destination and that allows
930 the most optimization when memcpy is turned into a plain assignment
931 using that type. In theory we could always use a char[len] type
932 but that only gains us that the destination and source possibly
933 no longer will have their address taken. */
934 srctype = TREE_TYPE (TREE_TYPE (src));
935 if (TREE_CODE (srctype) == ARRAY_TYPE
936 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
937 srctype = TREE_TYPE (srctype);
938 desttype = TREE_TYPE (TREE_TYPE (dest));
939 if (TREE_CODE (desttype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
941 desttype = TREE_TYPE (desttype);
942 if (TREE_ADDRESSABLE (srctype)
943 || TREE_ADDRESSABLE (desttype))
944 return false;
946 /* Make sure we are not copying using a floating-point mode or
947 a type whose size possibly does not match its precision. */
948 if (FLOAT_MODE_P (TYPE_MODE (desttype))
949 || TREE_CODE (desttype) == BOOLEAN_TYPE
950 || TREE_CODE (desttype) == ENUMERAL_TYPE)
951 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
952 if (FLOAT_MODE_P (TYPE_MODE (srctype))
953 || TREE_CODE (srctype) == BOOLEAN_TYPE
954 || TREE_CODE (srctype) == ENUMERAL_TYPE)
955 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
956 if (!srctype)
957 srctype = desttype;
958 if (!desttype)
959 desttype = srctype;
960 if (!srctype)
961 return false;
963 src_align = get_pointer_alignment (src);
964 dest_align = get_pointer_alignment (dest);
965 if (dest_align < TYPE_ALIGN (desttype)
966 || src_align < TYPE_ALIGN (srctype))
967 return false;
969 destvar = NULL_TREE;
970 if (TREE_CODE (dest) == ADDR_EXPR
971 && var_decl_component_p (TREE_OPERAND (dest, 0))
972 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
973 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
975 srcvar = NULL_TREE;
976 if (TREE_CODE (src) == ADDR_EXPR
977 && var_decl_component_p (TREE_OPERAND (src, 0))
978 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 if (!destvar
981 || src_align >= TYPE_ALIGN (desttype))
982 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
983 src, off0);
984 else if (!STRICT_ALIGNMENT)
986 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
987 src_align);
988 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
992 if (srcvar == NULL_TREE && destvar == NULL_TREE)
993 return false;
995 if (srcvar == NULL_TREE)
997 if (src_align >= TYPE_ALIGN (desttype))
998 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
999 else
1001 if (STRICT_ALIGNMENT)
1002 return false;
1003 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1004 src_align);
1005 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1008 else if (destvar == NULL_TREE)
1010 if (dest_align >= TYPE_ALIGN (srctype))
1011 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1012 else
1014 if (STRICT_ALIGNMENT)
1015 return false;
1016 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1017 dest_align);
1018 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1022 /* Detect invalid bounds and overlapping copies and issue either
1023 -Warray-bounds or -Wrestrict. */
1024 if (!nowarn)
1025 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1027 gimple *new_stmt;
1028 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1030 tree tem = fold_const_aggregate_ref (srcvar);
1031 if (tem)
1032 srcvar = tem;
1033 if (! is_gimple_min_invariant (srcvar))
1035 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1036 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1037 new_stmt);
1038 gimple_assign_set_lhs (new_stmt, srcvar);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1042 new_stmt = gimple_build_assign (destvar, srcvar);
1043 goto set_vop_and_replace;
1046 /* We get an aggregate copy. Use an unsigned char[] type to
1047 perform the copying to preserve padding and to avoid any issues
1048 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1049 desttype = build_array_type_nelts (unsigned_char_type_node,
1050 tree_to_uhwi (len));
1051 srctype = desttype;
1052 if (src_align > TYPE_ALIGN (srctype))
1053 srctype = build_aligned_type (srctype, src_align);
1054 if (dest_align > TYPE_ALIGN (desttype))
1055 desttype = build_aligned_type (desttype, dest_align);
1056 new_stmt
1057 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1058 fold_build2 (MEM_REF, srctype, src, off0));
1059 set_vop_and_replace:
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1062 if (gimple_vdef (new_stmt)
1063 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1064 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1065 if (!lhs)
1067 gsi_replace (gsi, new_stmt, false);
1068 return true;
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1073 done:
1074 gimple_seq stmts = NULL;
1075 if (endp == 0 || endp == 3)
1076 len = NULL_TREE;
1077 else if (endp == 2)
1078 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1079 ssize_int (1));
1080 if (endp == 2 || endp == 1)
1082 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1083 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1084 TREE_TYPE (dest), dest, len);
1087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1088 gimple *repl = gimple_build_assign (lhs, dest);
1089 gsi_replace (gsi, repl, false);
1090 return true;
1093 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1094 to built-in memcmp (a, b, len). */
1096 static bool
1097 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1099 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1101 if (!fn)
1102 return false;
1104 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1106 gimple *stmt = gsi_stmt (*gsi);
1107 tree a = gimple_call_arg (stmt, 0);
1108 tree b = gimple_call_arg (stmt, 1);
1109 tree len = gimple_call_arg (stmt, 2);
1111 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1112 replace_call_with_call_and_fold (gsi, repl);
1114 return true;
1117 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1118 to built-in memmove (dest, src, len). */
1120 static bool
1121 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1123 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1125 if (!fn)
1126 return false;
1128 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1129 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1130 len) into memmove (dest, src, len). */
1132 gimple *stmt = gsi_stmt (*gsi);
1133 tree src = gimple_call_arg (stmt, 0);
1134 tree dest = gimple_call_arg (stmt, 1);
1135 tree len = gimple_call_arg (stmt, 2);
1137 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1138 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1139 replace_call_with_call_and_fold (gsi, repl);
1141 return true;
1144 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1145 to built-in memset (dest, 0, len). */
1147 static bool
1148 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1150 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1152 if (!fn)
1153 return false;
1155 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1157 gimple *stmt = gsi_stmt (*gsi);
1158 tree dest = gimple_call_arg (stmt, 0);
1159 tree len = gimple_call_arg (stmt, 1);
1161 gimple_seq seq = NULL;
1162 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1163 gimple_seq_add_stmt_without_update (&seq, repl);
1164 gsi_replace_with_seq_vops (gsi, seq);
1165 fold_stmt (gsi);
1167 return true;
1170 /* Fold function call to builtin memset or bzero at *GSI setting the
1171 memory of size LEN to VAL. Return whether a simplification was made. */
1173 static bool
1174 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1176 gimple *stmt = gsi_stmt (*gsi);
1177 tree etype;
1178 unsigned HOST_WIDE_INT length, cval;
1180 /* If the LEN parameter is zero, return DEST. */
1181 if (integer_zerop (len))
1183 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1184 return true;
1187 if (! tree_fits_uhwi_p (len))
1188 return false;
1190 if (TREE_CODE (c) != INTEGER_CST)
1191 return false;
1193 tree dest = gimple_call_arg (stmt, 0);
1194 tree var = dest;
1195 if (TREE_CODE (var) != ADDR_EXPR)
1196 return false;
1198 var = TREE_OPERAND (var, 0);
1199 if (TREE_THIS_VOLATILE (var))
1200 return false;
1202 etype = TREE_TYPE (var);
1203 if (TREE_CODE (etype) == ARRAY_TYPE)
1204 etype = TREE_TYPE (etype);
1206 if (!INTEGRAL_TYPE_P (etype)
1207 && !POINTER_TYPE_P (etype))
1208 return NULL_TREE;
1210 if (! var_decl_component_p (var))
1211 return NULL_TREE;
1213 length = tree_to_uhwi (len);
1214 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1215 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1216 return NULL_TREE;
1218 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1219 return NULL_TREE;
1221 if (integer_zerop (c))
1222 cval = 0;
1223 else
1225 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1226 return NULL_TREE;
1228 cval = TREE_INT_CST_LOW (c);
1229 cval &= 0xff;
1230 cval |= cval << 8;
1231 cval |= cval << 16;
1232 cval |= (cval << 31) << 1;
1235 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1236 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1237 gimple_set_vuse (store, gimple_vuse (stmt));
1238 tree vdef = gimple_vdef (stmt);
1239 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1241 gimple_set_vdef (store, gimple_vdef (stmt));
1242 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1244 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1245 if (gimple_call_lhs (stmt))
1247 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1248 gsi_replace (gsi, asgn, false);
1250 else
1252 gimple_stmt_iterator gsi2 = *gsi;
1253 gsi_prev (gsi);
1254 gsi_remove (&gsi2, true);
1257 return true;
1261 /* Obtain the minimum and maximum string length or minimum and maximum
1262 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1263 If ARG is an SSA name variable, follow its use-def chains. When
1264 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1265 if we are unable to determine the length or value, return false.
1266 VISITED is a bitmap of visited variables.
1267 TYPE is 0 if string length should be obtained, 1 for maximum string
1268 length and 2 for maximum value ARG can have.
1269 When FUZZY is non-zero and the length of a string cannot be determined,
1270 the function instead considers as the maximum possible length the
1271 size of a character array it may refer to. If FUZZY is 2, it will handle
1272 PHIs and COND_EXPRs optimistically, if we can determine string length
1273 minimum and maximum, it will use the minimum from the ones where it
1274 can be determined.
1275 Set *FLEXP to true if the range of the string lengths has been
1276 obtained from the upper bound of an array at the end of a struct.
1277 Such an array may hold a string that's longer than its upper bound
1278 due to it being used as a poor-man's flexible array member. */
1280 static bool
1281 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1282 int fuzzy, bool *flexp)
1284 tree var, val = NULL_TREE;
1285 gimple *def_stmt;
1287 /* The minimum and maximum length. */
1288 tree *const minlen = length;
1289 tree *const maxlen = length + 1;
/* Non-SSA operands (constants, addresses, decls, refs) are handled
   directly; SSA names fall through to the use-def walk below.  */
1291 if (TREE_CODE (arg) != SSA_NAME)
1293 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1294 if (TREE_CODE (arg) == ADDR_EXPR
1295 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1297 tree op = TREE_OPERAND (arg, 0)
1298 if (integer_zerop (TREE_OPERAND (op, 1)))
1300 tree aop0 = TREE_OPERAND (op, 0);
1301 if (TREE_CODE (aop0) == INDIRECT_REF
1302 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1303 return get_range_strlen (TREE_OPERAND (aop0, 0),
1304 length, visited, type, fuzzy, flexp);
1306 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1308 /* Fail if an array is the last member of a struct object
1309 since it could be treated as a (fake) flexible array
1310 member. */
1311 tree idx = TREE_OPERAND (op, 1);
1313 arg = TREE_OPERAND (op, 0);
1314 tree optype = TREE_TYPE (arg);
/* Fail when the constant index is known to be past the array bound.  */
1315 if (tree dom = TYPE_DOMAIN (optype))
1316 if (tree bound = TYPE_MAX_VALUE (dom))
1317 if (TREE_CODE (bound) == INTEGER_CST
1318 && TREE_CODE (idx) == INTEGER_CST
1319 && tree_int_cst_lt (bound, idx))
1320 return false;
/* TYPE == 2 asks for the maximum value of ARG rather than a string
   length; only non-negative integer constants qualify.  */
1324 if (type == 2)
1326 val = arg;
1327 if (TREE_CODE (val) != INTEGER_CST
1328 || tree_int_cst_sgn (val) < 0)
1329 return false;
1331 else
1332 val = c_strlen (arg, 1);
/* In fuzzy mode, when the actual string length is unknown, fall back
   to the size of the object ARG may refer to (minus 1 for the NUL).  */
1334 if (!val && fuzzy)
1336 if (TREE_CODE (arg) == ADDR_EXPR)
1337 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1338 visited, type, fuzzy, flexp);
1340 if (TREE_CODE (arg) == ARRAY_REF)
1342 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1344 /* Determine the "innermost" array type. */
1345 while (TREE_CODE (type) == ARRAY_TYPE
1346 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1347 type = TREE_TYPE (type);
1349 /* Avoid arrays of pointers. */
1350 tree eltype = TREE_TYPE (type);
1351 if (TREE_CODE (type) != ARRAY_TYPE
1352 || !INTEGRAL_TYPE_P (eltype))
1353 return false;
1355 val = TYPE_SIZE_UNIT (type);
1356 if (!val || integer_zerop (val))
1357 return false;
1359 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1360 integer_one_node);
1361 /* Set the minimum size to zero since the string in
1362 the array could have zero length. */
1363 *minlen = ssize_int (0);
1365 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1366 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1367 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1368 *flexp = true;
1370 else if (TREE_CODE (arg) == COMPONENT_REF
1371 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1372 == ARRAY_TYPE))
1374 /* Use the type of the member array to determine the upper
1375 bound on the length of the array. This may be overly
1376 optimistic if the array itself isn't NUL-terminated and
1377 the caller relies on the subsequent member to contain
1378 the NUL but that would only be considered valid if
1379 the array were the last member of a struct.
1380 Set *FLEXP to true if the array whose bound is being
1381 used is at the end of a struct. */
1382 if (array_at_struct_end_p (arg))
1383 *flexp = true;
1385 arg = TREE_OPERAND (arg, 1);
1387 tree type = TREE_TYPE (arg);
1389 while (TREE_CODE (type) == ARRAY_TYPE
1390 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1391 type = TREE_TYPE (type);
1393 /* Fail when the array bound is unknown or zero. */
1394 val = TYPE_SIZE_UNIT (type);
1395 if (!val || integer_zerop (val))
1396 return false;
1397 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1398 integer_one_node);
1399 /* Set the minimum size to zero since the string in
1400 the array could have zero length. */
1401 *minlen = ssize_int (0);
1404 if (VAR_P (arg))
1406 tree type = TREE_TYPE (arg);
1407 if (POINTER_TYPE_P (type))
1408 type = TREE_TYPE (type);
1410 if (TREE_CODE (type) == ARRAY_TYPE)
1412 val = TYPE_SIZE_UNIT (type);
1413 if (!val
1414 || TREE_CODE (val) != INTEGER_CST
1415 || integer_zerop (val))
1416 return false;
1417 val = wide_int_to_tree (TREE_TYPE (val),
1418 wi::sub (wi::to_wide (val), 1));
1419 /* Set the minimum size to zero since the string in
1420 the array could have zero length. */
1421 *minlen = ssize_int (0);
1426 if (!val)
1427 return false;
/* Merge VAL into the accumulated range [*MINLEN, *MAXLEN].  */
1429 if (!*minlen
1430 || (type > 0
1431 && TREE_CODE (*minlen) == INTEGER_CST
1432 && TREE_CODE (val) == INTEGER_CST
1433 && tree_int_cst_lt (val, *minlen)))
1434 *minlen = val;
1436 if (*maxlen)
1438 if (type > 0)
1440 if (TREE_CODE (*maxlen) != INTEGER_CST
1441 || TREE_CODE (val) != INTEGER_CST)
1442 return false;
1444 if (tree_int_cst_lt (*maxlen, val))
1445 *maxlen = val;
1446 return true;
/* TYPE == 0 requires an exact match with the previously found length.  */
1448 else if (simple_cst_equal (val, *maxlen) != 1)
1449 return false;
1452 *maxlen = val;
1453 return true;
1456 /* If ARG is registered for SSA update we cannot look at its defining
1457 statement. */
1458 if (name_registered_for_update_p (arg))
1459 return false;
1461 /* If we were already here, break the infinite cycle. */
1462 if (!*visited)
1463 *visited = BITMAP_ALLOC (NULL);
1464 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1465 return true;
1467 var = arg;
1468 def_stmt = SSA_NAME_DEF_STMT (var);
/* Recurse through the statement that defines the SSA name.  */
1470 switch (gimple_code (def_stmt))
1472 case GIMPLE_ASSIGN:
1473 /* The RHS of the statement defining VAR must either have a
1474 constant length or come from another SSA_NAME with a constant
1475 length. */
1476 if (gimple_assign_single_p (def_stmt)
1477 || gimple_assign_unary_nop_p (def_stmt))
1479 tree rhs = gimple_assign_rhs1 (def_stmt);
1480 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1482 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1484 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1485 gimple_assign_rhs3 (def_stmt) };
/* With FUZZY == 2 an undeterminable operand only forces the maximum
   to "unbounded" (all-ones) instead of failing the whole query.  */
1487 for (unsigned int i = 0; i < 2; i++)
1488 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1489 flexp))
1491 if (fuzzy == 2)
1492 *maxlen = build_all_ones_cst (size_type_node);
1493 else
1494 return false;
1496 return true;
1498 return false;
1500 case GIMPLE_PHI:
1501 /* All the arguments of the PHI node must have the same constant
1502 length. */
1503 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1505 tree arg = gimple_phi_arg (def_stmt, i)->def;
1507 /* If this PHI has itself as an argument, we cannot
1508 determine the string length of this argument. However,
1509 if we can find a constant string length for the other
1510 PHI args then we can still be sure that this is a
1511 constant string length. So be optimistic and just
1512 continue with the next argument. */
1513 if (arg == gimple_phi_result (def_stmt))
1514 continue;
1516 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1518 if (fuzzy == 2)
1519 *maxlen = build_all_ones_cst (size_type_node);
1520 else
1521 return false;
1524 return true;
1526 default:
1527 return false;
1531 /* Determine the minimum and maximum value or string length that ARG
1532 refers to and store each in the first two elements of MINMAXLEN.
1533 For expressions that point to strings of unknown lengths that are
1534 character arrays, use the upper bound of the array as the maximum
1535 length. For example, given an expression like 'x ? array : "xyz"'
1536 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1537 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1538 stored in array.
1539 Return true if the range of the string lengths has been obtained
1540 from the upper bound of an array at the end of a struct. Such
1541 an array may hold a string that's longer than its upper bound
1542 due to it being used as a poor-man's flexible array member.
1544 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1545 and false if PHIs and COND_EXPRs are to be handled optimistically,
1546 if we can determine string length minimum and maximum; it will use
1547 the minimum from the ones where it can be determined.
1548 STRICT false should be only used for warning code. */
1550 bool
1551 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1553 bitmap visited = NULL;
1555 minmaxlen[0] = NULL_TREE;
1556 minmaxlen[1] = NULL_TREE;
1558 bool flexarray = false;
1559 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1560 &flexarray))
1562 minmaxlen[0] = NULL_TREE;
1563 minmaxlen[1] = NULL_TREE;
1566 if (visited)
1567 BITMAP_FREE (visited);
1569 return flexarray;
1572 tree
1573 get_maxval_strlen (tree arg, int type)
1575 bitmap visited = NULL;
1576 tree len[2] = { NULL_TREE, NULL_TREE };
1578 bool dummy;
1579 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1580 len[1] = NULL_TREE;
1581 if (visited)
1582 BITMAP_FREE (visited);
1584 return len[1];
1588 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1589 If LEN is not NULL, it represents the length of the string to be
1590 copied. Return NULL_TREE if no simplification can be made. */
1592 static bool
1593 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1594 tree dest, tree src)
1596 gimple *stmt = gsi_stmt (*gsi);
1597 location_t loc = gimple_location (stmt);
1598 tree fn;
1600 /* If SRC and DEST are the same (and not volatile), return DEST. */
1601 if (operand_equal_p (src, dest, 0))
1603 /* Issue -Wrestrict unless the pointers are null (those do
1604 not point to objects and so do not indicate an overlap;
1605 such calls could be the result of sanitization and jump
1606 threading). */
1607 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1609 tree func = gimple_call_fndecl (stmt);
1611 warning_at (loc, OPT_Wrestrict,
1612 "%qD source argument is the same as destination",
1613 func);
1616 replace_call_with_value (gsi, dest);
1617 return true;
1620 if (optimize_function_for_size_p (cfun))
1621 return false;
1623 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1624 if (!fn)
1625 return false;
1627 tree len = get_maxval_strlen (src, 0);
1628 if (!len)
1629 return false;
1631 len = fold_convert_loc (loc, size_type_node, len);
1632 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1633 len = force_gimple_operand_gsi (gsi, len, true,
1634 NULL_TREE, true, GSI_SAME_STMT);
1635 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1636 replace_call_with_call_and_fold (gsi, repl);
1637 return true;
1640 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1641 If SLEN is not NULL, it represents the length of the source string.
1642 Return true if a simplification was made, false otherwise. */
1644 static bool
1645 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1646 tree dest, tree src, tree len)
1648 gimple *stmt = gsi_stmt (*gsi);
1649 location_t loc = gimple_location (stmt);
1650 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1652 /* If the LEN parameter is zero, return DEST. */
1653 if (integer_zerop (len))
1655 /* Avoid warning if the destination refers to an array/pointer
1656 decorated with attribute nonstring. */
1657 if (!nonstring)
1659 tree fndecl = gimple_call_fndecl (stmt);
1661 /* Warn about the lack of nul termination: the result is not
1662 a (nul-terminated) string. */
1663 tree slen = get_maxval_strlen (src, 0);
1664 if (slen && !integer_zerop (slen))
1665 warning_at (loc, OPT_Wstringop_truncation,
1666 "%G%qD destination unchanged after copying no bytes "
1667 "from a string of length %E",
1668 stmt, fndecl, slen);
1669 else
1670 warning_at (loc, OPT_Wstringop_truncation,
1671 "%G%qD destination unchanged after copying no bytes",
1672 stmt, fndecl);
1675 replace_call_with_value (gsi, dest);
1676 return true;
1679 /* We can't compare slen with len as constants below if len is not a
1680 constant. */
1681 if (TREE_CODE (len) != INTEGER_CST)
1682 return false;
1684 /* Now, we must be passed a constant src ptr parameter. */
1685 tree slen = get_maxval_strlen (src, 0);
1686 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1687 return false;
1689 /* The size of the source string including the terminating nul. */
1690 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1692 /* We do not support simplification of this case, though we do
1693 support it when expanding trees into RTL. */
1694 /* FIXME: generate a call to __builtin_memset. */
1695 if (tree_int_cst_lt (ssize, len))
1696 return false;
1698 /* Diagnose truncation that leaves the copy unterminated. */
1699 maybe_diag_stxncpy_trunc (*gsi, src, len);
1701 /* OK transform into builtin memcpy.  At this point LEN <= SSIZE, so
1702 copying exactly LEN bytes matches strncpy's semantics (any zero
1703 padding beyond the source is already covered by LEN <= SSIZE). */
1702 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1703 if (!fn)
1704 return false;
1706 len = fold_convert_loc (loc, size_type_node, len);
1707 len = force_gimple_operand_gsi (gsi, len, true,
1708 NULL_TREE, true, GSI_SAME_STMT);
1709 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1710 replace_call_with_call_and_fold (gsi, repl);
1712 return true;
1715 /* Fold function call to builtin strchr or strrchr.
1716 If both arguments are constant, evaluate and fold the result,
1717 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1718 In general strlen is significantly faster than strchr
1719 due to being a simpler operation. */
1720 static bool
1721 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1723 gimple *stmt = gsi_stmt (*gsi);
1724 tree str = gimple_call_arg (stmt, 0);
1725 tree c = gimple_call_arg (stmt, 1);
1726 location_t loc = gimple_location (stmt);
1727 const char *p;
1728 char ch;
/* Without a LHS the result is unused; leave the call for DCE.  */
1730 if (!gimple_call_lhs (stmt))
1731 return false;
/* Both arguments constant: evaluate str(r)chr at compile time using the
   host's strchr/strrchr on the constant string.  */
1733 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1735 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1737 if (p1 == NULL)
1739 replace_call_with_value (gsi, integer_zero_node);
1740 return true;
/* Character found: fold to STR plus the constant offset of the match.  */
1743 tree len = build_int_cst (size_type_node, p1 - p);
1744 gimple_seq stmts = NULL;
1745 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1746 POINTER_PLUS_EXPR, str, len);
1747 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1748 gsi_replace_with_seq_vops (gsi, stmts);
1749 return true;
/* The remaining folds only apply when searching for the NUL byte.  */
1752 if (!integer_zerop (c))
1753 return false;
1755 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1756 if (is_strrchr && optimize_function_for_size_p (cfun))
1758 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1760 if (strchr_fn)
1762 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1763 replace_call_with_call_and_fold (gsi, repl);
1764 return true;
1767 return false;
/* Otherwise emit str + strlen (str): strlen is cheaper than str(r)chr.  */
1770 tree len;
1771 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1773 if (!strlen_fn)
1774 return false;
1776 /* Create newstr = strlen (str). */
1777 gimple_seq stmts = NULL;
1778 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1779 gimple_set_location (new_stmt, loc);
1780 len = create_tmp_reg_or_ssa_name (size_type_node);
1781 gimple_call_set_lhs (new_stmt, len);
1782 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1784 /* Create (str p+ strlen (str)). */
1785 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1786 POINTER_PLUS_EXPR, str, len);
1787 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1788 gsi_replace_with_seq_vops (gsi, stmts);
1789 /* gsi now points at the assignment to the lhs, get a
1790 stmt iterator to the strlen.
1791 ??? We can't use gsi_for_stmt as that doesn't work when the
1792 CFG isn't built yet. */
1793 gimple_stmt_iterator gsi2 = *gsi;
1794 gsi_prev (&gsi2);
1795 fold_stmt (&gsi2);
1796 return true;
1799 /* Fold function call to builtin strstr.
1800 If both arguments are constant, evaluate and fold the result,
1801 additionally fold strstr (x, "") into x and strstr (x, "c")
1802 into strchr (x, 'c'). */
1803 static bool
1804 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1806 gimple *stmt = gsi_stmt (*gsi);
1807 tree haystack = gimple_call_arg (stmt, 0);
1808 tree needle = gimple_call_arg (stmt, 1);
1809 const char *p, *q;
1811 if (!gimple_call_lhs (stmt))
1812 return false;
1814 q = c_getstr (needle);
1815 if (q == NULL)
1816 return false;
1818 if ((p = c_getstr (haystack)))
1820 const char *r = strstr (p, q);
1822 if (r == NULL)
1824 replace_call_with_value (gsi, integer_zero_node);
1825 return true;
1828 tree len = build_int_cst (size_type_node, r - p);
1829 gimple_seq stmts = NULL;
1830 gimple *new_stmt
1831 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1832 haystack, len);
1833 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1834 gsi_replace_with_seq_vops (gsi, stmts);
1835 return true;
1838 /* For strstr (x, "") return x. */
1839 if (q[0] == '\0')
1841 replace_call_with_value (gsi, haystack);
1842 return true;
1845 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1846 if (q[1] == '\0')
1848 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1849 if (strchr_fn)
1851 tree c = build_int_cst (integer_type_node, q[0]);
1852 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1853 replace_call_with_call_and_fold (gsi, repl);
1854 return true;
1858 return false;
1861 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1862 to the call.
1864 Return NULL_TREE if no simplification was possible, otherwise return the
1865 simplified form of the call as a tree.
1867 The simplified form may be a constant or other expression which
1868 computes the same value, but in a more efficient manner (including
1869 calls to other builtin functions).
1871 The call may contain arguments which need to be evaluated, but
1872 which are not useful to determine the result of the call. In
1873 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1874 COMPOUND_EXPR will be an argument which must be evaluated.
1875 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1876 COMPOUND_EXPR in the chain will contain the tree for the simplified
1877 form of the builtin function call. */
/* NOTE(review): the paragraphs above are stale text carried over from
   the tree-level folder: this function actually returns bool (true when
   the call at *GSI was simplified in place, false otherwise) and never
   builds COMPOUND_EXPRs.  */
1879 static bool
1880 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1882 gimple *stmt = gsi_stmt (*gsi);
1883 location_t loc = gimple_location (stmt);
1885 const char *p = c_getstr (src);
1887 /* If the string length is zero, return the dst parameter. */
1888 if (p && *p == '\0')
1890 replace_call_with_value (gsi, dst);
1891 return true;
/* Splitting strcat into strlen + memcpy grows the code, so only do it
   when this block is being optimized for speed.  */
1894 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1895 return false;
1897 /* See if we can store by pieces into (dst + strlen(dst)). */
1898 tree newdst;
1899 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1900 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1902 if (!strlen_fn || !memcpy_fn)
1903 return false;
1905 /* If the length of the source string isn't computable don't
1906 split strcat into strlen and memcpy. */
1907 tree len = get_maxval_strlen (src, 0);
1908 if (! len)
1909 return false;
1911 /* Create strlen (dst). */
1912 gimple_seq stmts = NULL, stmts2;
1913 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1914 gimple_set_location (repl, loc);
1915 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1916 gimple_call_set_lhs (repl, newdst);
1917 gimple_seq_add_stmt_without_update (&stmts, repl);
1919 /* Create (dst p+ strlen (dst)). */
1920 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1921 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1922 gimple_seq_add_seq_without_update (&stmts, stmts2);
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
1924 len = fold_convert_loc (loc, size_type_node, len);
1925 len = size_binop_loc (loc, PLUS_EXPR, len,
1926 build_int_cst (size_type_node, 1));
1927 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1928 gimple_seq_add_seq_without_update (&stmts, stmts2);
1930 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1931 gimple_seq_add_stmt_without_update (&stmts, repl);
/* strcat returns DST; if the call's result is used, materialize that
   into the LHS after the copy.  */
1932 if (gimple_call_lhs (stmt))
1934 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1935 gimple_seq_add_stmt_without_update (&stmts, repl);
1936 gsi_replace_with_seq_vops (gsi, stmts);
1937 /* gsi now points at the assignment to the lhs, get a
1938 stmt iterator to the memcpy call.
1939 ??? We can't use gsi_for_stmt as that doesn't work when the
1940 CFG isn't built yet. */
1941 gimple_stmt_iterator gsi2 = *gsi;
1942 gsi_prev (&gsi2);
1943 fold_stmt (&gsi2);
1945 else
1947 gsi_replace_with_seq_vops (gsi, stmts);
1948 fold_stmt (gsi);
1950 return true;
1953 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1954 are the arguments to the call. */
1956 static bool
1957 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1959 gimple *stmt = gsi_stmt (*gsi);
1960 tree dest = gimple_call_arg (stmt, 0);
1961 tree src = gimple_call_arg (stmt, 1);
1962 tree size = gimple_call_arg (stmt, 2);
1963 tree fn;
1964 const char *p;
1967 p = c_getstr (src);
1968 /* If the SRC parameter is "", return DEST. */
1969 if (p && *p == '\0')
1971 replace_call_with_value (gsi, dest);
1972 return true;
1975 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1976 return false;
1978 /* If __builtin_strcat_chk is used, assume strcat is available. */
1979 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1980 if (!fn)
1981 return false;
1983 gimple *repl = gimple_build_call (fn, 2, dest, src);
1984 replace_call_with_call_and_fold (gsi, repl);
1985 return true;
1988 /* Simplify a call to the strncat builtin.  Folds no-op calls to DST,
   diagnoses suspicious bounds, and lowers the call to strcat when the
   bound provably never truncates.  Returns true if a simplification
   was made, false otherwise. */
1990 static bool
1991 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1993 gimple *stmt = gsi_stmt (*gsi);
1994 tree dst = gimple_call_arg (stmt, 0);
1995 tree src = gimple_call_arg (stmt, 1);
1996 tree len = gimple_call_arg (stmt, 2);
1998 const char *p = c_getstr (src);
2000 /* If the requested length is zero, or the src parameter string
2001 length is zero, return the dst parameter. */
2002 if (integer_zerop (len) || (p && *p == '\0'))
2004 replace_call_with_value (gsi, dst);
2005 return true;
/* The folds below need both a constant bound and a constant source.  */
2008 if (TREE_CODE (len) != INTEGER_CST || !p)
2009 return false;
2011 unsigned srclen = strlen (p);
2013 int cmpsrc = compare_tree_int (len, srclen);
2015 /* Return early if the requested len is less than the string length.
2016 Warnings will be issued elsewhere later. */
2017 if (cmpsrc < 0)
2018 return false;
2020 unsigned HOST_WIDE_INT dstsize;
2022 bool nowarn = gimple_no_warning_p (stmt);
/* Diagnose a bound equal to or larger than the destination object:
   strncat appends the terminating NUL beyond the LEN copied bytes, so
   such a bound can overflow.  */
2024 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2026 int cmpdst = compare_tree_int (len, dstsize);
2028 if (cmpdst >= 0)
2030 tree fndecl = gimple_call_fndecl (stmt);
2032 /* Strncat copies (at most) LEN bytes and always appends
2033 the terminating NUL so the specified bound should never
2034 be equal to (or greater than) the size of the destination.
2035 If it is, the copy could overflow. */
2036 location_t loc = gimple_location (stmt);
2037 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2038 cmpdst == 0
2039 ? G_("%G%qD specified bound %E equals "
2040 "destination size")
2041 : G_("%G%qD specified bound %E exceeds "
2042 "destination size %wu"),
2043 stmt, fndecl, len, dstsize);
2044 if (nowarn)
2045 gimple_set_no_warning (stmt, true);
2049 if (!nowarn && cmpsrc == 0)
2051 tree fndecl = gimple_call_fndecl (stmt);
2052 location_t loc = gimple_location (stmt);
2054 /* To avoid possible overflow the specified bound should also
2055 not be equal to the length of the source, even when the size
2056 of the destination is unknown (it's not an uncommon mistake
2057 to specify as the bound to strncpy the length of the source). */
2058 if (warning_at (loc, OPT_Wstringop_overflow_,
2059 "%G%qD specified bound %E equals source length",
2060 stmt, fndecl, len))
2061 gimple_set_no_warning (stmt, true);
/* At this point LEN >= strlen (SRC), so the bound never truncates and
   the call behaves exactly like strcat; use the simpler function.  */
2064 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2066 /* If the replacement _DECL isn't initialized, don't do the
2067 transformation. */
2068 if (!fn)
2069 return false;
2071 /* Otherwise, emit a call to strcat. */
2072 gcall *repl = gimple_build_call (fn, 2, dst, src);
2073 replace_call_with_call_and_fold (gsi, repl);
2074 return true;
2077 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2078 LEN, and SIZE. */
2080 static bool
2081 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2083 gimple *stmt = gsi_stmt (*gsi);
2084 tree dest = gimple_call_arg (stmt, 0);
2085 tree src = gimple_call_arg (stmt, 1);
2086 tree len = gimple_call_arg (stmt, 2);
2087 tree size = gimple_call_arg (stmt, 3);
2088 tree fn;
2089 const char *p;
2091 p = c_getstr (src);
2092 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2093 if ((p && *p == '\0')
2094 || integer_zerop (len))
2096 replace_call_with_value (gsi, dest);
2097 return true;
2100 if (! tree_fits_uhwi_p (size))
2101 return false;
2103 if (! integer_all_onesp (size))
2105 tree src_len = c_strlen (src, 1);
2106 if (src_len
2107 && tree_fits_uhwi_p (src_len)
2108 && tree_fits_uhwi_p (len)
2109 && ! tree_int_cst_lt (len, src_len))
2111 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2112 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2113 if (!fn)
2114 return false;
2116 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2117 replace_call_with_call_and_fold (gsi, repl);
2118 return true;
2120 return false;
2123 /* If __builtin_strncat_chk is used, assume strncat is available. */
2124 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2125 if (!fn)
2126 return false;
2128 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2129 replace_call_with_call_and_fold (gsi, repl);
2130 return true;
2133 /* Build and append gimple statements to STMTS that would load a first
2134 character of a memory location identified by STR. LOC is location
2135 of the statement. */
2137 static tree
2138 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2140 tree var;
2142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2143 tree cst_uchar_ptr_node
2144 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2145 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2147 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2148 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2149 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2151 gimple_assign_set_lhs (stmt, var);
2152 gimple_seq_add_stmt_without_update (stmts, stmt);
2154 return var;
2157 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2158 FCODE is the name of the builtin. */
2160 static bool
2161 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2163 gimple *stmt = gsi_stmt (*gsi);
2164 tree callee = gimple_call_fndecl (stmt);
2165 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2167 tree type = integer_type_node;
2168 tree str1 = gimple_call_arg (stmt, 0);
2169 tree str2 = gimple_call_arg (stmt, 1);
2170 tree lhs = gimple_call_lhs (stmt);
2171 HOST_WIDE_INT length = -1;
2173 /* Handle strncmp and strncasecmp functions. */
2174 if (gimple_call_num_args (stmt) == 3)
2176 tree len = gimple_call_arg (stmt, 2);
2177 if (tree_fits_uhwi_p (len))
2178 length = tree_to_uhwi (len);
2181 /* If the LEN parameter is zero, return zero. */
2182 if (length == 0)
2184 replace_call_with_value (gsi, integer_zero_node);
2185 return true;
2188 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2189 if (operand_equal_p (str1, str2, 0))
2191 replace_call_with_value (gsi, integer_zero_node);
2192 return true;
2195 const char *p1 = c_getstr (str1);
2196 const char *p2 = c_getstr (str2);
2198 /* For known strings, return an immediate value. */
2199 if (p1 && p2)
2201 int r = 0;
2202 bool known_result = false;
2204 switch (fcode)
2206 case BUILT_IN_STRCMP:
2207 case BUILT_IN_STRCMP_EQ:
2209 r = strcmp (p1, p2);
2210 known_result = true;
2211 break;
2213 case BUILT_IN_STRNCMP:
2214 case BUILT_IN_STRNCMP_EQ:
2216 if (length == -1)
2217 break;
2218 r = strncmp (p1, p2, length);
2219 known_result = true;
2220 break;
2222 /* Only handleable situation is where the string are equal (result 0),
2223 which is already handled by operand_equal_p case. */
2224 case BUILT_IN_STRCASECMP:
2225 break;
2226 case BUILT_IN_STRNCASECMP:
2228 if (length == -1)
2229 break;
2230 r = strncmp (p1, p2, length);
2231 if (r == 0)
2232 known_result = true;
2233 break;
2235 default:
2236 gcc_unreachable ();
2239 if (known_result)
2241 replace_call_with_value (gsi, build_cmp_result (type, r));
2242 return true;
2246 bool nonzero_length = length >= 1
2247 || fcode == BUILT_IN_STRCMP
2248 || fcode == BUILT_IN_STRCMP_EQ
2249 || fcode == BUILT_IN_STRCASECMP;
2251 location_t loc = gimple_location (stmt);
2253 /* If the second arg is "", return *(const unsigned char*)arg1. */
2254 if (p2 && *p2 == '\0' && nonzero_length)
2256 gimple_seq stmts = NULL;
2257 tree var = gimple_load_first_char (loc, str1, &stmts);
2258 if (lhs)
2260 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2261 gimple_seq_add_stmt_without_update (&stmts, stmt);
2264 gsi_replace_with_seq_vops (gsi, stmts);
2265 return true;
2268 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2269 if (p1 && *p1 == '\0' && nonzero_length)
2271 gimple_seq stmts = NULL;
2272 tree var = gimple_load_first_char (loc, str2, &stmts);
2274 if (lhs)
2276 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2277 stmt = gimple_build_assign (c, NOP_EXPR, var);
2278 gimple_seq_add_stmt_without_update (&stmts, stmt);
2280 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2281 gimple_seq_add_stmt_without_update (&stmts, stmt);
2284 gsi_replace_with_seq_vops (gsi, stmts);
2285 return true;
2288 /* If len parameter is one, return an expression corresponding to
2289 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2290 if (fcode == BUILT_IN_STRNCMP && length == 1)
2292 gimple_seq stmts = NULL;
2293 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2294 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2296 if (lhs)
2298 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2299 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2300 gimple_seq_add_stmt_without_update (&stmts, convert1);
2302 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2303 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2304 gimple_seq_add_stmt_without_update (&stmts, convert2);
2306 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2307 gimple_seq_add_stmt_without_update (&stmts, stmt);
2310 gsi_replace_with_seq_vops (gsi, stmts);
2311 return true;
2314 /* If length is larger than the length of one constant string,
2315 replace strncmp with corresponding strcmp */
2316 if (fcode == BUILT_IN_STRNCMP
2317 && length > 0
2318 && ((p2 && (size_t) length > strlen (p2))
2319 || (p1 && (size_t) length > strlen (p1))))
2321 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2322 if (!fn)
2323 return false;
2324 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2325 replace_call_with_call_and_fold (gsi, repl);
2326 return true;
2329 return false;
2332 /* Fold a call to the memchr pointed by GSI iterator. */
2334 static bool
2335 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2337 gimple *stmt = gsi_stmt (*gsi);
2338 tree lhs = gimple_call_lhs (stmt);
2339 tree arg1 = gimple_call_arg (stmt, 0);
2340 tree arg2 = gimple_call_arg (stmt, 1);
2341 tree len = gimple_call_arg (stmt, 2);
2343 /* If the LEN parameter is zero, return zero. */
2344 if (integer_zerop (len))
2346 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2347 return true;
2350 char c;
2351 if (TREE_CODE (arg2) != INTEGER_CST
2352 || !tree_fits_uhwi_p (len)
2353 || !target_char_cst_p (arg2, &c))
2354 return false;
2356 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2357 unsigned HOST_WIDE_INT string_length;
2358 const char *p1 = c_getstr (arg1, &string_length);
2360 if (p1)
2362 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2363 if (r == NULL)
2365 if (length <= string_length)
2367 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2368 return true;
2371 else
2373 unsigned HOST_WIDE_INT offset = r - p1;
2374 gimple_seq stmts = NULL;
2375 if (lhs != NULL_TREE)
2377 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2378 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2379 arg1, offset_cst);
2380 gimple_seq_add_stmt_without_update (&stmts, stmt);
2382 else
2383 gimple_seq_add_stmt_without_update (&stmts,
2384 gimple_build_nop ());
2386 gsi_replace_with_seq_vops (gsi, stmts);
2387 return true;
2391 return false;
2394 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2395 to the call. IGNORE is true if the value returned
2396 by the builtin will be ignored. UNLOCKED is true is true if this
2397 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2398 the known length of the string. Return NULL_TREE if no simplification
2399 was possible. */
2401 static bool
2402 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2403 tree arg0, tree arg1,
2404 bool unlocked)
2406 gimple *stmt = gsi_stmt (*gsi);
2408 /* If we're using an unlocked function, assume the other unlocked
2409 functions exist explicitly. */
2410 tree const fn_fputc = (unlocked
2411 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2412 : builtin_decl_implicit (BUILT_IN_FPUTC));
2413 tree const fn_fwrite = (unlocked
2414 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2415 : builtin_decl_implicit (BUILT_IN_FWRITE));
2417 /* If the return value is used, don't do the transformation. */
2418 if (gimple_call_lhs (stmt))
2419 return false;
2421 /* Get the length of the string passed to fputs. If the length
2422 can't be determined, punt. */
2423 tree len = get_maxval_strlen (arg0, 0);
2424 if (!len
2425 || TREE_CODE (len) != INTEGER_CST)
2426 return false;
2428 switch (compare_tree_int (len, 1))
2430 case -1: /* length is 0, delete the call entirely . */
2431 replace_call_with_value (gsi, integer_zero_node);
2432 return true;
2434 case 0: /* length is 1, call fputc. */
2436 const char *p = c_getstr (arg0);
2437 if (p != NULL)
2439 if (!fn_fputc)
2440 return false;
2442 gimple *repl = gimple_build_call (fn_fputc, 2,
2443 build_int_cst
2444 (integer_type_node, p[0]), arg1);
2445 replace_call_with_call_and_fold (gsi, repl);
2446 return true;
2449 /* FALLTHROUGH */
2450 case 1: /* length is greater than 1, call fwrite. */
2452 /* If optimizing for size keep fputs. */
2453 if (optimize_function_for_size_p (cfun))
2454 return false;
2455 /* New argument list transforming fputs(string, stream) to
2456 fwrite(string, 1, len, stream). */
2457 if (!fn_fwrite)
2458 return false;
2460 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2461 size_one_node, len, arg1);
2462 replace_call_with_call_and_fold (gsi, repl);
2463 return true;
2465 default:
2466 gcc_unreachable ();
2468 return false;
2471 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2472 DEST, SRC, LEN, and SIZE are the arguments to the call.
2473 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2474 code of the builtin. If MAXLEN is not NULL, it is maximum length
2475 passed as third argument. */
2477 static bool
2478 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2479 tree dest, tree src, tree len, tree size,
2480 enum built_in_function fcode)
2482 gimple *stmt = gsi_stmt (*gsi);
2483 location_t loc = gimple_location (stmt);
2484 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2485 tree fn;
2487 /* If SRC and DEST are the same (and not volatile), return DEST
2488 (resp. DEST+LEN for __mempcpy_chk). */
2489 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2491 if (fcode != BUILT_IN_MEMPCPY_CHK)
2493 replace_call_with_value (gsi, dest);
2494 return true;
2496 else
2498 gimple_seq stmts = NULL;
2499 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2500 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2501 TREE_TYPE (dest), dest, len);
2502 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2503 replace_call_with_value (gsi, temp);
2504 return true;
2508 if (! tree_fits_uhwi_p (size))
2509 return false;
2511 tree maxlen = get_maxval_strlen (len, 2);
2512 if (! integer_all_onesp (size))
2514 if (! tree_fits_uhwi_p (len))
2516 /* If LEN is not constant, try MAXLEN too.
2517 For MAXLEN only allow optimizing into non-_ocs function
2518 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2519 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2521 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2523 /* (void) __mempcpy_chk () can be optimized into
2524 (void) __memcpy_chk (). */
2525 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2526 if (!fn)
2527 return false;
2529 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2530 replace_call_with_call_and_fold (gsi, repl);
2531 return true;
2533 return false;
2536 else
2537 maxlen = len;
2539 if (tree_int_cst_lt (size, maxlen))
2540 return false;
2543 fn = NULL_TREE;
2544 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2545 mem{cpy,pcpy,move,set} is available. */
2546 switch (fcode)
2548 case BUILT_IN_MEMCPY_CHK:
2549 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2550 break;
2551 case BUILT_IN_MEMPCPY_CHK:
2552 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2553 break;
2554 case BUILT_IN_MEMMOVE_CHK:
2555 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2556 break;
2557 case BUILT_IN_MEMSET_CHK:
2558 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2559 break;
2560 default:
2561 break;
2564 if (!fn)
2565 return false;
2567 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2568 replace_call_with_call_and_fold (gsi, repl);
2569 return true;
2572 /* Fold a call to the __st[rp]cpy_chk builtin.
2573 DEST, SRC, and SIZE are the arguments to the call.
2574 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2575 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2576 strings passed as second argument. */
2578 static bool
2579 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2580 tree dest,
2581 tree src, tree size,
2582 enum built_in_function fcode)
2584 gimple *stmt = gsi_stmt (*gsi);
2585 location_t loc = gimple_location (stmt);
2586 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2587 tree len, fn;
2589 /* If SRC and DEST are the same (and not volatile), return DEST. */
2590 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2592 /* Issue -Wrestrict unless the pointers are null (those do
2593 not point to objects and so do not indicate an overlap;
2594 such calls could be the result of sanitization and jump
2595 threading). */
2596 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2598 tree func = gimple_call_fndecl (stmt);
2600 warning_at (loc, OPT_Wrestrict,
2601 "%qD source argument is the same as destination",
2602 func);
2605 replace_call_with_value (gsi, dest);
2606 return true;
2609 if (! tree_fits_uhwi_p (size))
2610 return false;
2612 tree maxlen = get_maxval_strlen (src, 1);
2613 if (! integer_all_onesp (size))
2615 len = c_strlen (src, 1);
2616 if (! len || ! tree_fits_uhwi_p (len))
2618 /* If LEN is not constant, try MAXLEN too.
2619 For MAXLEN only allow optimizing into non-_ocs function
2620 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2621 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2623 if (fcode == BUILT_IN_STPCPY_CHK)
2625 if (! ignore)
2626 return false;
2628 /* If return value of __stpcpy_chk is ignored,
2629 optimize into __strcpy_chk. */
2630 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2631 if (!fn)
2632 return false;
2634 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2635 replace_call_with_call_and_fold (gsi, repl);
2636 return true;
2639 if (! len || TREE_SIDE_EFFECTS (len))
2640 return false;
2642 /* If c_strlen returned something, but not a constant,
2643 transform __strcpy_chk into __memcpy_chk. */
2644 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2645 if (!fn)
2646 return false;
2648 gimple_seq stmts = NULL;
2649 len = gimple_convert (&stmts, loc, size_type_node, len);
2650 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2651 build_int_cst (size_type_node, 1));
2652 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2653 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2654 replace_call_with_call_and_fold (gsi, repl);
2655 return true;
2658 else
2659 maxlen = len;
2661 if (! tree_int_cst_lt (maxlen, size))
2662 return false;
2665 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2666 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2667 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2668 if (!fn)
2669 return false;
2671 gimple *repl = gimple_build_call (fn, 2, dest, src);
2672 replace_call_with_call_and_fold (gsi, repl);
2673 return true;
2676 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2677 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2678 length passed as third argument. IGNORE is true if return value can be
2679 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2681 static bool
2682 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2683 tree dest, tree src,
2684 tree len, tree size,
2685 enum built_in_function fcode)
2687 gimple *stmt = gsi_stmt (*gsi);
2688 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2689 tree fn;
2691 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2693 /* If return value of __stpncpy_chk is ignored,
2694 optimize into __strncpy_chk. */
2695 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2696 if (fn)
2698 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2699 replace_call_with_call_and_fold (gsi, repl);
2700 return true;
2704 if (! tree_fits_uhwi_p (size))
2705 return false;
2707 tree maxlen = get_maxval_strlen (len, 2);
2708 if (! integer_all_onesp (size))
2710 if (! tree_fits_uhwi_p (len))
2712 /* If LEN is not constant, try MAXLEN too.
2713 For MAXLEN only allow optimizing into non-_ocs function
2714 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2715 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2716 return false;
2718 else
2719 maxlen = len;
2721 if (tree_int_cst_lt (size, maxlen))
2722 return false;
2725 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2726 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2727 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2728 if (!fn)
2729 return false;
2731 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2732 replace_call_with_call_and_fold (gsi, repl);
2733 return true;
2736 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2737 Return NULL_TREE if no simplification can be made. */
2739 static bool
2740 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2742 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2743 location_t loc = gimple_location (stmt);
2744 tree dest = gimple_call_arg (stmt, 0);
2745 tree src = gimple_call_arg (stmt, 1);
2746 tree fn, len, lenp1;
2748 /* If the result is unused, replace stpcpy with strcpy. */
2749 if (gimple_call_lhs (stmt) == NULL_TREE)
2751 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2752 if (!fn)
2753 return false;
2754 gimple_call_set_fndecl (stmt, fn);
2755 fold_stmt (gsi);
2756 return true;
2759 len = c_strlen (src, 1);
2760 if (!len
2761 || TREE_CODE (len) != INTEGER_CST)
2762 return false;
2764 if (optimize_function_for_size_p (cfun)
2765 /* If length is zero it's small enough. */
2766 && !integer_zerop (len))
2767 return false;
2769 /* If the source has a known length replace stpcpy with memcpy. */
2770 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2771 if (!fn)
2772 return false;
2774 gimple_seq stmts = NULL;
2775 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2776 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2777 tem, build_int_cst (size_type_node, 1));
2778 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2779 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2780 gimple_set_vuse (repl, gimple_vuse (stmt));
2781 gimple_set_vdef (repl, gimple_vdef (stmt));
2782 if (gimple_vdef (repl)
2783 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2784 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2785 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2786 /* Replace the result with dest + len. */
2787 stmts = NULL;
2788 tem = gimple_convert (&stmts, loc, sizetype, len);
2789 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2790 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2791 POINTER_PLUS_EXPR, dest, tem);
2792 gsi_replace (gsi, ret, false);
2793 /* Finally fold the memcpy call. */
2794 gimple_stmt_iterator gsi2 = *gsi;
2795 gsi_prev (&gsi2);
2796 fold_stmt (&gsi2);
2797 return true;
2800 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2801 NULL_TREE if a normal call should be emitted rather than expanding
2802 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2803 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2804 passed as second argument. */
2806 static bool
2807 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2808 enum built_in_function fcode)
2810 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2811 tree dest, size, len, fn, fmt, flag;
2812 const char *fmt_str;
2814 /* Verify the required arguments in the original call. */
2815 if (gimple_call_num_args (stmt) < 5)
2816 return false;
2818 dest = gimple_call_arg (stmt, 0);
2819 len = gimple_call_arg (stmt, 1);
2820 flag = gimple_call_arg (stmt, 2);
2821 size = gimple_call_arg (stmt, 3);
2822 fmt = gimple_call_arg (stmt, 4);
2824 if (! tree_fits_uhwi_p (size))
2825 return false;
2827 if (! integer_all_onesp (size))
2829 tree maxlen = get_maxval_strlen (len, 2);
2830 if (! tree_fits_uhwi_p (len))
2832 /* If LEN is not constant, try MAXLEN too.
2833 For MAXLEN only allow optimizing into non-_ocs function
2834 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2835 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2836 return false;
2838 else
2839 maxlen = len;
2841 if (tree_int_cst_lt (size, maxlen))
2842 return false;
2845 if (!init_target_chars ())
2846 return false;
2848 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2849 or if format doesn't contain % chars or is "%s". */
2850 if (! integer_zerop (flag))
2852 fmt_str = c_getstr (fmt);
2853 if (fmt_str == NULL)
2854 return false;
2855 if (strchr (fmt_str, target_percent) != NULL
2856 && strcmp (fmt_str, target_percent_s))
2857 return false;
2860 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2861 available. */
2862 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2863 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2864 if (!fn)
2865 return false;
2867 /* Replace the called function and the first 5 argument by 3 retaining
2868 trailing varargs. */
2869 gimple_call_set_fndecl (stmt, fn);
2870 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2871 gimple_call_set_arg (stmt, 0, dest);
2872 gimple_call_set_arg (stmt, 1, len);
2873 gimple_call_set_arg (stmt, 2, fmt);
2874 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2875 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2876 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2877 fold_stmt (gsi);
2878 return true;
2881 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2882 Return NULL_TREE if a normal call should be emitted rather than
2883 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2884 or BUILT_IN_VSPRINTF_CHK. */
2886 static bool
2887 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2888 enum built_in_function fcode)
2890 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2891 tree dest, size, len, fn, fmt, flag;
2892 const char *fmt_str;
2893 unsigned nargs = gimple_call_num_args (stmt);
2895 /* Verify the required arguments in the original call. */
2896 if (nargs < 4)
2897 return false;
2898 dest = gimple_call_arg (stmt, 0);
2899 flag = gimple_call_arg (stmt, 1);
2900 size = gimple_call_arg (stmt, 2);
2901 fmt = gimple_call_arg (stmt, 3);
2903 if (! tree_fits_uhwi_p (size))
2904 return false;
2906 len = NULL_TREE;
2908 if (!init_target_chars ())
2909 return false;
2911 /* Check whether the format is a literal string constant. */
2912 fmt_str = c_getstr (fmt);
2913 if (fmt_str != NULL)
2915 /* If the format doesn't contain % args or %%, we know the size. */
2916 if (strchr (fmt_str, target_percent) == 0)
2918 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2919 len = build_int_cstu (size_type_node, strlen (fmt_str));
2921 /* If the format is "%s" and first ... argument is a string literal,
2922 we know the size too. */
2923 else if (fcode == BUILT_IN_SPRINTF_CHK
2924 && strcmp (fmt_str, target_percent_s) == 0)
2926 tree arg;
2928 if (nargs == 5)
2930 arg = gimple_call_arg (stmt, 4);
2931 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2933 len = c_strlen (arg, 1);
2934 if (! len || ! tree_fits_uhwi_p (len))
2935 len = NULL_TREE;
2941 if (! integer_all_onesp (size))
2943 if (! len || ! tree_int_cst_lt (len, size))
2944 return false;
2947 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2948 or if format doesn't contain % chars or is "%s". */
2949 if (! integer_zerop (flag))
2951 if (fmt_str == NULL)
2952 return false;
2953 if (strchr (fmt_str, target_percent) != NULL
2954 && strcmp (fmt_str, target_percent_s))
2955 return false;
2958 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2959 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2960 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2961 if (!fn)
2962 return false;
2964 /* Replace the called function and the first 4 argument by 2 retaining
2965 trailing varargs. */
2966 gimple_call_set_fndecl (stmt, fn);
2967 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2968 gimple_call_set_arg (stmt, 0, dest);
2969 gimple_call_set_arg (stmt, 1, fmt);
2970 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2971 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2972 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2973 fold_stmt (gsi);
2974 return true;
2977 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2978 ORIG may be null if this is a 2-argument call. We don't attempt to
2979 simplify calls with more than 3 arguments.
2981 Return true if simplification was possible, otherwise false. */
2983 bool
2984 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2986 gimple *stmt = gsi_stmt (*gsi);
2987 tree dest = gimple_call_arg (stmt, 0);
2988 tree fmt = gimple_call_arg (stmt, 1);
2989 tree orig = NULL_TREE;
2990 const char *fmt_str = NULL;
2992 /* Verify the required arguments in the original call. We deal with two
2993 types of sprintf() calls: 'sprintf (str, fmt)' and
2994 'sprintf (dest, "%s", orig)'. */
2995 if (gimple_call_num_args (stmt) > 3)
2996 return false;
2998 if (gimple_call_num_args (stmt) == 3)
2999 orig = gimple_call_arg (stmt, 2);
3001 /* Check whether the format is a literal string constant. */
3002 fmt_str = c_getstr (fmt);
3003 if (fmt_str == NULL)
3004 return false;
3006 if (!init_target_chars ())
3007 return false;
3009 /* If the format doesn't contain % args or %%, use strcpy. */
3010 if (strchr (fmt_str, target_percent) == NULL)
3012 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3014 if (!fn)
3015 return false;
3017 /* Don't optimize sprintf (buf, "abc", ptr++). */
3018 if (orig)
3019 return false;
3021 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3022 'format' is known to contain no % formats. */
3023 gimple_seq stmts = NULL;
3024 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3025 gimple_seq_add_stmt_without_update (&stmts, repl);
3026 if (gimple_call_lhs (stmt))
3028 repl = gimple_build_assign (gimple_call_lhs (stmt),
3029 build_int_cst (integer_type_node,
3030 strlen (fmt_str)));
3031 gimple_seq_add_stmt_without_update (&stmts, repl);
3032 gsi_replace_with_seq_vops (gsi, stmts);
3033 /* gsi now points at the assignment to the lhs, get a
3034 stmt iterator to the memcpy call.
3035 ??? We can't use gsi_for_stmt as that doesn't work when the
3036 CFG isn't built yet. */
3037 gimple_stmt_iterator gsi2 = *gsi;
3038 gsi_prev (&gsi2);
3039 fold_stmt (&gsi2);
3041 else
3043 gsi_replace_with_seq_vops (gsi, stmts);
3044 fold_stmt (gsi);
3046 return true;
3049 /* If the format is "%s", use strcpy if the result isn't used. */
3050 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3052 tree fn;
3053 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3055 if (!fn)
3056 return false;
3058 /* Don't crash on sprintf (str1, "%s"). */
3059 if (!orig)
3060 return false;
3062 tree orig_len = NULL_TREE;
3063 if (gimple_call_lhs (stmt))
3065 orig_len = get_maxval_strlen (orig, 0);
3066 if (!orig_len)
3067 return false;
3070 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3071 gimple_seq stmts = NULL;
3072 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3073 gimple_seq_add_stmt_without_update (&stmts, repl);
3074 if (gimple_call_lhs (stmt))
3076 if (!useless_type_conversion_p (integer_type_node,
3077 TREE_TYPE (orig_len)))
3078 orig_len = fold_convert (integer_type_node, orig_len);
3079 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3080 gimple_seq_add_stmt_without_update (&stmts, repl);
3081 gsi_replace_with_seq_vops (gsi, stmts);
3082 /* gsi now points at the assignment to the lhs, get a
3083 stmt iterator to the memcpy call.
3084 ??? We can't use gsi_for_stmt as that doesn't work when the
3085 CFG isn't built yet. */
3086 gimple_stmt_iterator gsi2 = *gsi;
3087 gsi_prev (&gsi2);
3088 fold_stmt (&gsi2);
3090 else
3092 gsi_replace_with_seq_vops (gsi, stmts);
3093 fold_stmt (gsi);
3095 return true;
3097 return false;
3100 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3101 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3102 attempt to simplify calls with more than 4 arguments.
3104 Return true if simplification was possible, otherwise false. */
3106 bool
3107 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3109 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3110 tree dest = gimple_call_arg (stmt, 0);
3111 tree destsize = gimple_call_arg (stmt, 1);
3112 tree fmt = gimple_call_arg (stmt, 2);
3113 tree orig = NULL_TREE;
3114 const char *fmt_str = NULL;
3116 if (gimple_call_num_args (stmt) > 4)
3117 return false;
3119 if (gimple_call_num_args (stmt) == 4)
3120 orig = gimple_call_arg (stmt, 3);
3122 if (!tree_fits_uhwi_p (destsize))
3123 return false;
3124 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3126 /* Check whether the format is a literal string constant. */
3127 fmt_str = c_getstr (fmt);
3128 if (fmt_str == NULL)
3129 return false;
3131 if (!init_target_chars ())
3132 return false;
3134 /* If the format doesn't contain % args or %%, use strcpy. */
3135 if (strchr (fmt_str, target_percent) == NULL)
3137 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3138 if (!fn)
3139 return false;
3141 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3142 if (orig)
3143 return false;
3145 /* We could expand this as
3146 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3147 or to
3148 memcpy (str, fmt_with_nul_at_cstm1, cst);
3149 but in the former case that might increase code size
3150 and in the latter case grow .rodata section too much.
3151 So punt for now. */
3152 size_t len = strlen (fmt_str);
3153 if (len >= destlen)
3154 return false;
3156 gimple_seq stmts = NULL;
3157 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3158 gimple_seq_add_stmt_without_update (&stmts, repl);
3159 if (gimple_call_lhs (stmt))
3161 repl = gimple_build_assign (gimple_call_lhs (stmt),
3162 build_int_cst (integer_type_node, len));
3163 gimple_seq_add_stmt_without_update (&stmts, repl);
3164 gsi_replace_with_seq_vops (gsi, stmts);
3165 /* gsi now points at the assignment to the lhs, get a
3166 stmt iterator to the memcpy call.
3167 ??? We can't use gsi_for_stmt as that doesn't work when the
3168 CFG isn't built yet. */
3169 gimple_stmt_iterator gsi2 = *gsi;
3170 gsi_prev (&gsi2);
3171 fold_stmt (&gsi2);
3173 else
3175 gsi_replace_with_seq_vops (gsi, stmts);
3176 fold_stmt (gsi);
3178 return true;
3181 /* If the format is "%s", use strcpy if the result isn't used. */
3182 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3184 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3185 if (!fn)
3186 return false;
3188 /* Don't crash on snprintf (str1, cst, "%s"). */
3189 if (!orig)
3190 return false;
3192 tree orig_len = get_maxval_strlen (orig, 0);
3193 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3194 return false;
3196 /* We could expand this as
3197 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3198 or to
3199 memcpy (str1, str2_with_nul_at_cstm1, cst);
3200 but in the former case that might increase code size
3201 and in the latter case grow .rodata section too much.
3202 So punt for now. */
3203 if (compare_tree_int (orig_len, destlen) >= 0)
3204 return false;
3206 /* Convert snprintf (str1, cst, "%s", str2) into
3207 strcpy (str1, str2) if strlen (str2) < cst. */
3208 gimple_seq stmts = NULL;
3209 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3210 gimple_seq_add_stmt_without_update (&stmts, repl);
3211 if (gimple_call_lhs (stmt))
3213 if (!useless_type_conversion_p (integer_type_node,
3214 TREE_TYPE (orig_len)))
3215 orig_len = fold_convert (integer_type_node, orig_len);
3216 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3217 gimple_seq_add_stmt_without_update (&stmts, repl);
3218 gsi_replace_with_seq_vops (gsi, stmts);
3219 /* gsi now points at the assignment to the lhs, get a
3220 stmt iterator to the memcpy call.
3221 ??? We can't use gsi_for_stmt as that doesn't work when the
3222 CFG isn't built yet. */
3223 gimple_stmt_iterator gsi2 = *gsi;
3224 gsi_prev (&gsi2);
3225 fold_stmt (&gsi2);
3227 else
3229 gsi_replace_with_seq_vops (gsi, stmts);
3230 fold_stmt (gsi);
3232 return true;
3234 return false;
3237 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3238 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3239 more than 3 arguments, and ARG may be null in the 2-argument case.
   Return true if the call was folded into a simpler form, false if no
   simplification was possible.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */
3245 static bool
3246 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3247 tree fp, tree fmt, tree arg,
3248 enum built_in_function fcode)
3250 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3251 tree fn_fputc, fn_fputs;
3252 const char *fmt_str = NULL;
3254 /* If the return value is used, don't do the transformation. */
3255 if (gimple_call_lhs (stmt) != NULL_TREE)
3256 return false;
3258 /* Check whether the format is a literal string constant. */
3259 fmt_str = c_getstr (fmt);
3260 if (fmt_str == NULL)
3261 return false;
3263 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3265 /* If we're using an unlocked function, assume the other
3266 unlocked functions exist explicitly. */
3267 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3268 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3270 else
3272 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3273 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3276 if (!init_target_chars ())
3277 return false;
3279 /* If the format doesn't contain % args or %%, use strcpy. */
3280 if (strchr (fmt_str, target_percent) == NULL)
3282 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3283 && arg)
3284 return false;
3286 /* If the format specifier was "", fprintf does nothing. */
3287 if (fmt_str[0] == '\0')
3289 replace_call_with_value (gsi, NULL_TREE);
3290 return true;
3293 /* When "string" doesn't contain %, replace all cases of
3294 fprintf (fp, string) with fputs (string, fp). The fputs
3295 builtin will take care of special cases like length == 1. */
3296 if (fn_fputs)
3298 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3299 replace_call_with_call_and_fold (gsi, repl);
3300 return true;
3304 /* The other optimizations can be done only on the non-va_list variants. */
3305 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3306 return false;
3308 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3309 else if (strcmp (fmt_str, target_percent_s) == 0)
3311 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3312 return false;
3313 if (fn_fputs)
3315 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3316 replace_call_with_call_and_fold (gsi, repl);
3317 return true;
3321 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3322 else if (strcmp (fmt_str, target_percent_c) == 0)
3324 if (!arg
3325 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3326 return false;
3327 if (fn_fputc)
3329 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3330 replace_call_with_call_and_fold (gsi, repl);
3331 return true;
3335 return false;
3338 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3339 FMT and ARG are the arguments to the call; we don't fold cases with
3340 more than 2 arguments, and ARG may be null if this is a 1-argument case.
   Return true if the call was folded into a simpler form, false if no
   simplification was possible.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */
3346 static bool
3347 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3348 tree arg, enum built_in_function fcode)
3350 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3351 tree fn_putchar, fn_puts, newarg;
3352 const char *fmt_str = NULL;
3354 /* If the return value is used, don't do the transformation. */
3355 if (gimple_call_lhs (stmt) != NULL_TREE)
3356 return false;
3358 /* Check whether the format is a literal string constant. */
3359 fmt_str = c_getstr (fmt);
3360 if (fmt_str == NULL)
3361 return false;
3363 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3365 /* If we're using an unlocked function, assume the other
3366 unlocked functions exist explicitly. */
3367 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3368 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3370 else
3372 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3373 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3376 if (!init_target_chars ())
3377 return false;
3379 if (strcmp (fmt_str, target_percent_s) == 0
3380 || strchr (fmt_str, target_percent) == NULL)
3382 const char *str;
3384 if (strcmp (fmt_str, target_percent_s) == 0)
3386 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3387 return false;
3389 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3390 return false;
3392 str = c_getstr (arg);
3393 if (str == NULL)
3394 return false;
3396 else
3398 /* The format specifier doesn't contain any '%' characters. */
3399 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3400 && arg)
3401 return false;
3402 str = fmt_str;
3405 /* If the string was "", printf does nothing. */
3406 if (str[0] == '\0')
3408 replace_call_with_value (gsi, NULL_TREE);
3409 return true;
3412 /* If the string has length of 1, call putchar. */
3413 if (str[1] == '\0')
3415 /* Given printf("c"), (where c is any one character,)
3416 convert "c"[0] to an int and pass that to the replacement
3417 function. */
3418 newarg = build_int_cst (integer_type_node, str[0]);
3419 if (fn_putchar)
3421 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3422 replace_call_with_call_and_fold (gsi, repl);
3423 return true;
3426 else
3428 /* If the string was "string\n", call puts("string"). */
3429 size_t len = strlen (str);
3430 if ((unsigned char)str[len - 1] == target_newline
3431 && (size_t) (int) len == len
3432 && (int) len > 0)
3434 char *newstr;
3436 /* Create a NUL-terminated string that's one char shorter
3437 than the original, stripping off the trailing '\n'. */
3438 newstr = xstrdup (str);
3439 newstr[len - 1] = '\0';
3440 newarg = build_string_literal (len, newstr);
3441 free (newstr);
3442 if (fn_puts)
3444 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3445 replace_call_with_call_and_fold (gsi, repl);
3446 return true;
3449 else
3450 /* We'd like to arrange to call fputs(string,stdout) here,
3451 but we need stdout and don't have a way to get it yet. */
3452 return false;
3456 /* The other optimizations can be done only on the non-va_list variants. */
3457 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3458 return false;
3460 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3461 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3463 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3464 return false;
3465 if (fn_puts)
3467 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3468 replace_call_with_call_and_fold (gsi, repl);
3469 return true;
3473 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3474 else if (strcmp (fmt_str, target_percent_c) == 0)
3476 if (!arg || ! useless_type_conversion_p (integer_type_node,
3477 TREE_TYPE (arg)))
3478 return false;
3479 if (fn_putchar)
3481 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3482 replace_call_with_call_and_fold (gsi, repl);
3483 return true;
3487 return false;
/* Fold a call to __builtin_strlen: replace it with a constant when the
   length is known exactly, otherwise record the computed length range
   on the call's SSA result.  */
3494 static bool
3495 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3497 gimple *stmt = gsi_stmt (*gsi);
3499 wide_int minlen;
3500 wide_int maxlen;
3502 tree lenrange[2];
3503 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
3504 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3505 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3507 /* The range of lengths refers to either a single constant
3508 string or to the longest and shortest constant string
3509 referenced by the argument of the strlen() call, or to
3510 the strings that can possibly be stored in the arrays
3511 the argument refers to. */
3512 minlen = wi::to_wide (lenrange[0]);
3513 maxlen = wi::to_wide (lenrange[1]);
3515 else
3517 unsigned prec = TYPE_PRECISION (sizetype);
3519 minlen = wi::shwi (0, prec);
3520 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3523 if (minlen == maxlen)
3525 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3526 true, GSI_SAME_STMT);
3527 replace_call_with_value (gsi, lenrange[0]);
3528 return true;
3531 if (tree lhs = gimple_call_lhs (stmt))
3532 if (TREE_CODE (lhs) == SSA_NAME
3533 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3534 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3536 return false;
3539 /* Fold a call to __builtin_acc_on_device. */
3541 static bool
3542 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3544 /* Defer folding until we know which compiler we're in. */
3545 if (symtab->state != EXPANSION)
3546 return false;
3548 unsigned val_host = GOMP_DEVICE_HOST;
3549 unsigned val_dev = GOMP_DEVICE_NONE;
3551 #ifdef ACCEL_COMPILER
3552 val_host = GOMP_DEVICE_NOT_HOST;
3553 val_dev = ACCEL_COMPILER_acc_device;
3554 #endif
3556 location_t loc = gimple_location (gsi_stmt (*gsi));
3558 tree host_eq = make_ssa_name (boolean_type_node);
3559 gimple *host_ass = gimple_build_assign
3560 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3561 gimple_set_location (host_ass, loc);
3562 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3564 tree dev_eq = make_ssa_name (boolean_type_node);
3565 gimple *dev_ass = gimple_build_assign
3566 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3567 gimple_set_location (dev_ass, loc);
3568 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3570 tree result = make_ssa_name (boolean_type_node);
3571 gimple *result_ass = gimple_build_assign
3572 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3573 gimple_set_location (result_ass, loc);
3574 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3576 replace_call_with_value (gsi, result);
3578 return true;
3581 /* Fold realloc (0, n) -> malloc (n). */
3583 static bool
3584 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3586 gimple *stmt = gsi_stmt (*gsi);
3587 tree arg = gimple_call_arg (stmt, 0);
3588 tree size = gimple_call_arg (stmt, 1);
3590 if (operand_equal_p (arg, null_pointer_node, 0))
3592 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3593 if (fn_malloc)
3595 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3596 replace_call_with_call_and_fold (gsi, repl);
3597 return true;
3600 return false;
3603 /* Fold the non-target builtin at *GSI and return whether any simplification
3604 was made. */
3606 static bool
3607 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3609 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3610 tree callee = gimple_call_fndecl (stmt);
3612 /* Give up for always_inline inline builtins until they are
3613 inlined. */
3614 if (avoid_folding_inline_builtin (callee))
3615 return false;
3617 unsigned n = gimple_call_num_args (stmt);
3618 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3619 switch (fcode)
3621 case BUILT_IN_BCMP:
3622 return gimple_fold_builtin_bcmp (gsi);
3623 case BUILT_IN_BCOPY:
3624 return gimple_fold_builtin_bcopy (gsi);
3625 case BUILT_IN_BZERO:
3626 return gimple_fold_builtin_bzero (gsi);
3628 case BUILT_IN_MEMSET:
3629 return gimple_fold_builtin_memset (gsi,
3630 gimple_call_arg (stmt, 1),
3631 gimple_call_arg (stmt, 2));
3632 case BUILT_IN_MEMCPY:
3633 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3634 gimple_call_arg (stmt, 1), 0);
3635 case BUILT_IN_MEMPCPY:
3636 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3637 gimple_call_arg (stmt, 1), 1);
3638 case BUILT_IN_MEMMOVE:
3639 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3640 gimple_call_arg (stmt, 1), 3);
3641 case BUILT_IN_SPRINTF_CHK:
3642 case BUILT_IN_VSPRINTF_CHK:
3643 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3644 case BUILT_IN_STRCAT_CHK:
3645 return gimple_fold_builtin_strcat_chk (gsi);
3646 case BUILT_IN_STRNCAT_CHK:
3647 return gimple_fold_builtin_strncat_chk (gsi);
3648 case BUILT_IN_STRLEN:
3649 return gimple_fold_builtin_strlen (gsi);
3650 case BUILT_IN_STRCPY:
3651 return gimple_fold_builtin_strcpy (gsi,
3652 gimple_call_arg (stmt, 0),
3653 gimple_call_arg (stmt, 1));
3654 case BUILT_IN_STRNCPY:
3655 return gimple_fold_builtin_strncpy (gsi,
3656 gimple_call_arg (stmt, 0),
3657 gimple_call_arg (stmt, 1),
3658 gimple_call_arg (stmt, 2));
3659 case BUILT_IN_STRCAT:
3660 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3661 gimple_call_arg (stmt, 1));
3662 case BUILT_IN_STRNCAT:
3663 return gimple_fold_builtin_strncat (gsi);
3664 case BUILT_IN_INDEX:
3665 case BUILT_IN_STRCHR:
3666 return gimple_fold_builtin_strchr (gsi, false);
3667 case BUILT_IN_RINDEX:
3668 case BUILT_IN_STRRCHR:
3669 return gimple_fold_builtin_strchr (gsi, true);
3670 case BUILT_IN_STRSTR:
3671 return gimple_fold_builtin_strstr (gsi);
3672 case BUILT_IN_STRCMP:
3673 case BUILT_IN_STRCMP_EQ:
3674 case BUILT_IN_STRCASECMP:
3675 case BUILT_IN_STRNCMP:
3676 case BUILT_IN_STRNCMP_EQ:
3677 case BUILT_IN_STRNCASECMP:
3678 return gimple_fold_builtin_string_compare (gsi);
3679 case BUILT_IN_MEMCHR:
3680 return gimple_fold_builtin_memchr (gsi);
3681 case BUILT_IN_FPUTS:
3682 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3683 gimple_call_arg (stmt, 1), false);
3684 case BUILT_IN_FPUTS_UNLOCKED:
3685 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3686 gimple_call_arg (stmt, 1), true);
3687 case BUILT_IN_MEMCPY_CHK:
3688 case BUILT_IN_MEMPCPY_CHK:
3689 case BUILT_IN_MEMMOVE_CHK:
3690 case BUILT_IN_MEMSET_CHK:
3691 return gimple_fold_builtin_memory_chk (gsi,
3692 gimple_call_arg (stmt, 0),
3693 gimple_call_arg (stmt, 1),
3694 gimple_call_arg (stmt, 2),
3695 gimple_call_arg (stmt, 3),
3696 fcode);
3697 case BUILT_IN_STPCPY:
3698 return gimple_fold_builtin_stpcpy (gsi);
3699 case BUILT_IN_STRCPY_CHK:
3700 case BUILT_IN_STPCPY_CHK:
3701 return gimple_fold_builtin_stxcpy_chk (gsi,
3702 gimple_call_arg (stmt, 0),
3703 gimple_call_arg (stmt, 1),
3704 gimple_call_arg (stmt, 2),
3705 fcode);
3706 case BUILT_IN_STRNCPY_CHK:
3707 case BUILT_IN_STPNCPY_CHK:
3708 return gimple_fold_builtin_stxncpy_chk (gsi,
3709 gimple_call_arg (stmt, 0),
3710 gimple_call_arg (stmt, 1),
3711 gimple_call_arg (stmt, 2),
3712 gimple_call_arg (stmt, 3),
3713 fcode);
3714 case BUILT_IN_SNPRINTF_CHK:
3715 case BUILT_IN_VSNPRINTF_CHK:
3716 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3718 case BUILT_IN_FPRINTF:
3719 case BUILT_IN_FPRINTF_UNLOCKED:
3720 case BUILT_IN_VFPRINTF:
3721 if (n == 2 || n == 3)
3722 return gimple_fold_builtin_fprintf (gsi,
3723 gimple_call_arg (stmt, 0),
3724 gimple_call_arg (stmt, 1),
3725 n == 3
3726 ? gimple_call_arg (stmt, 2)
3727 : NULL_TREE,
3728 fcode);
3729 break;
3730 case BUILT_IN_FPRINTF_CHK:
3731 case BUILT_IN_VFPRINTF_CHK:
3732 if (n == 3 || n == 4)
3733 return gimple_fold_builtin_fprintf (gsi,
3734 gimple_call_arg (stmt, 0),
3735 gimple_call_arg (stmt, 2),
3736 n == 4
3737 ? gimple_call_arg (stmt, 3)
3738 : NULL_TREE,
3739 fcode);
3740 break;
3741 case BUILT_IN_PRINTF:
3742 case BUILT_IN_PRINTF_UNLOCKED:
3743 case BUILT_IN_VPRINTF:
3744 if (n == 1 || n == 2)
3745 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3746 n == 2
3747 ? gimple_call_arg (stmt, 1)
3748 : NULL_TREE, fcode);
3749 break;
3750 case BUILT_IN_PRINTF_CHK:
3751 case BUILT_IN_VPRINTF_CHK:
3752 if (n == 2 || n == 3)
3753 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3754 n == 3
3755 ? gimple_call_arg (stmt, 2)
3756 : NULL_TREE, fcode);
3757 break;
3758 case BUILT_IN_ACC_ON_DEVICE:
3759 return gimple_fold_builtin_acc_on_device (gsi,
3760 gimple_call_arg (stmt, 0));
3761 case BUILT_IN_REALLOC:
3762 return gimple_fold_builtin_realloc (gsi);
3764 default:;
3767 /* Try the generic builtin folder. */
3768 bool ignore = (gimple_call_lhs (stmt) == NULL);
3769 tree result = fold_call_stmt (stmt, ignore);
3770 if (result)
3772 if (ignore)
3773 STRIP_NOPS (result);
3774 else
3775 result = fold_convert (gimple_call_return_type (stmt), result);
3776 if (!update_call_from_tree (gsi, result))
3777 gimplify_and_update_call_from_tree (gsi, result);
3778 return true;
3781 return false;
3784 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3785 function calls to constants, where possible. */
3787 static tree
3788 fold_internal_goacc_dim (const gimple *call)
3790 int axis = oacc_get_ifn_dim_arg (call);
3791 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3792 tree result = NULL_TREE;
3793 tree type = TREE_TYPE (gimple_call_lhs (call));
3795 switch (gimple_call_internal_fn (call))
3797 case IFN_GOACC_DIM_POS:
3798 /* If the size is 1, we know the answer. */
3799 if (size == 1)
3800 result = build_int_cst (type, 0);
3801 break;
3802 case IFN_GOACC_DIM_SIZE:
3803 /* If the size is not dynamic, we know the answer. */
3804 if (size)
3805 result = build_int_cst (type, size);
3806 break;
3807 default:
3808 break;
3811 return result;
3814 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3815 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3816 &var where var is only addressable because of such calls. */
3818 bool
3819 optimize_atomic_compare_exchange_p (gimple *stmt)
3821 if (gimple_call_num_args (stmt) != 6
3822 || !flag_inline_atomics
3823 || !optimize
3824 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3825 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3826 || !gimple_vdef (stmt)
3827 || !gimple_vuse (stmt))
3828 return false;
3830 tree fndecl = gimple_call_fndecl (stmt);
3831 switch (DECL_FUNCTION_CODE (fndecl))
3833 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3834 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3835 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3836 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3837 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3838 break;
3839 default:
3840 return false;
3843 tree expected = gimple_call_arg (stmt, 1);
3844 if (TREE_CODE (expected) != ADDR_EXPR
3845 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3846 return false;
3848 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3849 if (!is_gimple_reg_type (etype)
3850 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3851 || TREE_THIS_VOLATILE (etype)
3852 || VECTOR_TYPE_P (etype)
3853 || TREE_CODE (etype) == COMPLEX_TYPE
3854 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3855 might not preserve all the bits. See PR71716. */
3856 || SCALAR_FLOAT_TYPE_P (etype)
3857 || maybe_ne (TYPE_PRECISION (etype),
3858 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3859 return false;
3861 tree weak = gimple_call_arg (stmt, 3);
3862 if (!integer_zerop (weak) && !integer_onep (weak))
3863 return false;
3865 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3866 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3867 machine_mode mode = TYPE_MODE (itype);
3869 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3870 == CODE_FOR_nothing
3871 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3872 return false;
3874 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3875 return false;
3877 return true;
3880 /* Fold
3881 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3882 into
3883 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3884 i = IMAGPART_EXPR <t>;
3885 r = (_Bool) i;
3886 e = REALPART_EXPR <t>; */
3888 void
3889 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3891 gimple *stmt = gsi_stmt (*gsi);
3892 tree fndecl = gimple_call_fndecl (stmt);
3893 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3894 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3895 tree ctype = build_complex_type (itype);
3896 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3897 bool throws = false;
3898 edge e = NULL;
3899 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3900 expected);
3901 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3902 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3903 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3905 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3906 build1 (VIEW_CONVERT_EXPR, itype,
3907 gimple_assign_lhs (g)));
3908 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3910 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3911 + int_size_in_bytes (itype);
3912 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3913 gimple_call_arg (stmt, 0),
3914 gimple_assign_lhs (g),
3915 gimple_call_arg (stmt, 2),
3916 build_int_cst (integer_type_node, flag),
3917 gimple_call_arg (stmt, 4),
3918 gimple_call_arg (stmt, 5));
3919 tree lhs = make_ssa_name (ctype);
3920 gimple_call_set_lhs (g, lhs);
3921 gimple_set_vdef (g, gimple_vdef (stmt));
3922 gimple_set_vuse (g, gimple_vuse (stmt));
3923 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3924 tree oldlhs = gimple_call_lhs (stmt);
3925 if (stmt_can_throw_internal (stmt))
3927 throws = true;
3928 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3930 gimple_call_set_nothrow (as_a <gcall *> (g),
3931 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3932 gimple_call_set_lhs (stmt, NULL_TREE);
3933 gsi_replace (gsi, g, true);
3934 if (oldlhs)
3936 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3937 build1 (IMAGPART_EXPR, itype, lhs));
3938 if (throws)
3940 gsi_insert_on_edge_immediate (e, g);
3941 *gsi = gsi_for_stmt (g);
3943 else
3944 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3945 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3946 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3948 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3949 build1 (REALPART_EXPR, itype, lhs));
3950 if (throws && oldlhs == NULL_TREE)
3952 gsi_insert_on_edge_immediate (e, g);
3953 *gsi = gsi_for_stmt (g);
3955 else
3956 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3957 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3959 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3960 VIEW_CONVERT_EXPR,
3961 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3962 gimple_assign_lhs (g)));
3963 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3965 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3966 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3967 *gsi = gsiret;
3970 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3971 doesn't fit into TYPE. The test for overflow should be regardless of
3972 -fwrapv, and even for unsigned types. */
3974 bool
3975 arith_overflowed_p (enum tree_code code, const_tree type,
3976 const_tree arg0, const_tree arg1)
3978 widest2_int warg0 = widest2_int_cst (arg0);
3979 widest2_int warg1 = widest2_int_cst (arg1);
3980 widest2_int wres;
3981 switch (code)
3983 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3984 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3985 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3986 default: gcc_unreachable ();
3988 signop sign = TYPE_SIGN (type);
3989 if (sign == UNSIGNED && wi::neg_p (wres))
3990 return true;
3991 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3994 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3995 The statement may be replaced by another statement, e.g., if the call
3996 simplifies to a constant value. Return true if any changes were made.
3997 It is assumed that the operands have been previously folded. */
3999 static bool
4000 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4002 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4003 tree callee;
4004 bool changed = false;
4005 unsigned i;
4007 /* Fold *& in call arguments. */
4008 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4009 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4011 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4012 if (tmp)
4014 gimple_call_set_arg (stmt, i, tmp);
4015 changed = true;
4019 /* Check for virtual calls that became direct calls. */
4020 callee = gimple_call_fn (stmt);
4021 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4023 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4025 if (dump_file && virtual_method_call_p (callee)
4026 && !possible_polymorphic_call_target_p
4027 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4028 (OBJ_TYPE_REF_EXPR (callee)))))
4030 fprintf (dump_file,
4031 "Type inheritance inconsistent devirtualization of ");
4032 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4033 fprintf (dump_file, " to ");
4034 print_generic_expr (dump_file, callee, TDF_SLIM);
4035 fprintf (dump_file, "\n");
4038 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4039 changed = true;
4041 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4043 bool final;
4044 vec <cgraph_node *>targets
4045 = possible_polymorphic_call_targets (callee, stmt, &final);
4046 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4048 tree lhs = gimple_call_lhs (stmt);
4049 if (dump_enabled_p ())
4051 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4052 "folding virtual function call to %s\n",
4053 targets.length () == 1
4054 ? targets[0]->name ()
4055 : "__builtin_unreachable");
4057 if (targets.length () == 1)
4059 tree fndecl = targets[0]->decl;
4060 gimple_call_set_fndecl (stmt, fndecl);
4061 changed = true;
4062 /* If changing the call to __cxa_pure_virtual
4063 or similar noreturn function, adjust gimple_call_fntype
4064 too. */
4065 if (gimple_call_noreturn_p (stmt)
4066 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4067 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4068 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4069 == void_type_node))
4070 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4071 /* If the call becomes noreturn, remove the lhs. */
4072 if (lhs
4073 && gimple_call_noreturn_p (stmt)
4074 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4075 || should_remove_lhs_p (lhs)))
4077 if (TREE_CODE (lhs) == SSA_NAME)
4079 tree var = create_tmp_var (TREE_TYPE (lhs));
4080 tree def = get_or_create_ssa_default_def (cfun, var);
4081 gimple *new_stmt = gimple_build_assign (lhs, def);
4082 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4084 gimple_call_set_lhs (stmt, NULL_TREE);
4086 maybe_remove_unused_call_args (cfun, stmt);
4088 else
4090 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4091 gimple *new_stmt = gimple_build_call (fndecl, 0);
4092 gimple_set_location (new_stmt, gimple_location (stmt));
4093 /* If the call had a SSA name as lhs morph that into
4094 an uninitialized value. */
4095 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4097 tree var = create_tmp_var (TREE_TYPE (lhs));
4098 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4099 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4100 set_ssa_default_def (cfun, var, lhs);
4102 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4103 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4104 gsi_replace (gsi, new_stmt, false);
4105 return true;
4111 /* Check for indirect calls that became direct calls, and then
4112 no longer require a static chain. */
4113 if (gimple_call_chain (stmt))
4115 tree fn = gimple_call_fndecl (stmt);
4116 if (fn && !DECL_STATIC_CHAIN (fn))
4118 gimple_call_set_chain (stmt, NULL);
4119 changed = true;
4121 else
4123 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4124 if (tmp)
4126 gimple_call_set_chain (stmt, tmp);
4127 changed = true;
4132 if (inplace)
4133 return changed;
4135 /* Check for builtins that CCP can handle using information not
4136 available in the generic fold routines. */
4137 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4139 if (gimple_fold_builtin (gsi))
4140 changed = true;
4142 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4144 changed |= targetm.gimple_fold_builtin (gsi);
4146 else if (gimple_call_internal_p (stmt))
4148 enum tree_code subcode = ERROR_MARK;
4149 tree result = NULL_TREE;
4150 bool cplx_result = false;
4151 tree overflow = NULL_TREE;
4152 switch (gimple_call_internal_fn (stmt))
4154 case IFN_BUILTIN_EXPECT:
4155 result = fold_builtin_expect (gimple_location (stmt),
4156 gimple_call_arg (stmt, 0),
4157 gimple_call_arg (stmt, 1),
4158 gimple_call_arg (stmt, 2));
4159 break;
4160 case IFN_UBSAN_OBJECT_SIZE:
4162 tree offset = gimple_call_arg (stmt, 1);
4163 tree objsize = gimple_call_arg (stmt, 2);
4164 if (integer_all_onesp (objsize)
4165 || (TREE_CODE (offset) == INTEGER_CST
4166 && TREE_CODE (objsize) == INTEGER_CST
4167 && tree_int_cst_le (offset, objsize)))
4169 replace_call_with_value (gsi, NULL_TREE);
4170 return true;
4173 break;
4174 case IFN_UBSAN_PTR:
4175 if (integer_zerop (gimple_call_arg (stmt, 1)))
4177 replace_call_with_value (gsi, NULL_TREE);
4178 return true;
4180 break;
4181 case IFN_UBSAN_BOUNDS:
4183 tree index = gimple_call_arg (stmt, 1);
4184 tree bound = gimple_call_arg (stmt, 2);
4185 if (TREE_CODE (index) == INTEGER_CST
4186 && TREE_CODE (bound) == INTEGER_CST)
4188 index = fold_convert (TREE_TYPE (bound), index);
4189 if (TREE_CODE (index) == INTEGER_CST
4190 && tree_int_cst_le (index, bound))
4192 replace_call_with_value (gsi, NULL_TREE);
4193 return true;
4197 break;
4198 case IFN_GOACC_DIM_SIZE:
4199 case IFN_GOACC_DIM_POS:
4200 result = fold_internal_goacc_dim (stmt);
4201 break;
4202 case IFN_UBSAN_CHECK_ADD:
4203 subcode = PLUS_EXPR;
4204 break;
4205 case IFN_UBSAN_CHECK_SUB:
4206 subcode = MINUS_EXPR;
4207 break;
4208 case IFN_UBSAN_CHECK_MUL:
4209 subcode = MULT_EXPR;
4210 break;
4211 case IFN_ADD_OVERFLOW:
4212 subcode = PLUS_EXPR;
4213 cplx_result = true;
4214 break;
4215 case IFN_SUB_OVERFLOW:
4216 subcode = MINUS_EXPR;
4217 cplx_result = true;
4218 break;
4219 case IFN_MUL_OVERFLOW:
4220 subcode = MULT_EXPR;
4221 cplx_result = true;
4222 break;
4223 default:
4224 break;
4226 if (subcode != ERROR_MARK)
4228 tree arg0 = gimple_call_arg (stmt, 0);
4229 tree arg1 = gimple_call_arg (stmt, 1);
4230 tree type = TREE_TYPE (arg0);
4231 if (cplx_result)
4233 tree lhs = gimple_call_lhs (stmt);
4234 if (lhs == NULL_TREE)
4235 type = NULL_TREE;
4236 else
4237 type = TREE_TYPE (TREE_TYPE (lhs));
4239 if (type == NULL_TREE)
4241 /* x = y + 0; x = y - 0; x = y * 0; */
4242 else if (integer_zerop (arg1))
4243 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4244 /* x = 0 + y; x = 0 * y; */
4245 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4246 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4247 /* x = y - y; */
4248 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4249 result = integer_zero_node;
4250 /* x = y * 1; x = 1 * y; */
4251 else if (subcode == MULT_EXPR && integer_onep (arg1))
4252 result = arg0;
4253 else if (subcode == MULT_EXPR && integer_onep (arg0))
4254 result = arg1;
4255 else if (TREE_CODE (arg0) == INTEGER_CST
4256 && TREE_CODE (arg1) == INTEGER_CST)
4258 if (cplx_result)
4259 result = int_const_binop (subcode, fold_convert (type, arg0),
4260 fold_convert (type, arg1));
4261 else
4262 result = int_const_binop (subcode, arg0, arg1);
4263 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4265 if (cplx_result)
4266 overflow = build_one_cst (type);
4267 else
4268 result = NULL_TREE;
4271 if (result)
4273 if (result == integer_zero_node)
4274 result = build_zero_cst (type);
4275 else if (cplx_result && TREE_TYPE (result) != type)
4277 if (TREE_CODE (result) == INTEGER_CST)
4279 if (arith_overflowed_p (PLUS_EXPR, type, result,
4280 integer_zero_node))
4281 overflow = build_one_cst (type);
4283 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4284 && TYPE_UNSIGNED (type))
4285 || (TYPE_PRECISION (type)
4286 < (TYPE_PRECISION (TREE_TYPE (result))
4287 + (TYPE_UNSIGNED (TREE_TYPE (result))
4288 && !TYPE_UNSIGNED (type)))))
4289 result = NULL_TREE;
4290 if (result)
4291 result = fold_convert (type, result);
4296 if (result)
4298 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4299 result = drop_tree_overflow (result);
4300 if (cplx_result)
4302 if (overflow == NULL_TREE)
4303 overflow = build_zero_cst (TREE_TYPE (result));
4304 tree ctype = build_complex_type (TREE_TYPE (result));
4305 if (TREE_CODE (result) == INTEGER_CST
4306 && TREE_CODE (overflow) == INTEGER_CST)
4307 result = build_complex (ctype, result, overflow);
4308 else
4309 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4310 ctype, result, overflow);
4312 if (!update_call_from_tree (gsi, result))
4313 gimplify_and_update_call_from_tree (gsi, result);
4314 changed = true;
4318 return changed;
4322 /* Return true whether NAME has a use on STMT. */
4324 static bool
4325 has_use_on_stmt (tree name, gimple *stmt)
4327 imm_use_iterator iter;
4328 use_operand_p use_p;
4329 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4330 if (USE_STMT (use_p) == stmt)
4331 return true;
4332 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RES_OP
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.
   On failure, *GSI is left untouched (the caller discards *SEQ).  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* ops[0] may itself be a comparison tree; check its two operands
     for the same abnormal-PHI restriction.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a bare SSA name: rewrite as NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Constant condition: make the GIMPLE_COND trivially
	     false or true.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified expression into *SEQ and compare
	     the resulting SSA name against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For in-place folding only allow replacements that do not grow
	 the number of RHS operands of the existing assignment.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* The call simplified to the same combined function: only the
	 arguments changed, update them in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Generic case: build the simplification into *SEQ assigning
	     to the original LHS and replace the whole statement,
	     preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at a reference tree (possibly wrapped in an ADDR_EXPR) that
   is rewritten in place.  Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through a wrapping ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     verify the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the constant offset
	     operand of the MEM_REF.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE the statement may only be
   modified, never replaced.  VALUEIZE is the callback used by
   gimple_simplify to look through SSA definitions.  Returns true if
   the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping a comparison's operands requires
		     swapping the comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  Note that for INPLACE only
     assignments and conditions are handled here since other statement
     kinds cannot generally be replaced in place.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may not grow the number of RHS operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &ref to &folded-ref, preserving the pointer type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Replace an SSA return value by its (copy-propagatable)
	   valueized value if the callback provides one.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4921 /* Valueziation callback that ends up not following SSA edges. */
4923 tree
4924 no_follow_ssa_edges (tree)
4926 return NULL_TREE;
4929 /* Valueization callback that ends up following single-use SSA edges only. */
4931 tree
4932 follow_single_use_edges (tree val)
4934 if (TREE_CODE (val) == SSA_NAME
4935 && !has_single_use (val))
4936 return NULL_TREE;
4937 return val;
4940 /* Valueization callback that follows all SSA edges. */
4942 tree
4943 follow_all_ssa_edges (tree val)
4945 return val;
4948 /* Fold the statement pointed to by GSI. In some cases, this function may
4949 replace the whole statement with a new one. Returns true iff folding
4950 makes any changes.
4951 The statement pointed to by GSI should be in valid gimple form but may
4952 be in unfolded state as resulting from for example constant propagation
4953 which can produce *&x = 0. */
4955 bool
4956 fold_stmt (gimple_stmt_iterator *gsi)
4958 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4961 bool
4962 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4964 return fold_stmt_1 (gsi, false, valueize);
4967 /* Perform the minimal folding on statement *GSI. Only operations like
4968 *&x created by constant propagation are handled. The statement cannot
4969 be replaced with a new one. Return true if the statement was
4970 changed, false otherwise.
4971 The statement *GSI should be in valid gimple form but may
4972 be in unfolded state as resulting from for example constant propagation
4973 which can produce *&x = 0. */
4975 bool
4976 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4978 gimple *stmt = gsi_stmt (*gsi);
4979 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4980 gcc_assert (gsi_stmt (*gsi) == stmt);
4981 return changed;
4984 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4985 if EXPR is null or we don't know how.
4986 If non-null, the result always has boolean type. */
4988 static tree
4989 canonicalize_bool (tree expr, bool invert)
4991 if (!expr)
4992 return NULL_TREE;
4993 else if (invert)
4995 if (integer_nonzerop (expr))
4996 return boolean_false_node;
4997 else if (integer_zerop (expr))
4998 return boolean_true_node;
4999 else if (TREE_CODE (expr) == SSA_NAME)
5000 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5001 build_int_cst (TREE_TYPE (expr), 0));
5002 else if (COMPARISON_CLASS_P (expr))
5003 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5004 boolean_type_node,
5005 TREE_OPERAND (expr, 0),
5006 TREE_OPERAND (expr, 1));
5007 else
5008 return NULL_TREE;
5010 else
5012 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5013 return expr;
5014 if (integer_nonzerop (expr))
5015 return boolean_true_node;
5016 else if (integer_zerop (expr))
5017 return boolean_false_node;
5018 else if (TREE_CODE (expr) == SSA_NAME)
5019 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5020 build_int_cst (TREE_TYPE (expr), 0));
5021 else if (COMPARISON_CLASS_P (expr))
5022 return fold_build2 (TREE_CODE (expr),
5023 boolean_type_node,
5024 TREE_OPERAND (expr, 0),
5025 TREE_OPERAND (expr, 1));
5026 else
5027 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Returns true only when equivalence can be proven.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and NAME == nonzero.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name CMP x) != 0 / == nonzero is just (name CMP x).  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name CMP x) == 0 / != nonzero is the inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5089 /* Check to see if two boolean expressions OP1 and OP2 are logically
5090 equivalent. */
5092 static bool
5093 same_bool_result_p (const_tree op1, const_tree op2)
5095 /* Simple cases first. */
5096 if (operand_equal_p (op1, op2, 0))
5097 return true;
5099 /* Check the cases where at least one of the operands is a comparison.
5100 These are a bit smarter than operand_equal_p in that they apply some
5101 identifies on SSA_NAMEs. */
5102 if (COMPARISON_CLASS_P (op2)
5103 && same_bool_comparison_p (op1, TREE_CODE (op2),
5104 TREE_OPERAND (op2, 0),
5105 TREE_OPERAND (op2, 1)))
5106 return true;
5107 if (COMPARISON_CLASS_P (op1)
5108 && same_bool_comparison_p (op2, TREE_CODE (op1),
5109 TREE_OPERAND (op1, 0),
5110 TREE_OPERAND (op1, 1)))
5111 return true;
5113 /* Default case. */
5114 return false;
5117 /* Forward declarations for some mutually recursive functions. */
5119 static tree
5120 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5121 enum tree_code code2, tree op2a, tree op2b);
5122 static tree
5123 and_var_with_comparison (tree var, bool invert,
5124 enum tree_code code2, tree op2a, tree op2b);
5125 static tree
5126 and_var_with_comparison_1 (gimple *stmt,
5127 enum tree_code code2, tree op2a, tree op2b);
5128 static tree
5129 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5130 enum tree_code code2, tree op2a, tree op2b);
5131 static tree
5132 or_var_with_comparison (tree var, bool invert,
5133 enum tree_code code2, tree op2a, tree op2b);
5134 static tree
5135 or_var_with_comparison_1 (gimple *stmt,
5136 enum tree_code code2, tree op2a, tree op2b);
5138 /* Helper function for and_comparisons_1: try to simplify the AND of the
5139 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5140 If INVERT is true, invert the value of the VAR before doing the AND.
5141 Return NULL_EXPR if we can't simplify this to a single expression. */
5143 static tree
5144 and_var_with_comparison (tree var, bool invert,
5145 enum tree_code code2, tree op2a, tree op2b)
5147 tree t;
5148 gimple *stmt = SSA_NAME_DEF_STMT (var);
5150 /* We can only deal with variables whose definitions are assignments. */
5151 if (!is_gimple_assign (stmt))
5152 return NULL_TREE;
5154 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5155 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5156 Then we only have to consider the simpler non-inverted cases. */
5157 if (invert)
5158 t = or_var_with_comparison_1 (stmt,
5159 invert_tree_comparison (code2, false),
5160 op2a, op2b);
5161 else
5162 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5163 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == nonzero) tests op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != nonzero) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5329 /* Try to simplify the AND of two comparisons defined by
5330 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5331 If this can be done without constructing an intermediate value,
5332 return the resulting tree; otherwise NULL_TREE is returned.
5333 This function is deliberately asymmetric as it recurses on SSA_DEFs
5334 in the first comparison but not the second. */
5336 static tree
5337 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5338 enum tree_code code2, tree op2a, tree op2b)
5340 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5342 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5343 if (operand_equal_p (op1a, op2a, 0)
5344 && operand_equal_p (op1b, op2b, 0))
5346 /* Result will be either NULL_TREE, or a combined comparison. */
5347 tree t = combine_comparisons (UNKNOWN_LOCATION,
5348 TRUTH_ANDIF_EXPR, code1, code2,
5349 truth_type, op1a, op1b);
5350 if (t)
5351 return t;
5354 /* Likewise the swapped case of the above. */
5355 if (operand_equal_p (op1a, op2b, 0)
5356 && operand_equal_p (op1b, op2a, 0))
5358 /* Result will be either NULL_TREE, or a combined comparison. */
5359 tree t = combine_comparisons (UNKNOWN_LOCATION,
5360 TRUTH_ANDIF_EXPR, code1,
5361 swap_tree_comparison (code2),
5362 truth_type, op1a, op1b);
5363 if (t)
5364 return t;
5367 /* If both comparisons are of the same value against constants, we might
5368 be able to merge them. */
5369 if (operand_equal_p (op1a, op2a, 0)
5370 && TREE_CODE (op1b) == INTEGER_CST
5371 && TREE_CODE (op2b) == INTEGER_CST)
5373 int cmp = tree_int_cst_compare (op1b, op2b);
/* cmp is negative, zero, or positive as op1b is less than, equal
   to, or greater than op2b; the case analysis below is keyed on it. */
5375 /* If we have (op1a == op1b), we should either be able to
5376 return that or FALSE, depending on whether the constant op1b
5377 also satisfies the other comparison against op2b. */
5378 if (code1 == EQ_EXPR)
5380 bool done = true;
5381 bool val;
5382 switch (code2)
5384 case EQ_EXPR: val = (cmp == 0); break;
5385 case NE_EXPR: val = (cmp != 0); break;
5386 case LT_EXPR: val = (cmp < 0); break;
5387 case GT_EXPR: val = (cmp > 0); break;
5388 case LE_EXPR: val = (cmp <= 0); break;
5389 case GE_EXPR: val = (cmp >= 0); break;
5390 default: done = false;
5392 if (done)
5394 if (val)
5395 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5396 else
5397 return boolean_false_node;
5400 /* Likewise if the second comparison is an == comparison. */
5401 else if (code2 == EQ_EXPR)
5403 bool done = true;
5404 bool val;
5405 switch (code1)
5407 case EQ_EXPR: val = (cmp == 0); break;
5408 case NE_EXPR: val = (cmp != 0); break;
5409 case LT_EXPR: val = (cmp > 0); break;
5410 case GT_EXPR: val = (cmp < 0); break;
5411 case LE_EXPR: val = (cmp >= 0); break;
5412 case GE_EXPR: val = (cmp <= 0); break;
5413 default: done = false;
5415 if (done)
5417 if (val)
5418 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5419 else
5420 return boolean_false_node;
5424 /* Same business with inequality tests. */
5425 else if (code1 == NE_EXPR)
5427 bool val;
5428 switch (code2)
5430 case EQ_EXPR: val = (cmp != 0); break;
5431 case NE_EXPR: val = (cmp == 0); break;
5432 case LT_EXPR: val = (cmp >= 0); break;
5433 case GT_EXPR: val = (cmp <= 0); break;
5434 case LE_EXPR: val = (cmp > 0); break;
5435 case GE_EXPR: val = (cmp < 0); break;
5436 default:
5437 val = false;
5439 if (val)
5440 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5442 else if (code2 == NE_EXPR)
5444 bool val;
5445 switch (code1)
5447 case EQ_EXPR: val = (cmp == 0); break;
5448 case NE_EXPR: val = (cmp != 0); break;
5449 case LT_EXPR: val = (cmp <= 0); break;
5450 case GT_EXPR: val = (cmp >= 0); break;
5451 case LE_EXPR: val = (cmp < 0); break;
5452 case GE_EXPR: val = (cmp > 0); break;
5453 default:
5454 val = false;
5456 if (val)
5457 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5460 /* Choose the more restrictive of two < or <= comparisons. */
5461 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5462 && (code2 == LT_EXPR || code2 == LE_EXPR))
5464 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5465 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5466 else
5467 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5470 /* Likewise choose the more restrictive of two > or >= comparisons. */
5471 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5472 && (code2 == GT_EXPR || code2 == GE_EXPR))
5474 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5475 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5476 else
5477 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5480 /* Check for singleton ranges.  (x <= c AND x >= c) == (x == c).  */
5481 else if (cmp == 0
5482 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5483 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5484 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b)
5486 /* Check for disjoint ranges. */
5487 else if (cmp <= 0
5488 && (code1 == LT_EXPR || code1 == LE_EXPR)
5489 && (code2 == GT_EXPR || code2 == GE_EXPR))
5490 return boolean_false_node;
5491 else if (cmp >= 0
5492 && (code1 == GT_EXPR || code1 == GE_EXPR)
5493 && (code2 == LT_EXPR || code2 == LE_EXPR))
5494 return boolean_false_node;
5497 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5498 NAME's definition is a truth value. See if there are any simplifications
5499 that can be done against the NAME's definition. */
5500 if (TREE_CODE (op1a) == SSA_NAME
5501 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5502 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert is true when the first comparison reduces to !op1a
   rather than op1a.  */
5504 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5505 || (code1 == NE_EXPR && integer_onep (op1b)));
5506 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5507 switch (gimple_code (stmt))
5509 case GIMPLE_ASSIGN:
5510 /* Try to simplify by copy-propagating the definition. */
5511 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5513 case GIMPLE_PHI:
5514 /* If every argument to the PHI produces the same result when
5515 ANDed with the second comparison, we win.
5516 Do not do this unless the type is bool since we need a bool
5517 result here anyway. */
5518 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5520 tree result = NULL_TREE;
5521 unsigned i;
5522 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5524 tree arg = gimple_phi_arg_def (stmt, i);
5526 /* If this PHI has itself as an argument, ignore it.
5527 If all the other args produce the same result,
5528 we're still OK. */
5529 if (arg == gimple_phi_result (stmt))
5530 continue;
5531 else if (TREE_CODE (arg) == INTEGER_CST)
5533 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5535 if (!result)
5536 result = boolean_false_node;
5537 else if (!integer_zerop (result))
5538 return NULL_TREE;
5540 else if (!result)
5541 result = fold_build2 (code2, boolean_type_node,
5542 op2a, op2b);
5543 else if (!same_bool_comparison_p (result,
5544 code2, op2a, op2b))
5545 return NULL_TREE;
5547 else if (TREE_CODE (arg) == SSA_NAME
5548 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5550 tree temp;
5551 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5552 /* In simple cases we can look through PHI nodes,
5553 but we have to be careful with loops.
5554 See PR49073. */
5555 if (! dom_info_available_p (CDI_DOMINATORS)
5556 || gimple_bb (def_stmt) == gimple_bb (stmt)
5557 || dominated_by_p (CDI_DOMINATORS,
5558 gimple_bb (def_stmt),
5559 gimple_bb (stmt)))
5560 return NULL_TREE;
5561 temp = and_var_with_comparison (arg, invert, code2,
5562 op2a, op2b);
5563 if (!temp)
5564 return NULL_TREE;
5565 else if (!result)
5566 result = temp;
5567 else if (!same_bool_result_p (result, temp))
5568 return NULL_TREE;
5570 else
5571 return NULL_TREE;
5573 return result;
5576 default:
5577 break;
5580 return NULL_TREE;
5583 /* Try to simplify the AND of two comparisons, specified by
5584 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5585 If this can be simplified to a single expression (without requiring
5586 introducing more SSA variables to hold intermediate values),
5587 return the resulting tree. Otherwise return NULL_TREE.
5588 If the result expression is non-null, it has boolean type. */
5590 tree
5591 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5592 enum tree_code code2, tree op2a, tree op2b)
5594 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5595 if (t)
5596 return t;
5597 else
5598 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5601 /* Helper function for or_comparisons_1: try to simplify the OR of the
5602 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5603 If INVERT is true, invert the value of VAR before doing the OR.
5604 Return NULL_EXPR if we can't simplify this to a single expression. */
5606 static tree
5607 or_var_with_comparison (tree var, bool invert,
5608 enum tree_code code2, tree op2a, tree op2b)
5610 tree t;
5611 gimple *stmt = SSA_NAME_DEF_STMT (var);
5613 /* We can only deal with variables whose definitions are assignments. */
5614 if (!is_gimple_assign (stmt))
5615 return NULL_TREE;
5617 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5618 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5619 Then we only have to consider the simpler non-inverted cases. */
5620 if (invert)
5621 t = and_var_with_comparison_1 (stmt,
5622 invert_tree_comparison (code2, false),
5623 op2a, op2b);
5624 else
5625 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5626 return canonicalize_bool (t, invert);
5629 /* Try to simplify the OR of the ssa variable defined by the assignment
5630 STMT with the comparison specified by (OP2A CODE2 OP2B).
5631 Return NULL_EXPR if we can't simplify this to a single expression. */
5633 static tree
5634 or_var_with_comparison_1 (gimple *stmt,
5635 enum tree_code code2, tree op2a, tree op2b)
5637 tree var = gimple_assign_lhs (stmt);
5638 tree true_test_var = NULL_TREE;
5639 tree false_test_var = NULL_TREE;
5640 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5642 /* Check for identities like (var OR (var != 0)) => true . */
5643 if (TREE_CODE (op2a) == SSA_NAME
5644 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5646 if ((code2 == NE_EXPR && integer_zerop (op2b))
5647 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5649 true_test_var = op2a;
5650 if (var == true_test_var)
5651 return var;
5653 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5654 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5656 false_test_var = op2a;
5657 if (var == false_test_var)
5658 return boolean_true_node;
5662 /* If the definition is a comparison, recurse on it. */
5663 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5665 tree t = or_comparisons_1 (innercode,
5666 gimple_assign_rhs1 (stmt),
5667 gimple_assign_rhs2 (stmt),
5668 code2,
5669 op2a,
5670 op2b);
5671 if (t)
5672 return t;
5675 /* If the definition is an AND or OR expression, we may be able to
5676 simplify by reassociating. */
5677 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5678 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5680 tree inner1 = gimple_assign_rhs1 (stmt);
5681 tree inner2 = gimple_assign_rhs2 (stmt);
5682 gimple *s;
5683 tree t;
/* Simplified first half, if any, saved for the redistributed
   cases handled below.  */
5684 tree partial = NULL_TREE;
5685 bool is_or = (innercode == BIT_IOR_EXPR);
5687 /* Check for boolean identities that don't require recursive examination
5688 of inner1/inner2:
5689 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5690 inner1 OR (inner1 AND inner2) => inner1
5691 !inner1 OR (inner1 OR inner2) => true
5692 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
*/
5694 if (inner1 == true_test_var)
5695 return (is_or ? var : inner1);
5696 else if (inner2 == true_test_var)
5697 return (is_or ? var : inner2);
5698 else if (inner1 == false_test_var)
5699 return (is_or
5700 ? boolean_true_node
5701 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5702 else if (inner2 == false_test_var)
5703 return (is_or
5704 ? boolean_true_node
5705 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5707 /* Next, redistribute/reassociate the OR across the inner tests.
5708 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5709 if (TREE_CODE (inner1) == SSA_NAME
5710 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5711 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5712 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5713 gimple_assign_rhs1 (s),
5714 gimple_assign_rhs2 (s),
5715 code2, op2a, op2b)))
5717 /* Handle the OR case, where we are reassociating:
5718 (inner1 OR inner2) OR (op2a code2 op2b)
5719 => (t OR inner2)
5720 If the partial result t is a constant, we win. Otherwise
5721 continue on to try reassociating with the other inner test. */
5722 if (is_or)
5724 if (integer_onep (t))
5725 return boolean_true_node;
5726 else if (integer_zerop (t))
5727 return inner2;
5730 /* Handle the AND case, where we are redistributing:
5731 (inner1 AND inner2) OR (op2a code2 op2b)
5732 => (t AND (inner2 OR (op2a code op2b))) */
5733 else if (integer_zerop (t))
5734 return boolean_false_node;
5736 /* Save partial result for later. */
5737 partial = t;
5740 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5741 if (TREE_CODE (inner2) == SSA_NAME
5742 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5743 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5744 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5745 gimple_assign_rhs1 (s),
5746 gimple_assign_rhs2 (s),
5747 code2, op2a, op2b)))
5749 /* Handle the OR case, where we are reassociating:
5750 (inner1 OR inner2) OR (op2a code2 op2b)
5751 => (inner1 OR t)
5752 => (t OR partial) */
5753 if (is_or)
5755 if (integer_zerop (t))
5756 return inner1;
5757 else if (integer_onep (t))
5758 return boolean_true_node;
5759 /* If both are the same, we can apply the identity
5760 (x OR x) == x. */
5761 else if (partial && same_bool_result_p (t, partial))
5762 return t;
5765 /* Handle the AND case, where we are redistributing:
5766 (inner1 AND inner2) OR (op2a code2 op2b)
5767 => (t AND (inner1 OR (op2a code2 op2b)))
5768 => (t AND partial) */
5769 else
5771 if (integer_zerop (t))
5772 return boolean_false_node;
5773 else if (partial)
5775 /* We already got a simplification for the other
5776 operand to the redistributed AND expression. The
5777 interesting case is when at least one is true.
5778 Or, if both are the same, we can apply the identity
5779 (x AND x) == x. */
5780 if (integer_onep (partial))
5781 return t;
5782 else if (integer_onep (t))
5783 return partial;
5784 else if (same_bool_result_p (t, partial))
5785 return t;
5790 return NULL_TREE;
5793 /* Try to simplify the OR of two comparisons defined by
5794 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5795 If this can be done without constructing an intermediate value,
5796 return the resulting tree; otherwise NULL_TREE is returned.
5797 This function is deliberately asymmetric as it recurses on SSA_DEFs
5798 in the first comparison but not the second. */
5800 static tree
5801 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5802 enum tree_code code2, tree op2a, tree op2b)
5804 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5806 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5807 if (operand_equal_p (op1a, op2a, 0)
5808 && operand_equal_p (op1b, op2b, 0))
5810 /* Result will be either NULL_TREE, or a combined comparison. */
5811 tree t = combine_comparisons (UNKNOWN_LOCATION,
5812 TRUTH_ORIF_EXPR, code1, code2,
5813 truth_type, op1a, op1b);
5814 if (t)
5815 return t;
5818 /* Likewise the swapped case of the above. */
5819 if (operand_equal_p (op1a, op2b, 0)
5820 && operand_equal_p (op1b, op2a, 0))
5822 /* Result will be either NULL_TREE, or a combined comparison. */
5823 tree t = combine_comparisons (UNKNOWN_LOCATION,
5824 TRUTH_ORIF_EXPR, code1,
5825 swap_tree_comparison (code2),
5826 truth_type, op1a, op1b);
5827 if (t)
5828 return t;
5831 /* If both comparisons are of the same value against constants, we might
5832 be able to merge them. */
5833 if (operand_equal_p (op1a, op2a, 0)
5834 && TREE_CODE (op1b) == INTEGER_CST
5835 && TREE_CODE (op2b) == INTEGER_CST)
5837 int cmp = tree_int_cst_compare (op1b, op2b);
/* cmp is negative, zero, or positive as op1b is less than, equal
   to, or greater than op2b; the case analysis below is keyed on it. */
5839 /* If we have (op1a != op1b), we should either be able to
5840 return that or TRUE, depending on whether the constant op1b
5841 also satisfies the other comparison against op2b. */
5842 if (code1 == NE_EXPR)
5844 bool done = true;
5845 bool val;
5846 switch (code2)
5848 case EQ_EXPR: val = (cmp == 0); break;
5849 case NE_EXPR: val = (cmp != 0); break;
5850 case LT_EXPR: val = (cmp < 0); break;
5851 case GT_EXPR: val = (cmp > 0); break;
5852 case LE_EXPR: val = (cmp <= 0); break;
5853 case GE_EXPR: val = (cmp >= 0); break;
5854 default: done = false;
5856 if (done)
5858 if (val)
5859 return boolean_true_node;
5860 else
5861 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5864 /* Likewise if the second comparison is a != comparison. */
5865 else if (code2 == NE_EXPR)
5867 bool done = true;
5868 bool val;
5869 switch (code1)
5871 case EQ_EXPR: val = (cmp == 0); break;
5872 case NE_EXPR: val = (cmp != 0); break;
5873 case LT_EXPR: val = (cmp > 0); break;
5874 case GT_EXPR: val = (cmp < 0); break;
5875 case LE_EXPR: val = (cmp >= 0); break;
5876 case GE_EXPR: val = (cmp <= 0); break;
5877 default: done = false;
5879 if (done)
5881 if (val)
5882 return boolean_true_node;
5883 else
5884 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5888 /* See if an equality test is redundant with the other comparison. */
5889 else if (code1 == EQ_EXPR)
5891 bool val;
5892 switch (code2)
5894 case EQ_EXPR: val = (cmp == 0); break;
5895 case NE_EXPR: val = (cmp != 0); break;
5896 case LT_EXPR: val = (cmp < 0); break;
5897 case GT_EXPR: val = (cmp > 0); break;
5898 case LE_EXPR: val = (cmp <= 0); break;
5899 case GE_EXPR: val = (cmp >= 0); break;
5900 default:
5901 val = false;
5903 if (val)
5904 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5906 else if (code2 == EQ_EXPR)
5908 bool val;
5909 switch (code1)
5911 case EQ_EXPR: val = (cmp == 0); break;
5912 case NE_EXPR: val = (cmp != 0); break;
5913 case LT_EXPR: val = (cmp > 0); break;
5914 case GT_EXPR: val = (cmp < 0); break;
5915 case LE_EXPR: val = (cmp >= 0); break;
5916 case GE_EXPR: val = (cmp <= 0); break;
5917 default:
5918 val = false;
5920 if (val)
5921 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5924 /* Choose the less restrictive of two < or <= comparisons. */
5925 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5926 && (code2 == LT_EXPR || code2 == LE_EXPR))
5928 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5929 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5930 else
5931 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5934 /* Likewise choose the less restrictive of two > or >= comparisons. */
5935 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5936 && (code2 == GT_EXPR || code2 == GE_EXPR))
5938 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5939 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5940 else
5941 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5944 /* Check for singleton ranges.  (x < c OR x > c) == (x != c).  */
5945 else if (cmp == 0
5946 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5947 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5948 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5950 /* Check for less/greater pairs that don't restrict the range at all. */
5951 else if (cmp >= 0
5952 && (code1 == LT_EXPR || code1 == LE_EXPR)
5953 && (code2 == GT_EXPR || code2 == GE_EXPR))
5954 return boolean_true_node;
5955 else if (cmp <= 0
5956 && (code1 == GT_EXPR || code1 == GE_EXPR)
5957 && (code2 == LT_EXPR || code2 == LE_EXPR))
5958 return boolean_true_node;
5961 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5962 NAME's definition is a truth value. See if there are any simplifications
5963 that can be done against the NAME's definition. */
5964 if (TREE_CODE (op1a) == SSA_NAME
5965 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5966 && (integer_zerop (op1b) || integer_onep (op1b)))
/* invert is true when the first comparison reduces to !op1a
   rather than op1a.  */
5968 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5969 || (code1 == NE_EXPR && integer_onep (op1b)));
5970 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5971 switch (gimple_code (stmt))
5973 case GIMPLE_ASSIGN:
5974 /* Try to simplify by copy-propagating the definition. */
5975 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5977 case GIMPLE_PHI:
5978 /* If every argument to the PHI produces the same result when
5979 ORed with the second comparison, we win.
5980 Do not do this unless the type is bool since we need a bool
5981 result here anyway. */
5982 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5984 tree result = NULL_TREE;
5985 unsigned i;
5986 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5988 tree arg = gimple_phi_arg_def (stmt, i);
5990 /* If this PHI has itself as an argument, ignore it.
5991 If all the other args produce the same result,
5992 we're still OK. */
5993 if (arg == gimple_phi_result (stmt))
5994 continue;
5995 else if (TREE_CODE (arg) == INTEGER_CST)
5997 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5999 if (!result)
6000 result = boolean_true_node;
6001 else if (!integer_onep (result))
6002 return NULL_TREE;
6004 else if (!result)
6005 result = fold_build2 (code2, boolean_type_node,
6006 op2a, op2b);
6007 else if (!same_bool_comparison_p (result,
6008 code2, op2a, op2b))
6009 return NULL_TREE;
6011 else if (TREE_CODE (arg) == SSA_NAME
6012 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6014 tree temp;
6015 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6016 /* In simple cases we can look through PHI nodes,
6017 but we have to be careful with loops.
6018 See PR49073. */
6019 if (! dom_info_available_p (CDI_DOMINATORS)
6020 || gimple_bb (def_stmt) == gimple_bb (stmt)
6021 || dominated_by_p (CDI_DOMINATORS,
6022 gimple_bb (def_stmt),
6023 gimple_bb (stmt)))
6024 return NULL_TREE;
6025 temp = or_var_with_comparison (arg, invert, code2,
6026 op2a, op2b);
6027 if (!temp)
6028 return NULL_TREE;
6029 else if (!result)
6030 result = temp;
6031 else if (!same_bool_result_p (result, temp))
6032 return NULL_TREE;
6034 else
6035 return NULL_TREE;
6037 return result;
6040 default:
6041 break;
6044 return NULL_TREE;
6047 /* Try to simplify the OR of two comparisons, specified by
6048 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6049 If this can be simplified to a single expression (without requiring
6050 introducing more SSA variables to hold intermediate values),
6051 return the resulting tree. Otherwise return NULL_TREE.
6052 If the result expression is non-null, it has boolean type. */
6054 tree
6055 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6056 enum tree_code code2, tree op2a, tree op2b)
6058 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6059 if (t)
6060 return t;
6061 else
6062 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6066 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6068 Either NULL_TREE, a simplified but non-constant or a constant
6069 is returned.
6071 ??? This should go into a gimple-fold-inline.h file to be eventually
6072 privatized with the single valueize function used in the various TUs
6073 to avoid the indirect function call overhead. */
6075 tree
6076 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6077 tree (*gvalueize) (tree))
6079 gimple_match_op res_op;
6080 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6081 edges if there are intermediate VARYING defs. For this reason
6082 do not follow SSA edges here even though SCCVN can technically
6083 just deal fine with that. */
6084 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6086 tree res = NULL_TREE;
6087 if (gimple_simplified_result_is_gimple_val (&res_op))
6088 res = res_op.ops[0];
6089 else if (mprts_hook)
6090 res = mprts_hook (&res_op);
6091 if (res)
6093 if (dump_file && dump_flags & TDF_DETAILS)
6095 fprintf (dump_file, "Match-and-simplified ");
6096 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6097 fprintf (dump_file, " to ");
6098 print_generic_expr (dump_file, res);
6099 fprintf (dump_file, "\n");
6101 return res;
/* Match-and-simplify did not produce a result; fall back to
   per-statement-code constant folding below.  */
6105 location_t loc = gimple_location (stmt);
6106 switch (gimple_code (stmt))
6108 case GIMPLE_ASSIGN:
6110 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6112 switch (get_gimple_rhs_class (subcode))
6114 case GIMPLE_SINGLE_RHS:
6116 tree rhs = gimple_assign_rhs1 (stmt);
6117 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6119 if (TREE_CODE (rhs) == SSA_NAME)
6121 /* If the RHS is an SSA_NAME, return its known constant value,
6122 if any. */
6123 return (*valueize) (rhs);
6125 /* Handle propagating invariant addresses into address
6126 operations. */
6127 else if (TREE_CODE (rhs) == ADDR_EXPR
6128 && !is_gimple_min_invariant (rhs))
6130 poly_int64 offset = 0;
6131 tree base;
6132 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6133 &offset,
6134 valueize);
6135 if (base
6136 && (CONSTANT_CLASS_P (base)
6137 || decl_address_invariant_p (base)))
6138 return build_invariant_address (TREE_TYPE (rhs),
6139 base, offset);
6141 else if (TREE_CODE (rhs) == CONSTRUCTOR
6142 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6143 && known_eq (CONSTRUCTOR_NELTS (rhs),
6144 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6146 unsigned i, nelts;
6147 tree val;
6149 nelts = CONSTRUCTOR_NELTS (rhs);
6150 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6151 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6153 val = (*valueize) (val);
6154 if (TREE_CODE (val) == INTEGER_CST
6155 || TREE_CODE (val) == REAL_CST
6156 || TREE_CODE (val) == FIXED_CST)
6157 vec.quick_push (val);
6158 else
6159 return NULL_TREE;
6162 return vec.build ();
6164 if (subcode == OBJ_TYPE_REF)
6166 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6167 /* If callee is constant, we can fold away the wrapper. */
6168 if (is_gimple_min_invariant (val))
6169 return val;
6172 if (kind == tcc_reference)
6174 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6175 || TREE_CODE (rhs) == REALPART_EXPR
6176 || TREE_CODE (rhs) == IMAGPART_EXPR)
6177 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6179 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6180 return fold_unary_loc (EXPR_LOCATION (rhs),
6181 TREE_CODE (rhs),
6182 TREE_TYPE (rhs), val);
6184 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6185 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6187 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6188 return fold_ternary_loc (EXPR_LOCATION (rhs),
6189 TREE_CODE (rhs),
6190 TREE_TYPE (rhs), val,
6191 TREE_OPERAND (rhs, 1),
6192 TREE_OPERAND (rhs, 2));
6194 else if (TREE_CODE (rhs) == MEM_REF
6195 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6197 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6198 if (TREE_CODE (val) == ADDR_EXPR
6199 && is_gimple_min_invariant (val))
6201 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6202 unshare_expr (val),
6203 TREE_OPERAND (rhs, 1));
6204 if (tem)
6205 rhs = tem;
6208 return fold_const_aggregate_ref_1 (rhs, valueize);
6210 else if (kind == tcc_declaration)
6211 return get_symbol_constant_value (rhs);
6212 return rhs;
6215 case GIMPLE_UNARY_RHS:
6216 return NULL_TREE;
6218 case GIMPLE_BINARY_RHS:
6219 /* Translate &x + CST into an invariant form suitable for
6220 further propagation. */
6221 if (subcode == POINTER_PLUS_EXPR)
6223 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6224 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6225 if (TREE_CODE (op0) == ADDR_EXPR
6226 && TREE_CODE (op1) == INTEGER_CST)
6228 tree off = fold_convert (ptr_type_node, op1);
6229 return build_fold_addr_expr_loc
6230 (loc,
6231 fold_build2 (MEM_REF,
6232 TREE_TYPE (TREE_TYPE (op0)),
6233 unshare_expr (op0), off));
6236 /* Canonicalize bool != 0 and bool == 0 appearing after
6237 valueization. While gimple_simplify handles this
6238 it can get confused by the ~X == 1 -> X == 0 transform
6239 which we can't reduce to a SSA name or a constant
6240 (and we have no way to tell gimple_simplify to not
6241 consider those transforms in the first place). */
6242 else if (subcode == EQ_EXPR
6243 || subcode == NE_EXPR)
6245 tree lhs = gimple_assign_lhs (stmt);
6246 tree op0 = gimple_assign_rhs1 (stmt);
6247 if (useless_type_conversion_p (TREE_TYPE (lhs),
6248 TREE_TYPE (op0)))
6250 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6251 op0 = (*valueize) (op0);
6252 if (TREE_CODE (op0) == INTEGER_CST)
6253 std::swap (op0, op1);
6254 if (TREE_CODE (op1) == INTEGER_CST
6255 && ((subcode == NE_EXPR && integer_zerop (op1))
6256 || (subcode == EQ_EXPR && integer_onep (op1))))
6257 return op0;
6260 return NULL_TREE;
6262 case GIMPLE_TERNARY_RHS:
6264 /* Handle ternary operators that can appear in GIMPLE form. */
6265 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6266 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6267 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6268 return fold_ternary_loc (loc, subcode,
6269 gimple_expr_type (stmt), op0, op1, op2);
6272 default:
6273 gcc_unreachable ();
6277 case GIMPLE_CALL:
6279 tree fn;
6280 gcall *call_stmt = as_a <gcall *> (stmt);
6282 if (gimple_call_internal_p (stmt))
6284 enum tree_code subcode = ERROR_MARK;
6285 switch (gimple_call_internal_fn (stmt))
6287 case IFN_UBSAN_CHECK_ADD:
6288 subcode = PLUS_EXPR;
6289 break;
6290 case IFN_UBSAN_CHECK_SUB:
6291 subcode = MINUS_EXPR;
6292 break;
6293 case IFN_UBSAN_CHECK_MUL:
6294 subcode = MULT_EXPR;
6295 break;
6296 case IFN_BUILTIN_EXPECT:
6298 tree arg0 = gimple_call_arg (stmt, 0);
6299 tree op0 = (*valueize) (arg0);
6300 if (TREE_CODE (op0) == INTEGER_CST)
6301 return op0;
6302 return NULL_TREE;
6304 default:
6305 return NULL_TREE;
/* Only the UBSAN_CHECK_{ADD,SUB,MUL} cases reach here; fold them as
   their underlying arithmetic when that cannot overflow.  */
6307 tree arg0 = gimple_call_arg (stmt, 0);
6308 tree arg1 = gimple_call_arg (stmt, 1);
6309 tree op0 = (*valueize) (arg0);
6310 tree op1 = (*valueize) (arg1);
6312 if (TREE_CODE (op0) != INTEGER_CST
6313 || TREE_CODE (op1) != INTEGER_CST)
6315 switch (subcode)
6317 case MULT_EXPR:
6318 /* x * 0 = 0 * x = 0 without overflow. */
6319 if (integer_zerop (op0) || integer_zerop (op1))
6320 return build_zero_cst (TREE_TYPE (arg0));
6321 break;
6322 case MINUS_EXPR:
6323 /* y - y = 0 without overflow. */
6324 if (operand_equal_p (op0, op1, 0))
6325 return build_zero_cst (TREE_TYPE (arg0));
6326 break;
6327 default:
6328 break;
6331 tree res
6332 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6333 if (res
6334 && TREE_CODE (res) == INTEGER_CST
6335 && !TREE_OVERFLOW (res))
6336 return res;
6337 return NULL_TREE;
6340 fn = (*valueize) (gimple_call_fn (stmt));
6341 if (TREE_CODE (fn) == ADDR_EXPR
6342 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6343 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6344 && gimple_builtin_call_types_compatible_p (stmt,
6345 TREE_OPERAND (fn, 0)))
6347 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6348 tree retval;
6349 unsigned i;
6350 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6351 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6352 retval = fold_builtin_call_array (loc,
6353 gimple_call_return_type (call_stmt),
6354 fn, gimple_call_num_args (stmt), args);
6355 if (retval)
6357 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6358 STRIP_NOPS (retval);
6359 retval = fold_convert (gimple_call_return_type (call_stmt),
6360 retval);
6362 return retval;
6364 return NULL_TREE;
6367 default:
6368 return NULL_TREE;
6372 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6373 Returns NULL_TREE if folding to a constant is not possible, otherwise
6374 returns a constant according to is_gimple_min_invariant. */
6376 tree
6377 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6379 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6380 if (res && is_gimple_min_invariant (res))
6381 return res;
6382 return NULL_TREE;
6386 /* The following set of functions are supposed to fold references using
6387 their constant initializers. */
6389 /* See if we can find constructor defining value of BASE.
6390 When we know the consructor with constant offset (such as
6391 base is array[40] and we do know constructor of array), then
6392 BIT_OFFSET is adjusted accordingly.
6394 As a special case, return error_mark_node when constructor
6395 is not explicitly available, but it is known to be zero
6396 such as 'static const int a;'. */
6397 static tree
6398 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6399 tree (*valueize)(tree))
6401 poly_int64 bit_offset2, size, max_size;
6402 bool reverse;
6404 if (TREE_CODE (base) == MEM_REF)
6406 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6407 if (!boff.to_shwi (bit_offset))
6408 return NULL_TREE;
6410 if (valueize
6411 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6412 base = valueize (TREE_OPERAND (base, 0));
6413 if (!base || TREE_CODE (base) != ADDR_EXPR)
6414 return NULL_TREE;
6415 base = TREE_OPERAND (base, 0);
6417 else if (valueize
6418 && TREE_CODE (base) == SSA_NAME)
6419 base = valueize (base);
6421 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6422 DECL_INITIAL. If BASE is a nested reference into another
6423 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6424 the inner reference. */
6425 switch (TREE_CODE (base))
6427 case VAR_DECL:
6428 case CONST_DECL:
6430 tree init = ctor_for_folding (base);
6432 /* Our semantic is exact opposite of ctor_for_folding;
6433 NULL means unknown, while error_mark_node is 0. */
6434 if (init == error_mark_node)
6435 return NULL_TREE;
6436 if (!init)
6437 return error_mark_node;
6438 return init;
6441 case VIEW_CONVERT_EXPR:
6442 return get_base_constructor (TREE_OPERAND (base, 0),
6443 bit_offset, valueize);
6445 case ARRAY_REF:
6446 case COMPONENT_REF:
6447 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6448 &reverse);
6449 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6450 return NULL_TREE;
6451 *bit_offset += bit_offset2;
6452 return get_base_constructor (base, bit_offset, valueize);
6454 case CONSTRUCTOR:
6455 return base;
6457 default:
6458 if (CONSTANT_CLASS_P (base))
6459 return base;
6461 return NULL_TREE;
6465 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6466 to the memory at bit OFFSET. When non-null, TYPE is the expected
6467 type of the reference; otherwise the type of the referenced element
6468 is used instead. When SIZE is zero, attempt to fold a reference to
6469 the entire element which OFFSET refers to. Increment *SUBOFF by
6470 the bit offset of the accessed element. */
6472 static tree
6473 fold_array_ctor_reference (tree type, tree ctor,
6474 unsigned HOST_WIDE_INT offset,
6475 unsigned HOST_WIDE_INT size,
6476 tree from_decl,
6477 unsigned HOST_WIDE_INT *suboff)
6479 offset_int low_bound;
6480 offset_int elt_size;
6481 offset_int access_index;
6482 tree domain_type = NULL_TREE;
6483 HOST_WIDE_INT inner_offset;
6485 /* Compute low bound and elt size. */
6486 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6487 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6488 if (domain_type && TYPE_MIN_VALUE (domain_type))
6490 /* Static constructors for variably sized objects makes no sense. */
6491 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6492 return NULL_TREE;
6493 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6495 else
6496 low_bound = 0;
6497 /* Static constructors for variably sized objects makes no sense. */
6498 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6499 return NULL_TREE;
6500 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6502 /* When TYPE is non-null, verify that it specifies a constant-sized
6503 accessed not larger than size of array element. */
6504 if (type
6505 && (!TYPE_SIZE_UNIT (type)
6506 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6507 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6508 || elt_size == 0))
6509 return NULL_TREE;
6511 /* Compute the array index we look for. */
6512 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6513 elt_size);
6514 access_index += low_bound;
6516 /* And offset within the access. */
6517 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6519 /* See if the array field is large enough to span whole access. We do not
6520 care to fold accesses spanning multiple array indexes. */
6521 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6522 return NULL_TREE;
6523 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6525 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6527 /* For the final reference to the entire accessed element
6528 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6529 may be null) in favor of the type of the element, and set
6530 SIZE to the size of the accessed element. */
6531 inner_offset = 0;
6532 type = TREE_TYPE (val);
6533 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6536 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6537 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6538 suboff);
6541 /* Memory not explicitly mentioned in constructor is 0 (or
6542 the reference is out of range). */
6543 return type ? build_zero_cst (type) : NULL_TREE;
6546 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6547 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6548 is the expected type of the reference; otherwise the type of
6549 the referenced member is used instead. When SIZE is zero,
6550 attempt to fold a reference to the entire member which OFFSET
6551 refers to; in this case. Increment *SUBOFF by the bit offset
6552 of the accessed member. */
6554 static tree
6555 fold_nonarray_ctor_reference (tree type, tree ctor,
6556 unsigned HOST_WIDE_INT offset,
6557 unsigned HOST_WIDE_INT size,
6558 tree from_decl,
6559 unsigned HOST_WIDE_INT *suboff)
6561 unsigned HOST_WIDE_INT cnt;
6562 tree cfield, cval;
6564 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6565 cval)
6567 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6568 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6569 tree field_size = DECL_SIZE (cfield);
6571 if (!field_size)
6573 /* Determine the size of the flexible array member from
6574 the size of the initializer provided for it. */
6575 field_size = TYPE_SIZE (TREE_TYPE (cval));
6578 /* Variable sized objects in static constructors makes no sense,
6579 but field_size can be NULL for flexible array members. */
6580 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6581 && TREE_CODE (byte_offset) == INTEGER_CST
6582 && (field_size != NULL_TREE
6583 ? TREE_CODE (field_size) == INTEGER_CST
6584 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6586 /* Compute bit offset of the field. */
6587 offset_int bitoffset
6588 = (wi::to_offset (field_offset)
6589 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6590 /* Compute bit offset where the field ends. */
6591 offset_int bitoffset_end;
6592 if (field_size != NULL_TREE)
6593 bitoffset_end = bitoffset + wi::to_offset (field_size);
6594 else
6595 bitoffset_end = 0;
6597 /* Compute the bit offset of the end of the desired access.
6598 As a special case, if the size of the desired access is
6599 zero, assume the access is to the entire field (and let
6600 the caller make any necessary adjustments by storing
6601 the actual bounds of the field in FIELDBOUNDS). */
6602 offset_int access_end = offset_int (offset);
6603 if (size)
6604 access_end += size;
6605 else
6606 access_end = bitoffset_end;
6608 /* Is there any overlap between the desired access at
6609 [OFFSET, OFFSET+SIZE) and the offset of the field within
6610 the object at [BITOFFSET, BITOFFSET_END)? */
6611 if (wi::cmps (access_end, bitoffset) > 0
6612 && (field_size == NULL_TREE
6613 || wi::lts_p (offset, bitoffset_end)))
6615 *suboff += bitoffset.to_uhwi ();
6617 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6619 /* For the final reference to the entire accessed member
6620 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6621 be null) in favor of the type of the member, and set
6622 SIZE to the size of the accessed member. */
6623 offset = bitoffset.to_uhwi ();
6624 type = TREE_TYPE (cval);
6625 size = (bitoffset_end - bitoffset).to_uhwi ();
6628 /* We do have overlap. Now see if the field is large enough
6629 to cover the access. Give up for accesses that extend
6630 beyond the end of the object or that span multiple fields. */
6631 if (wi::cmps (access_end, bitoffset_end) > 0)
6632 return NULL_TREE;
6633 if (offset < bitoffset)
6634 return NULL_TREE;
6636 offset_int inner_offset = offset_int (offset) - bitoffset;
6637 return fold_ctor_reference (type, cval,
6638 inner_offset.to_uhwi (), size,
6639 from_decl, suboff);
6642 /* Memory not explicitly mentioned in constructor is 0. */
6643 return type ? build_zero_cst (type) : NULL_TREE;
6646 /* CTOR is value initializing memory. Fold a reference of TYPE and
6647 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6648 is zero, attempt to fold a reference to the entire subobject
6649 which OFFSET refers to. This is used when folding accesses to
6650 string members of aggregates. When non-null, set *SUBOFF to
6651 the bit offset of the accessed subobject. */
6653 tree
6654 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6655 const poly_uint64 &poly_size, tree from_decl,
6656 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6658 tree ret;
6660 /* We found the field with exact match. */
6661 if (type
6662 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6663 && known_eq (poly_offset, 0U))
6664 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6666 /* The remaining optimizations need a constant size and offset. */
6667 unsigned HOST_WIDE_INT size, offset;
6668 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6669 return NULL_TREE;
6671 /* We are at the end of walk, see if we can view convert the
6672 result. */
6673 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6674 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6675 && !compare_tree_int (TYPE_SIZE (type), size)
6676 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6678 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6679 if (ret)
6681 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6682 if (ret)
6683 STRIP_USELESS_TYPE_CONVERSION (ret);
6685 return ret;
6687 /* For constants and byte-aligned/sized reads try to go through
6688 native_encode/interpret. */
6689 if (CONSTANT_CLASS_P (ctor)
6690 && BITS_PER_UNIT == 8
6691 && offset % BITS_PER_UNIT == 0
6692 && size % BITS_PER_UNIT == 0
6693 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6695 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6696 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6697 offset / BITS_PER_UNIT);
6698 if (len > 0)
6699 return native_interpret_expr (type, buf, len);
6701 if (TREE_CODE (ctor) == CONSTRUCTOR)
6703 unsigned HOST_WIDE_INT dummy = 0;
6704 if (!suboff)
6705 suboff = &dummy;
6707 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6708 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6709 return fold_array_ctor_reference (type, ctor, offset, size,
6710 from_decl, suboff);
6712 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6713 from_decl, suboff);
6716 return NULL_TREE;
6719 /* Return the tree representing the element referenced by T if T is an
6720 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6721 names using VALUEIZE. Return NULL_TREE otherwise. */
6723 tree
6724 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6726 tree ctor, idx, base;
6727 poly_int64 offset, size, max_size;
6728 tree tem;
6729 bool reverse;
6731 if (TREE_THIS_VOLATILE (t))
6732 return NULL_TREE;
6734 if (DECL_P (t))
6735 return get_symbol_constant_value (t);
6737 tem = fold_read_from_constant_string (t);
6738 if (tem)
6739 return tem;
6741 switch (TREE_CODE (t))
6743 case ARRAY_REF:
6744 case ARRAY_RANGE_REF:
6745 /* Constant indexes are handled well by get_base_constructor.
6746 Only special case variable offsets.
6747 FIXME: This code can't handle nested references with variable indexes
6748 (they will be handled only by iteration of ccp). Perhaps we can bring
6749 get_ref_base_and_extent here and make it use a valueize callback. */
6750 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6751 && valueize
6752 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6753 && poly_int_tree_p (idx))
6755 tree low_bound, unit_size;
6757 /* If the resulting bit-offset is constant, track it. */
6758 if ((low_bound = array_ref_low_bound (t),
6759 poly_int_tree_p (low_bound))
6760 && (unit_size = array_ref_element_size (t),
6761 tree_fits_uhwi_p (unit_size)))
6763 poly_offset_int woffset
6764 = wi::sext (wi::to_poly_offset (idx)
6765 - wi::to_poly_offset (low_bound),
6766 TYPE_PRECISION (TREE_TYPE (idx)));
6768 if (woffset.to_shwi (&offset))
6770 /* TODO: This code seems wrong, multiply then check
6771 to see if it fits. */
6772 offset *= tree_to_uhwi (unit_size);
6773 offset *= BITS_PER_UNIT;
6775 base = TREE_OPERAND (t, 0);
6776 ctor = get_base_constructor (base, &offset, valueize);
6777 /* Empty constructor. Always fold to 0. */
6778 if (ctor == error_mark_node)
6779 return build_zero_cst (TREE_TYPE (t));
6780 /* Out of bound array access. Value is undefined,
6781 but don't fold. */
6782 if (maybe_lt (offset, 0))
6783 return NULL_TREE;
6784 /* We can not determine ctor. */
6785 if (!ctor)
6786 return NULL_TREE;
6787 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6788 tree_to_uhwi (unit_size)
6789 * BITS_PER_UNIT,
6790 base);
6794 /* Fallthru. */
6796 case COMPONENT_REF:
6797 case BIT_FIELD_REF:
6798 case TARGET_MEM_REF:
6799 case MEM_REF:
6800 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6801 ctor = get_base_constructor (base, &offset, valueize);
6803 /* Empty constructor. Always fold to 0. */
6804 if (ctor == error_mark_node)
6805 return build_zero_cst (TREE_TYPE (t));
6806 /* We do not know precise address. */
6807 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6808 return NULL_TREE;
6809 /* We can not determine ctor. */
6810 if (!ctor)
6811 return NULL_TREE;
6813 /* Out of bound array access. Value is undefined, but don't fold. */
6814 if (maybe_lt (offset, 0))
6815 return NULL_TREE;
6817 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6818 base);
6820 case REALPART_EXPR:
6821 case IMAGPART_EXPR:
6823 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6824 if (c && TREE_CODE (c) == COMPLEX_CST)
6825 return fold_build1_loc (EXPR_LOCATION (t),
6826 TREE_CODE (t), TREE_TYPE (t), c);
6827 break;
6830 default:
6831 break;
6834 return NULL_TREE;
6837 tree
6838 fold_const_aggregate_ref (tree t)
6840 return fold_const_aggregate_ref_1 (t, NULL);
6843 /* Lookup virtual method with index TOKEN in a virtual table V
6844 at OFFSET.
6845 Set CAN_REFER if non-NULL to false if method
6846 is not referable or if the virtual table is ill-formed (such as rewriten
6847 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6849 tree
6850 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6851 tree v,
6852 unsigned HOST_WIDE_INT offset,
6853 bool *can_refer)
6855 tree vtable = v, init, fn;
6856 unsigned HOST_WIDE_INT size;
6857 unsigned HOST_WIDE_INT elt_size, access_index;
6858 tree domain_type;
6860 if (can_refer)
6861 *can_refer = true;
6863 /* First of all double check we have virtual table. */
6864 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6866 /* Pass down that we lost track of the target. */
6867 if (can_refer)
6868 *can_refer = false;
6869 return NULL_TREE;
6872 init = ctor_for_folding (v);
6874 /* The virtual tables should always be born with constructors
6875 and we always should assume that they are avaialble for
6876 folding. At the moment we do not stream them in all cases,
6877 but it should never happen that ctor seem unreachable. */
6878 gcc_assert (init);
6879 if (init == error_mark_node)
6881 /* Pass down that we lost track of the target. */
6882 if (can_refer)
6883 *can_refer = false;
6884 return NULL_TREE;
6886 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6887 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6888 offset *= BITS_PER_UNIT;
6889 offset += token * size;
6891 /* Lookup the value in the constructor that is assumed to be array.
6892 This is equivalent to
6893 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6894 offset, size, NULL);
6895 but in a constant time. We expect that frontend produced a simple
6896 array without indexed initializers. */
6898 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6899 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6900 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6901 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6903 access_index = offset / BITS_PER_UNIT / elt_size;
6904 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6906 /* This code makes an assumption that there are no
6907 indexed fileds produced by C++ FE, so we can directly index the array. */
6908 if (access_index < CONSTRUCTOR_NELTS (init))
6910 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6911 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6912 STRIP_NOPS (fn);
6914 else
6915 fn = NULL;
6917 /* For type inconsistent program we may end up looking up virtual method
6918 in virtual table that does not contain TOKEN entries. We may overrun
6919 the virtual table and pick up a constant or RTTI info pointer.
6920 In any case the call is undefined. */
6921 if (!fn
6922 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6923 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6924 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6925 else
6927 fn = TREE_OPERAND (fn, 0);
6929 /* When cgraph node is missing and function is not public, we cannot
6930 devirtualize. This can happen in WHOPR when the actual method
6931 ends up in other partition, because we found devirtualization
6932 possibility too late. */
6933 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6935 if (can_refer)
6937 *can_refer = false;
6938 return fn;
6940 return NULL_TREE;
6944 /* Make sure we create a cgraph node for functions we'll reference.
6945 They can be non-existent if the reference comes from an entry
6946 of an external vtable for example. */
6947 cgraph_node::get_create (fn);
6949 return fn;
6952 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6953 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6954 KNOWN_BINFO carries the binfo describing the true type of
6955 OBJ_TYPE_REF_OBJECT(REF).
6956 Set CAN_REFER if non-NULL to false if method
6957 is not referable or if the virtual table is ill-formed (such as rewriten
6958 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6960 tree
6961 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6962 bool *can_refer)
6964 unsigned HOST_WIDE_INT offset;
6965 tree v;
6967 v = BINFO_VTABLE (known_binfo);
6968 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6969 if (!v)
6970 return NULL_TREE;
6972 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6974 if (can_refer)
6975 *can_refer = false;
6976 return NULL_TREE;
6978 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6981 /* Given a pointer value T, return a simplified version of an
6982 indirection through T, or NULL_TREE if no simplification is
6983 possible. Note that the resulting type may be different from
6984 the type pointed to in the sense that it is still compatible
6985 from the langhooks point of view. */
6987 tree
6988 gimple_fold_indirect_ref (tree t)
6990 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6991 tree sub = t;
6992 tree subtype;
6994 STRIP_NOPS (sub);
6995 subtype = TREE_TYPE (sub);
6996 if (!POINTER_TYPE_P (subtype)
6997 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6998 return NULL_TREE;
7000 if (TREE_CODE (sub) == ADDR_EXPR)
7002 tree op = TREE_OPERAND (sub, 0);
7003 tree optype = TREE_TYPE (op);
7004 /* *&p => p */
7005 if (useless_type_conversion_p (type, optype))
7006 return op;
7008 /* *(foo *)&fooarray => fooarray[0] */
7009 if (TREE_CODE (optype) == ARRAY_TYPE
7010 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7011 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7013 tree type_domain = TYPE_DOMAIN (optype);
7014 tree min_val = size_zero_node;
7015 if (type_domain && TYPE_MIN_VALUE (type_domain))
7016 min_val = TYPE_MIN_VALUE (type_domain);
7017 if (TREE_CODE (min_val) == INTEGER_CST)
7018 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7020 /* *(foo *)&complexfoo => __real__ complexfoo */
7021 else if (TREE_CODE (optype) == COMPLEX_TYPE
7022 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7023 return fold_build1 (REALPART_EXPR, type, op);
7024 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7025 else if (TREE_CODE (optype) == VECTOR_TYPE
7026 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7028 tree part_width = TYPE_SIZE (type);
7029 tree index = bitsize_int (0);
7030 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7034 /* *(p + CST) -> ... */
7035 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7036 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7038 tree addr = TREE_OPERAND (sub, 0);
7039 tree off = TREE_OPERAND (sub, 1);
7040 tree addrtype;
7042 STRIP_NOPS (addr);
7043 addrtype = TREE_TYPE (addr);
7045 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7046 if (TREE_CODE (addr) == ADDR_EXPR
7047 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7048 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7049 && tree_fits_uhwi_p (off))
7051 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7052 tree part_width = TYPE_SIZE (type);
7053 unsigned HOST_WIDE_INT part_widthi
7054 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7055 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7056 tree index = bitsize_int (indexi);
7057 if (known_lt (offset / part_widthi,
7058 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7059 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7060 part_width, index);
7063 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7064 if (TREE_CODE (addr) == ADDR_EXPR
7065 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7066 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7068 tree size = TYPE_SIZE_UNIT (type);
7069 if (tree_int_cst_equal (size, off))
7070 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7073 /* *(p + CST) -> MEM_REF <p, CST>. */
7074 if (TREE_CODE (addr) != ADDR_EXPR
7075 || DECL_P (TREE_OPERAND (addr, 0)))
7076 return fold_build2 (MEM_REF, type,
7077 addr,
7078 wide_int_to_tree (ptype, wi::to_wide (off)));
7081 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7082 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7083 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7084 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7086 tree type_domain;
7087 tree min_val = size_zero_node;
7088 tree osub = sub;
7089 sub = gimple_fold_indirect_ref (sub);
7090 if (! sub)
7091 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7092 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7093 if (type_domain && TYPE_MIN_VALUE (type_domain))
7094 min_val = TYPE_MIN_VALUE (type_domain);
7095 if (TREE_CODE (min_val) == INTEGER_CST)
7096 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7099 return NULL_TREE;
7102 /* Return true if CODE is an operation that when operating on signed
7103 integer types involves undefined behavior on overflow and the
7104 operation can be expressed with unsigned arithmetic. */
7106 bool
7107 arith_code_with_undefined_signed_overflow (tree_code code)
7109 switch (code)
7111 case PLUS_EXPR:
7112 case MINUS_EXPR:
7113 case MULT_EXPR:
7114 case NEGATE_EXPR:
7115 case POINTER_PLUS_EXPR:
7116 return true;
7117 default:
7118 return false;
7122 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7123 operation that can be transformed to unsigned arithmetic by converting
7124 its operand, carrying out the operation in the corresponding unsigned
7125 type and converting the result back to the original type.
7127 Returns a sequence of statements that replace STMT and also contain
7128 a modified form of STMT itself. */
7130 gimple_seq
7131 rewrite_to_defined_overflow (gimple *stmt)
7133 if (dump_file && (dump_flags & TDF_DETAILS))
7135 fprintf (dump_file, "rewriting stmt with undefined signed "
7136 "overflow ");
7137 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7140 tree lhs = gimple_assign_lhs (stmt);
7141 tree type = unsigned_type_for (TREE_TYPE (lhs));
7142 gimple_seq stmts = NULL;
7143 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7145 tree op = gimple_op (stmt, i);
7146 op = gimple_convert (&stmts, type, op);
7147 gimple_set_op (stmt, i, op);
7149 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7150 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7151 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7152 gimple_seq_add_stmt (&stmts, stmt);
7153 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7154 gimple_seq_add_stmt (&stmts, cvt);
7156 return stmts;
7160 /* The valueization hook we use for the gimple_build API simplification.
7161 This makes us match fold_buildN behavior by only combining with
7162 statements in the sequence(s) we are currently building. */
7164 static tree
7165 gimple_build_valueize (tree op)
7167 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7168 return op;
7169 return NULL_TREE;
7172 /* Build the expression CODE OP0 of type TYPE with location LOC,
7173 simplifying it first if possible. Returns the built
7174 expression value and appends statements possibly defining it
7175 to SEQ. */
7177 tree
7178 gimple_build (gimple_seq *seq, location_t loc,
7179 enum tree_code code, tree type, tree op0)
7181 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7182 if (!res)
7184 res = create_tmp_reg_or_ssa_name (type);
7185 gimple *stmt;
7186 if (code == REALPART_EXPR
7187 || code == IMAGPART_EXPR
7188 || code == VIEW_CONVERT_EXPR)
7189 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7190 else
7191 stmt = gimple_build_assign (res, code, op0);
7192 gimple_set_location (stmt, loc);
7193 gimple_seq_add_stmt_without_update (seq, stmt);
7195 return res;
7198 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7199 simplifying it first if possible. Returns the built
7200 expression value and appends statements possibly defining it
7201 to SEQ. */
7203 tree
7204 gimple_build (gimple_seq *seq, location_t loc,
7205 enum tree_code code, tree type, tree op0, tree op1)
7207 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7208 if (!res)
7210 res = create_tmp_reg_or_ssa_name (type);
7211 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7212 gimple_set_location (stmt, loc);
7213 gimple_seq_add_stmt_without_update (seq, stmt);
7215 return res;
7218 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7219 simplifying it first if possible. Returns the built
7220 expression value and appends statements possibly defining it
7221 to SEQ. */
7223 tree
7224 gimple_build (gimple_seq *seq, location_t loc,
7225 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7227 tree res = gimple_simplify (code, type, op0, op1, op2,
7228 seq, gimple_build_valueize);
7229 if (!res)
7231 res = create_tmp_reg_or_ssa_name (type);
7232 gimple *stmt;
7233 if (code == BIT_FIELD_REF)
7234 stmt = gimple_build_assign (res, code,
7235 build3 (code, type, op0, op1, op2));
7236 else
7237 stmt = gimple_build_assign (res, code, op0, op1, op2);
7238 gimple_set_location (stmt, loc);
7239 gimple_seq_add_stmt_without_update (seq, stmt);
7241 return res;
7244 /* Build the call FN (ARG0) with a result of type TYPE
7245 (or no result if TYPE is void) with location LOC,
7246 simplifying it first if possible. Returns the built
7247 expression value (or NULL_TREE if TYPE is void) and appends
7248 statements possibly defining it to SEQ. */
7250 tree
7251 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7252 tree type, tree arg0)
7254 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7255 if (!res)
7257 gcall *stmt;
7258 if (internal_fn_p (fn))
7259 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7260 else
7262 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7263 stmt = gimple_build_call (decl, 1, arg0);
7265 if (!VOID_TYPE_P (type))
7267 res = create_tmp_reg_or_ssa_name (type);
7268 gimple_call_set_lhs (stmt, res);
7270 gimple_set_location (stmt, loc);
7271 gimple_seq_add_stmt_without_update (seq, stmt);
7273 return res;
7276 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7277 (or no result if TYPE is void) with location LOC,
7278 simplifying it first if possible. Returns the built
7279 expression value (or NULL_TREE if TYPE is void) and appends
7280 statements possibly defining it to SEQ. */
7282 tree
7283 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7284 tree type, tree arg0, tree arg1)
7286 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7287 if (!res)
7289 gcall *stmt;
7290 if (internal_fn_p (fn))
7291 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7292 else
7294 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7295 stmt = gimple_build_call (decl, 2, arg0, arg1);
7297 if (!VOID_TYPE_P (type))
7299 res = create_tmp_reg_or_ssa_name (type);
7300 gimple_call_set_lhs (stmt, res);
7302 gimple_set_location (stmt, loc);
7303 gimple_seq_add_stmt_without_update (seq, stmt);
7305 return res;
7308 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7309 (or no result if TYPE is void) with location LOC,
7310 simplifying it first if possible. Returns the built
7311 expression value (or NULL_TREE if TYPE is void) and appends
7312 statements possibly defining it to SEQ. */
7314 tree
7315 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7316 tree type, tree arg0, tree arg1, tree arg2)
7318 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7319 seq, gimple_build_valueize);
7320 if (!res)
7322 gcall *stmt;
7323 if (internal_fn_p (fn))
7324 stmt = gimple_build_call_internal (as_internal_fn (fn),
7325 3, arg0, arg1, arg2);
7326 else
7328 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7329 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7331 if (!VOID_TYPE_P (type))
7333 res = create_tmp_reg_or_ssa_name (type);
7334 gimple_call_set_lhs (stmt, res);
7336 gimple_set_location (stmt, loc);
7337 gimple_seq_add_stmt_without_update (seq, stmt);
7339 return res;
7342 /* Build the conversion (TYPE) OP with a result of type TYPE
7343 with location LOC if such conversion is neccesary in GIMPLE,
7344 simplifying it first.
7345 Returns the built expression value and appends
7346 statements possibly defining it to SEQ. */
7348 tree
7349 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7351 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7352 return op;
7353 return gimple_build (seq, loc, NOP_EXPR, type, op);
7356 /* Build the conversion (ptrofftype) OP with a result of a type
7357 compatible with ptrofftype with location LOC if such conversion
7358 is neccesary in GIMPLE, simplifying it first.
7359 Returns the built expression value and appends
7360 statements possibly defining it to SEQ. */
7362 tree
7363 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7365 if (ptrofftype_p (TREE_TYPE (op)))
7366 return op;
7367 return gimple_convert (seq, loc, sizetype, op);
7370 /* Build a vector of type TYPE in which each element has the value OP.
7371 Return a gimple value for the result, appending any new statements
7372 to SEQ. */
7374 tree
7375 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7376 tree op)
7378 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7379 && !CONSTANT_CLASS_P (op))
7380 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7382 tree res, vec = build_vector_from_val (type, op);
7383 if (is_gimple_val (vec))
7384 return vec;
7385 if (gimple_in_ssa_p (cfun))
7386 res = make_ssa_name (type);
7387 else
7388 res = create_tmp_reg (type);
7389 gimple *stmt = gimple_build_assign (res, vec);
7390 gimple_set_location (stmt, loc);
7391 gimple_seq_add_stmt_without_update (seq, stmt);
7392 return res;
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  /* Reject stepped encodings (see the head comment).  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Checking the encoded elements is enough: every other element repeats
     one of them.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* Some element is non-constant, so fall back to emitting a
	   CONSTRUCTOR of all elements assigned to a temporary.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note: deliberately reuses the outer index I; the function
	   returns below, so the outer loop never resumes.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant: build a VECTOR_CST directly.  */
  return builder->build ();
}
7433 /* Return true if the result of assignment STMT is known to be non-negative.
7434 If the return value is based on the assumption that signed overflow is
7435 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7436 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7438 static bool
7439 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7440 int depth)
7442 enum tree_code code = gimple_assign_rhs_code (stmt);
7443 switch (get_gimple_rhs_class (code))
7445 case GIMPLE_UNARY_RHS:
7446 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7447 gimple_expr_type (stmt),
7448 gimple_assign_rhs1 (stmt),
7449 strict_overflow_p, depth);
7450 case GIMPLE_BINARY_RHS:
7451 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7452 gimple_expr_type (stmt),
7453 gimple_assign_rhs1 (stmt),
7454 gimple_assign_rhs2 (stmt),
7455 strict_overflow_p, depth);
7456 case GIMPLE_TERNARY_RHS:
7457 return false;
7458 case GIMPLE_SINGLE_RHS:
7459 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7460 strict_overflow_p, depth);
7461 case GIMPLE_INVALID_RHS:
7462 break;
7464 gcc_unreachable ();
7467 /* Return true if return value of call STMT is known to be non-negative.
7468 If the return value is based on the assumption that signed overflow is
7469 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7470 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7472 static bool
7473 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7474 int depth)
7476 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7477 gimple_call_arg (stmt, 0) : NULL_TREE;
7478 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7479 gimple_call_arg (stmt, 1) : NULL_TREE;
7481 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7482 gimple_call_combined_fn (stmt),
7483 arg0,
7484 arg1,
7485 strict_overflow_p, depth);
7488 /* Return true if return value of call STMT is known to be non-negative.
7489 If the return value is based on the assumption that signed overflow is
7490 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7491 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7493 static bool
7494 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7495 int depth)
7497 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7499 tree arg = gimple_phi_arg_def (stmt, i);
7500 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7501 return false;
7503 return true;
7506 /* Return true if STMT is known to compute a non-negative value.
7507 If the return value is based on the assumption that signed overflow is
7508 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7509 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7511 bool
7512 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7513 int depth)
7515 switch (gimple_code (stmt))
7517 case GIMPLE_ASSIGN:
7518 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7519 depth);
7520 case GIMPLE_CALL:
7521 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7522 depth);
7523 case GIMPLE_PHI:
7524 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7525 depth);
7526 default:
7527 return false;
7531 /* Return true if the floating-point value computed by assignment STMT
7532 is known to have an integer value. We also allow +Inf, -Inf and NaN
7533 to be considered integer values. Return false for signaling NaN.
7535 DEPTH is the current nesting depth of the query. */
7537 static bool
7538 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7540 enum tree_code code = gimple_assign_rhs_code (stmt);
7541 switch (get_gimple_rhs_class (code))
7543 case GIMPLE_UNARY_RHS:
7544 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7545 gimple_assign_rhs1 (stmt), depth);
7546 case GIMPLE_BINARY_RHS:
7547 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7548 gimple_assign_rhs1 (stmt),
7549 gimple_assign_rhs2 (stmt), depth);
7550 case GIMPLE_TERNARY_RHS:
7551 return false;
7552 case GIMPLE_SINGLE_RHS:
7553 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7554 case GIMPLE_INVALID_RHS:
7555 break;
7557 gcc_unreachable ();
7560 /* Return true if the floating-point value computed by call STMT is known
7561 to have an integer value. We also allow +Inf, -Inf and NaN to be
7562 considered integer values. Return false for signaling NaN.
7564 DEPTH is the current nesting depth of the query. */
7566 static bool
7567 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7569 tree arg0 = (gimple_call_num_args (stmt) > 0
7570 ? gimple_call_arg (stmt, 0)
7571 : NULL_TREE);
7572 tree arg1 = (gimple_call_num_args (stmt) > 1
7573 ? gimple_call_arg (stmt, 1)
7574 : NULL_TREE);
7575 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7576 arg0, arg1, depth);
7579 /* Return true if the floating-point result of phi STMT is known to have
7580 an integer value. We also allow +Inf, -Inf and NaN to be considered
7581 integer values. Return false for signaling NaN.
7583 DEPTH is the current nesting depth of the query. */
7585 static bool
7586 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7588 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7590 tree arg = gimple_phi_arg_def (stmt, i);
7591 if (!integer_valued_real_single_p (arg, depth + 1))
7592 return false;
7594 return true;
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  /* Only assignments, calls and PHIs are analyzed; anything else is
     conservatively assumed not to be integer-valued.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}