[PR c++/84492] stmt expr ending with overload
[official-gcc.git] / gcc / gimple-fold.c
blob8257873dd20ce40cfd04ab1834df37b419d3e28a
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
70 /* Return true when DECL can be referenced from current unit.
71 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
72 We can get declarations that are not possible to reference for various
73 reasons:
75 1) When analyzing C++ virtual tables.
76 C++ virtual tables do have known constructors even
77 when they are keyed to other compilation unit.
78 Those tables can contain pointers to methods and vars
79 in other units. Those methods have both STATIC and EXTERNAL
80 set.
81 2) In WHOPR mode devirtualization might lead to reference
82 to method that was partitioned elsehwere.
83 In this case we have static VAR_DECL or FUNCTION_DECL
84 that has no corresponding callgraph/varpool node
85 declaring the body.
86 3) COMDAT functions referred by external vtables that
87 we devirtualize only during final compilation stage.
88 At this time we already decided that we will not output
89 the function body and thus we can't reference the symbol
90 directly. */
92 static bool
93 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
95 varpool_node *vnode;
96 struct cgraph_node *node;
97 symtab_node *snode;
99 if (DECL_ABSTRACT_P (decl))
100 return false;
102 /* We are concerned only about static/external vars and functions. */
103 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
104 || !VAR_OR_FUNCTION_DECL_P (decl))
105 return true;
107 /* Static objects can be referred only if they was not optimized out yet. */
108 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
110 /* Before we start optimizing unreachable code we can be sure all
111 static objects are defined. */
112 if (symtab->function_flags_ready)
113 return true;
114 snode = symtab_node::get (decl);
115 if (!snode || !snode->definition)
116 return false;
117 node = dyn_cast <cgraph_node *> (snode);
118 return !node || !node->global.inlined_to;
121 /* We will later output the initializer, so we can refer to it.
122 So we are concerned only when DECL comes from initializer of
123 external var or var that has been optimized out. */
124 if (!from_decl
125 || !VAR_P (from_decl)
126 || (!DECL_EXTERNAL (from_decl)
127 && (vnode = varpool_node::get (from_decl)) != NULL
128 && vnode->definition)
129 || (flag_ltrans
130 && (vnode = varpool_node::get (from_decl)) != NULL
131 && vnode->in_other_partition))
132 return true;
133 /* We are folding reference from external vtable. The vtable may reffer
134 to a symbol keyed to other compilation unit. The other compilation
135 unit may be in separate DSO and the symbol may be hidden. */
136 if (DECL_VISIBILITY_SPECIFIED (decl)
137 && DECL_EXTERNAL (decl)
138 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
139 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
140 return false;
141 /* When function is public, we always can introduce new reference.
142 Exception are the COMDAT functions where introducing a direct
143 reference imply need to include function body in the curren tunit. */
144 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
145 return true;
146 /* We have COMDAT. We are going to check if we still have definition
147 or if the definition is going to be output in other partition.
148 Bypass this when gimplifying; all needed functions will be produced.
150 As observed in PR20991 for already optimized out comdat virtual functions
151 it may be tempting to not necessarily give up because the copy will be
152 output elsewhere when corresponding vtable is output.
153 This is however not possible - ABI specify that COMDATs are output in
154 units where they are used and when the other unit was compiled with LTO
155 it is possible that vtable was kept public while the function itself
156 was privatized. */
157 if (!symtab->function_flags_ready)
158 return true;
160 snode = symtab_node::get (decl);
161 if (!snode
162 || ((!snode->definition || DECL_EXTERNAL (decl))
163 && (!snode->in_other_partition
164 || (!snode->forced_by_abi && !snode->force_output))))
165 return false;
166 node = dyn_cast <cgraph_node *> (snode);
167 return !node || !node->global.inlined_to;
170 /* Create a temporary for TYPE for a statement STMT. If the current function
171 is in SSA form, a SSA name is created. Otherwise a temporary register
172 is made. */
174 tree
175 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
177 if (gimple_in_ssa_p (cfun))
178 return make_ssa_name (type, stmt);
179 else
180 return create_tmp_reg (type);
183 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
184 acceptable form for is_gimple_min_invariant.
185 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
187 tree
188 canonicalize_constructor_val (tree cval, tree from_decl)
190 tree orig_cval = cval;
191 STRIP_NOPS (cval);
192 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
193 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
195 tree ptr = TREE_OPERAND (cval, 0);
196 if (is_gimple_min_invariant (ptr))
197 cval = build1_loc (EXPR_LOCATION (cval),
198 ADDR_EXPR, TREE_TYPE (ptr),
199 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
200 ptr,
201 fold_convert (ptr_type_node,
202 TREE_OPERAND (cval, 1))));
204 if (TREE_CODE (cval) == ADDR_EXPR)
206 tree base = NULL_TREE;
207 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
209 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
210 if (base)
211 TREE_OPERAND (cval, 0) = base;
213 else
214 base = get_base_address (TREE_OPERAND (cval, 0));
215 if (!base)
216 return NULL_TREE;
218 if (VAR_OR_FUNCTION_DECL_P (base)
219 && !can_refer_decl_in_current_unit_p (base, from_decl))
220 return NULL_TREE;
221 if (TREE_TYPE (base) == error_mark_node)
222 return NULL_TREE;
223 if (VAR_P (base))
224 TREE_ADDRESSABLE (base) = 1;
225 else if (TREE_CODE (base) == FUNCTION_DECL)
227 /* Make sure we create a cgraph node for functions we'll reference.
228 They can be non-existent if the reference comes from an entry
229 of an external vtable for example. */
230 cgraph_node::get_create (base);
232 /* Fixup types in global initializers. */
233 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
234 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
236 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
237 cval = fold_convert (TREE_TYPE (orig_cval), cval);
238 return cval;
240 if (TREE_OVERFLOW_P (cval))
241 return drop_tree_overflow (cval);
242 return orig_cval;
245 /* If SYM is a constant variable with known value, return the value.
246 NULL_TREE is returned otherwise. */
248 tree
249 get_symbol_constant_value (tree sym)
251 tree val = ctor_for_folding (sym);
252 if (val != error_mark_node)
254 if (val)
256 val = canonicalize_constructor_val (unshare_expr (val), sym);
257 if (val && is_gimple_min_invariant (val))
258 return val;
259 else
260 return NULL_TREE;
262 /* Variables declared 'const' without an initializer
263 have zero as the initializer if they may not be
264 overridden at link or run time. */
265 if (!val
266 && is_gimple_reg_type (TREE_TYPE (sym)))
267 return build_zero_cst (TREE_TYPE (sym));
270 return NULL_TREE;
275 /* Subroutine of fold_stmt. We perform several simplifications of the
276 memory reference tree EXPR and make sure to re-gimplify them properly
277 after propagation of constant addresses. IS_LHS is true if the
278 reference is supposed to be an lvalue. */
280 static tree
281 maybe_fold_reference (tree expr, bool is_lhs)
283 tree result;
285 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
286 || TREE_CODE (expr) == REALPART_EXPR
287 || TREE_CODE (expr) == IMAGPART_EXPR)
288 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
289 return fold_unary_loc (EXPR_LOCATION (expr),
290 TREE_CODE (expr),
291 TREE_TYPE (expr),
292 TREE_OPERAND (expr, 0));
293 else if (TREE_CODE (expr) == BIT_FIELD_REF
294 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
295 return fold_ternary_loc (EXPR_LOCATION (expr),
296 TREE_CODE (expr),
297 TREE_TYPE (expr),
298 TREE_OPERAND (expr, 0),
299 TREE_OPERAND (expr, 1),
300 TREE_OPERAND (expr, 2));
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
307 return NULL_TREE;
311 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
312 replacement rhs for the statement or NULL_TREE if no simplification
313 could be made. It is assumed that the operands have been previously
314 folded. */
316 static tree
317 fold_gimple_assign (gimple_stmt_iterator *si)
319 gimple *stmt = gsi_stmt (*si);
320 enum tree_code subcode = gimple_assign_rhs_code (stmt);
321 location_t loc = gimple_location (stmt);
323 tree result = NULL_TREE;
325 switch (get_gimple_rhs_class (subcode))
327 case GIMPLE_SINGLE_RHS:
329 tree rhs = gimple_assign_rhs1 (stmt);
331 if (TREE_CLOBBER_P (rhs))
332 return NULL_TREE;
334 if (REFERENCE_CLASS_P (rhs))
335 return maybe_fold_reference (rhs, false);
337 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
339 tree val = OBJ_TYPE_REF_EXPR (rhs);
340 if (is_gimple_min_invariant (val))
341 return val;
342 else if (flag_devirtualize && virtual_method_call_p (rhs))
344 bool final;
345 vec <cgraph_node *>targets
346 = possible_polymorphic_call_targets (rhs, stmt, &final);
347 if (final && targets.length () <= 1 && dbg_cnt (devirt))
349 if (dump_enabled_p ())
351 location_t loc = gimple_location_safe (stmt);
352 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
353 "resolving virtual function address "
354 "reference to function %s\n",
355 targets.length () == 1
356 ? targets[0]->name ()
357 : "NULL");
359 if (targets.length () == 1)
361 val = fold_convert (TREE_TYPE (val),
362 build_fold_addr_expr_loc
363 (loc, targets[0]->decl));
364 STRIP_USELESS_TYPE_CONVERSION (val);
366 else
367 /* We can not use __builtin_unreachable here because it
368 can not have address taken. */
369 val = build_int_cst (TREE_TYPE (val), 0);
370 return val;
375 else if (TREE_CODE (rhs) == ADDR_EXPR)
377 tree ref = TREE_OPERAND (rhs, 0);
378 tree tem = maybe_fold_reference (ref, true);
379 if (tem
380 && TREE_CODE (tem) == MEM_REF
381 && integer_zerop (TREE_OPERAND (tem, 1)))
382 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
383 else if (tem)
384 result = fold_convert (TREE_TYPE (rhs),
385 build_fold_addr_expr_loc (loc, tem));
386 else if (TREE_CODE (ref) == MEM_REF
387 && integer_zerop (TREE_OPERAND (ref, 1)))
388 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
390 if (result)
392 /* Strip away useless type conversions. Both the
393 NON_LVALUE_EXPR that may have been added by fold, and
394 "useless" type conversions that might now be apparent
395 due to propagation. */
396 STRIP_USELESS_TYPE_CONVERSION (result);
398 if (result != rhs && valid_gimple_rhs_p (result))
399 return result;
403 else if (TREE_CODE (rhs) == CONSTRUCTOR
404 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
406 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
407 unsigned i;
408 tree val;
410 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
411 if (! CONSTANT_CLASS_P (val))
412 return NULL_TREE;
414 return build_vector_from_ctor (TREE_TYPE (rhs),
415 CONSTRUCTOR_ELTS (rhs));
418 else if (DECL_P (rhs))
419 return get_symbol_constant_value (rhs);
421 break;
423 case GIMPLE_UNARY_RHS:
424 break;
426 case GIMPLE_BINARY_RHS:
427 break;
429 case GIMPLE_TERNARY_RHS:
430 result = fold_ternary_loc (loc, subcode,
431 TREE_TYPE (gimple_assign_lhs (stmt)),
432 gimple_assign_rhs1 (stmt),
433 gimple_assign_rhs2 (stmt),
434 gimple_assign_rhs3 (stmt));
436 if (result)
438 STRIP_USELESS_TYPE_CONVERSION (result);
439 if (valid_gimple_rhs_p (result))
440 return result;
442 break;
444 case GIMPLE_INVALID_RHS:
445 gcc_unreachable ();
448 return NULL_TREE;
452 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
453 adjusting the replacement stmts location and virtual operands.
454 If the statement has a lhs the last stmt in the sequence is expected
455 to assign to that lhs. */
457 static void
458 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
460 gimple *stmt = gsi_stmt (*si_p);
462 if (gimple_has_location (stmt))
463 annotate_all_with_location (stmts, gimple_location (stmt));
465 /* First iterate over the replacement statements backward, assigning
466 virtual operands to their defining statements. */
467 gimple *laststore = NULL;
468 for (gimple_stmt_iterator i = gsi_last (stmts);
469 !gsi_end_p (i); gsi_prev (&i))
471 gimple *new_stmt = gsi_stmt (i);
472 if ((gimple_assign_single_p (new_stmt)
473 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
474 || (is_gimple_call (new_stmt)
475 && (gimple_call_flags (new_stmt)
476 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
478 tree vdef;
479 if (!laststore)
480 vdef = gimple_vdef (stmt);
481 else
482 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
483 gimple_set_vdef (new_stmt, vdef);
484 if (vdef && TREE_CODE (vdef) == SSA_NAME)
485 SSA_NAME_DEF_STMT (vdef) = new_stmt;
486 laststore = new_stmt;
490 /* Second iterate over the statements forward, assigning virtual
491 operands to their uses. */
492 tree reaching_vuse = gimple_vuse (stmt);
493 for (gimple_stmt_iterator i = gsi_start (stmts);
494 !gsi_end_p (i); gsi_next (&i))
496 gimple *new_stmt = gsi_stmt (i);
497 /* If the new statement possibly has a VUSE, update it with exact SSA
498 name we know will reach this one. */
499 if (gimple_has_mem_ops (new_stmt))
500 gimple_set_vuse (new_stmt, reaching_vuse);
501 gimple_set_modified (new_stmt, true);
502 if (gimple_vdef (new_stmt))
503 reaching_vuse = gimple_vdef (new_stmt);
506 /* If the new sequence does not do a store release the virtual
507 definition of the original statement. */
508 if (reaching_vuse
509 && reaching_vuse == gimple_vuse (stmt))
511 tree vdef = gimple_vdef (stmt);
512 if (vdef
513 && TREE_CODE (vdef) == SSA_NAME)
515 unlink_stmt_vdef (stmt);
516 release_ssa_name (vdef);
520 /* Finally replace the original statement with the sequence. */
521 gsi_replace_with_seq (si_p, stmts, false);
524 /* Convert EXPR into a GIMPLE value suitable for substitution on the
525 RHS of an assignment. Insert the necessary statements before
526 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
527 is replaced. If the call is expected to produces a result, then it
528 is replaced by an assignment of the new RHS to the result variable.
529 If the result is to be ignored, then the call is replaced by a
530 GIMPLE_NOP. A proper VDEF chain is retained by making the first
531 VUSE and the last VDEF of the whole sequence be the same as the replaced
532 statement and using new SSA names for stores in between. */
534 void
535 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
537 tree lhs;
538 gimple *stmt, *new_stmt;
539 gimple_stmt_iterator i;
540 gimple_seq stmts = NULL;
542 stmt = gsi_stmt (*si_p);
544 gcc_assert (is_gimple_call (stmt));
546 push_gimplify_context (gimple_in_ssa_p (cfun));
548 lhs = gimple_call_lhs (stmt);
549 if (lhs == NULL_TREE)
551 gimplify_and_add (expr, &stmts);
552 /* We can end up with folding a memcpy of an empty class assignment
553 which gets optimized away by C++ gimplification. */
554 if (gimple_seq_empty_p (stmts))
556 pop_gimplify_context (NULL);
557 if (gimple_in_ssa_p (cfun))
559 unlink_stmt_vdef (stmt);
560 release_defs (stmt);
562 gsi_replace (si_p, gimple_build_nop (), false);
563 return;
566 else
568 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
569 new_stmt = gimple_build_assign (lhs, tmp);
570 i = gsi_last (stmts);
571 gsi_insert_after_without_update (&i, new_stmt,
572 GSI_CONTINUE_LINKING);
575 pop_gimplify_context (NULL);
577 gsi_replace_with_seq_vops (si_p, stmts);
581 /* Replace the call at *GSI with the gimple value VAL. */
583 void
584 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
586 gimple *stmt = gsi_stmt (*gsi);
587 tree lhs = gimple_call_lhs (stmt);
588 gimple *repl;
589 if (lhs)
591 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
592 val = fold_convert (TREE_TYPE (lhs), val);
593 repl = gimple_build_assign (lhs, val);
595 else
596 repl = gimple_build_nop ();
597 tree vdef = gimple_vdef (stmt);
598 if (vdef && TREE_CODE (vdef) == SSA_NAME)
600 unlink_stmt_vdef (stmt);
601 release_ssa_name (vdef);
603 gsi_replace (gsi, repl, false);
606 /* Replace the call at *GSI with the new call REPL and fold that
607 again. */
609 static void
610 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
612 gimple *stmt = gsi_stmt (*gsi);
613 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
614 gimple_set_location (repl, gimple_location (stmt));
615 if (gimple_vdef (stmt)
616 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
618 gimple_set_vdef (repl, gimple_vdef (stmt));
619 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
621 if (gimple_vuse (stmt))
622 gimple_set_vuse (repl, gimple_vuse (stmt));
623 gsi_replace (gsi, repl, false);
624 fold_stmt (gsi);
627 /* Return true if VAR is a VAR_DECL or a component thereof. */
629 static bool
630 var_decl_component_p (tree var)
632 tree inner = var;
633 while (handled_component_p (inner))
634 inner = TREE_OPERAND (inner, 0);
635 return SSA_VAR_P (inner);
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME)
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 tree func = gimple_call_fndecl (stmt);
686 bool nowarn = gimple_no_warning_p (stmt);
687 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
688 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
689 && !nowarn);
691 /* If the LEN parameter is a constant zero or in range where
692 the only valid value is zero, return DEST. */
693 if (size_must_be_zero_p (len))
695 gimple *repl;
696 if (gimple_call_lhs (stmt))
697 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
698 else
699 repl = gimple_build_nop ();
700 tree vdef = gimple_vdef (stmt);
701 if (vdef && TREE_CODE (vdef) == SSA_NAME)
703 unlink_stmt_vdef (stmt);
704 release_ssa_name (vdef);
706 gsi_replace (gsi, repl, false);
707 return true;
710 /* If SRC and DEST are the same (and not volatile), return
711 DEST{,+LEN,+LEN-1}. */
712 if (operand_equal_p (src, dest, 0))
714 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
715 It's safe and may even be emitted by GCC itself (see bug
716 32667). However, diagnose it in explicit calls to the memcpy
717 function. */
718 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
719 warning_at (loc, OPT_Wrestrict,
720 "%qD source argument is the same as destination",
721 func);
723 unlink_stmt_vdef (stmt);
724 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
725 release_ssa_name (gimple_vdef (stmt));
726 if (!lhs)
728 gsi_replace (gsi, gimple_build_nop (), false);
729 return true;
731 goto done;
733 else
735 tree srctype, desttype;
736 unsigned int src_align, dest_align;
737 tree off0;
739 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
740 pointers as wide integer) and also may result in huge function
741 size because of inlined bounds copy. Thus don't inline for
742 functions we want to instrument. */
743 if (flag_check_pointer_bounds
744 && chkp_instrumentable_p (cfun->decl)
745 /* Even if data may contain pointers we can inline if copy
746 less than a pointer size. */
747 && (!tree_fits_uhwi_p (len)
748 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
749 return false;
751 /* Build accesses at offset zero with a ref-all character type. */
752 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
753 ptr_mode, true), 0);
755 /* If we can perform the copy efficiently with first doing all loads
756 and then all stores inline it that way. Currently efficiently
757 means that we can load all the memory into a single integer
758 register which is what MOVE_MAX gives us. */
759 src_align = get_pointer_alignment (src);
760 dest_align = get_pointer_alignment (dest);
761 if (tree_fits_uhwi_p (len)
762 && compare_tree_int (len, MOVE_MAX) <= 0
763 /* ??? Don't transform copies from strings with known length this
764 confuses the tree-ssa-strlen.c. This doesn't handle
765 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
766 reason. */
767 && !c_strlen (src, 2))
769 unsigned ilen = tree_to_uhwi (len);
770 if (pow2p_hwi (ilen))
772 /* Detect invalid bounds and overlapping copies and issue
773 either -Warray-bounds or -Wrestrict. */
774 if (!nowarn
775 && check_bounds_or_overlap (as_a <gcall *>(stmt),
776 dest, src, len, len))
777 gimple_set_no_warning (stmt, true);
779 scalar_int_mode mode;
780 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
781 if (type
782 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
783 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
784 /* If the destination pointer is not aligned we must be able
785 to emit an unaligned store. */
786 && (dest_align >= GET_MODE_ALIGNMENT (mode)
787 || !targetm.slow_unaligned_access (mode, dest_align)
788 || (optab_handler (movmisalign_optab, mode)
789 != CODE_FOR_nothing)))
791 tree srctype = type;
792 tree desttype = type;
793 if (src_align < GET_MODE_ALIGNMENT (mode))
794 srctype = build_aligned_type (type, src_align);
795 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
796 tree tem = fold_const_aggregate_ref (srcmem);
797 if (tem)
798 srcmem = tem;
799 else if (src_align < GET_MODE_ALIGNMENT (mode)
800 && targetm.slow_unaligned_access (mode, src_align)
801 && (optab_handler (movmisalign_optab, mode)
802 == CODE_FOR_nothing))
803 srcmem = NULL_TREE;
804 if (srcmem)
806 gimple *new_stmt;
807 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
809 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
810 srcmem
811 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
812 new_stmt);
813 gimple_assign_set_lhs (new_stmt, srcmem);
814 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
815 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
817 if (dest_align < GET_MODE_ALIGNMENT (mode))
818 desttype = build_aligned_type (type, dest_align);
819 new_stmt
820 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
821 dest, off0),
822 srcmem);
823 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
824 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
825 if (gimple_vdef (new_stmt)
826 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
827 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
828 if (!lhs)
830 gsi_replace (gsi, new_stmt, false);
831 return true;
833 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
834 goto done;
840 if (endp == 3)
842 /* Both DEST and SRC must be pointer types.
843 ??? This is what old code did. Is the testing for pointer types
844 really mandatory?
846 If either SRC is readonly or length is 1, we can use memcpy. */
847 if (!dest_align || !src_align)
848 return false;
849 if (readonly_data_expr (src)
850 || (tree_fits_uhwi_p (len)
851 && (MIN (src_align, dest_align) / BITS_PER_UNIT
852 >= tree_to_uhwi (len))))
854 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
855 if (!fn)
856 return false;
857 gimple_call_set_fndecl (stmt, fn);
858 gimple_call_set_arg (stmt, 0, dest);
859 gimple_call_set_arg (stmt, 1, src);
860 fold_stmt (gsi);
861 return true;
864 /* If *src and *dest can't overlap, optimize into memcpy as well. */
865 if (TREE_CODE (src) == ADDR_EXPR
866 && TREE_CODE (dest) == ADDR_EXPR)
868 tree src_base, dest_base, fn;
869 poly_int64 src_offset = 0, dest_offset = 0;
870 poly_uint64 maxsize;
872 srcvar = TREE_OPERAND (src, 0);
873 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
874 if (src_base == NULL)
875 src_base = srcvar;
876 destvar = TREE_OPERAND (dest, 0);
877 dest_base = get_addr_base_and_unit_offset (destvar,
878 &dest_offset);
879 if (dest_base == NULL)
880 dest_base = destvar;
881 if (!poly_int_tree_p (len, &maxsize))
882 maxsize = -1;
883 if (SSA_VAR_P (src_base)
884 && SSA_VAR_P (dest_base))
886 if (operand_equal_p (src_base, dest_base, 0)
887 && ranges_maybe_overlap_p (src_offset, maxsize,
888 dest_offset, maxsize))
889 return false;
891 else if (TREE_CODE (src_base) == MEM_REF
892 && TREE_CODE (dest_base) == MEM_REF)
894 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
895 TREE_OPERAND (dest_base, 0), 0))
896 return false;
897 poly_offset_int full_src_offset
898 = mem_ref_offset (src_base) + src_offset;
899 poly_offset_int full_dest_offset
900 = mem_ref_offset (dest_base) + dest_offset;
901 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
902 full_dest_offset, maxsize))
903 return false;
905 else
906 return false;
908 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
909 if (!fn)
910 return false;
911 gimple_call_set_fndecl (stmt, fn);
912 gimple_call_set_arg (stmt, 0, dest);
913 gimple_call_set_arg (stmt, 1, src);
914 fold_stmt (gsi);
915 return true;
918 /* If the destination and source do not alias optimize into
919 memcpy as well. */
920 if ((is_gimple_min_invariant (dest)
921 || TREE_CODE (dest) == SSA_NAME)
922 && (is_gimple_min_invariant (src)
923 || TREE_CODE (src) == SSA_NAME))
925 ao_ref destr, srcr;
926 ao_ref_init_from_ptr_and_size (&destr, dest, len);
927 ao_ref_init_from_ptr_and_size (&srcr, src, len);
928 if (!refs_may_alias_p_1 (&destr, &srcr, false))
930 tree fn;
931 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
932 if (!fn)
933 return false;
934 gimple_call_set_fndecl (stmt, fn);
935 gimple_call_set_arg (stmt, 0, dest);
936 gimple_call_set_arg (stmt, 1, src);
937 fold_stmt (gsi);
938 return true;
942 return false;
945 if (!tree_fits_shwi_p (len))
946 return false;
947 if (!POINTER_TYPE_P (TREE_TYPE (src))
948 || !POINTER_TYPE_P (TREE_TYPE (dest)))
949 return false;
950 /* In the following try to find a type that is most natural to be
951 used for the memcpy source and destination and that allows
952 the most optimization when memcpy is turned into a plain assignment
953 using that type. In theory we could always use a char[len] type
954 but that only gains us that the destination and source possibly
955 no longer will have their address taken. */
956 srctype = TREE_TYPE (TREE_TYPE (src));
957 if (TREE_CODE (srctype) == ARRAY_TYPE
958 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
959 srctype = TREE_TYPE (srctype);
960 desttype = TREE_TYPE (TREE_TYPE (dest));
961 if (TREE_CODE (desttype) == ARRAY_TYPE
962 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
963 desttype = TREE_TYPE (desttype);
964 if (TREE_ADDRESSABLE (srctype)
965 || TREE_ADDRESSABLE (desttype))
966 return false;
968 /* Make sure we are not copying using a floating-point mode or
969 a type whose size possibly does not match its precision. */
970 if (FLOAT_MODE_P (TYPE_MODE (desttype))
971 || TREE_CODE (desttype) == BOOLEAN_TYPE
972 || TREE_CODE (desttype) == ENUMERAL_TYPE)
973 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
974 if (FLOAT_MODE_P (TYPE_MODE (srctype))
975 || TREE_CODE (srctype) == BOOLEAN_TYPE
976 || TREE_CODE (srctype) == ENUMERAL_TYPE)
977 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
978 if (!srctype)
979 srctype = desttype;
980 if (!desttype)
981 desttype = srctype;
982 if (!srctype)
983 return false;
985 src_align = get_pointer_alignment (src);
986 dest_align = get_pointer_alignment (dest);
987 if (dest_align < TYPE_ALIGN (desttype)
988 || src_align < TYPE_ALIGN (srctype))
989 return false;
991 destvar = NULL_TREE;
992 if (TREE_CODE (dest) == ADDR_EXPR
993 && var_decl_component_p (TREE_OPERAND (dest, 0))
994 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
995 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
997 srcvar = NULL_TREE;
998 if (TREE_CODE (src) == ADDR_EXPR
999 && var_decl_component_p (TREE_OPERAND (src, 0))
1000 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1002 if (!destvar
1003 || src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1005 src, off0);
1006 else if (!STRICT_ALIGNMENT)
1008 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1009 src_align);
1010 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1014 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1015 return false;
1017 if (srcvar == NULL_TREE)
1019 if (src_align >= TYPE_ALIGN (desttype))
1020 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1021 else
1023 if (STRICT_ALIGNMENT)
1024 return false;
1025 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1026 src_align);
1027 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1030 else if (destvar == NULL_TREE)
1032 if (dest_align >= TYPE_ALIGN (srctype))
1033 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1034 else
1036 if (STRICT_ALIGNMENT)
1037 return false;
1038 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1039 dest_align);
1040 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1044 /* Detect invalid bounds and overlapping copies and issue either
1045 -Warray-bounds or -Wrestrict. */
1046 if (!nowarn)
1047 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1049 gimple *new_stmt;
1050 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1052 tree tem = fold_const_aggregate_ref (srcvar);
1053 if (tem)
1054 srcvar = tem;
1055 if (! is_gimple_min_invariant (srcvar))
1057 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1058 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1059 new_stmt);
1060 gimple_assign_set_lhs (new_stmt, srcvar);
1061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1062 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1064 new_stmt = gimple_build_assign (destvar, srcvar);
1065 goto set_vop_and_replace;
1068 /* We get an aggregate copy. Use an unsigned char[] type to
1069 perform the copying to preserve padding and to avoid any issues
1070 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1071 desttype = build_array_type_nelts (unsigned_char_type_node,
1072 tree_to_uhwi (len));
1073 srctype = desttype;
1074 if (src_align > TYPE_ALIGN (srctype))
1075 srctype = build_aligned_type (srctype, src_align);
1076 if (dest_align > TYPE_ALIGN (desttype))
1077 desttype = build_aligned_type (desttype, dest_align);
1078 new_stmt
1079 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1080 fold_build2 (MEM_REF, srctype, src, off0));
1081 set_vop_and_replace:
1082 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1083 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1084 if (gimple_vdef (new_stmt)
1085 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1086 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1087 if (!lhs)
1089 gsi_replace (gsi, new_stmt, false);
1090 return true;
1092 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1095 done:
1096 gimple_seq stmts = NULL;
1097 if (endp == 0 || endp == 3)
1098 len = NULL_TREE;
1099 else if (endp == 2)
1100 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1101 ssize_int (1));
1102 if (endp == 2 || endp == 1)
1104 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1105 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1106 TREE_TYPE (dest), dest, len);
1109 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1110 gimple *repl = gimple_build_assign (lhs, dest);
1111 gsi_replace (gsi, repl, false);
1112 return true;
1115 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1116 to built-in memcmp (a, b, len). */
1118 static bool
1119 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1121 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1123 if (!fn)
1124 return false;
1126 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1128 gimple *stmt = gsi_stmt (*gsi);
1129 tree a = gimple_call_arg (stmt, 0);
1130 tree b = gimple_call_arg (stmt, 1);
1131 tree len = gimple_call_arg (stmt, 2);
1133 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1134 replace_call_with_call_and_fold (gsi, repl);
1136 return true;
1139 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1140 to built-in memmove (dest, src, len). */
1142 static bool
1143 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1145 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1147 if (!fn)
1148 return false;
1150 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1151 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1152 len) into memmove (dest, src, len). */
1154 gimple *stmt = gsi_stmt (*gsi);
1155 tree src = gimple_call_arg (stmt, 0);
1156 tree dest = gimple_call_arg (stmt, 1);
1157 tree len = gimple_call_arg (stmt, 2);
1159 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1160 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1161 replace_call_with_call_and_fold (gsi, repl);
1163 return true;
1166 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1167 to built-in memset (dest, 0, len). */
1169 static bool
1170 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1172 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1174 if (!fn)
1175 return false;
1177 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1179 gimple *stmt = gsi_stmt (*gsi);
1180 tree dest = gimple_call_arg (stmt, 0);
1181 tree len = gimple_call_arg (stmt, 1);
1183 gimple_seq seq = NULL;
1184 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1185 gimple_seq_add_stmt_without_update (&seq, repl);
1186 gsi_replace_with_seq_vops (gsi, seq);
1187 fold_stmt (gsi);
1189 return true;
1192 /* Fold function call to builtin memset or bzero at *GSI setting the
1193 memory of size LEN to VAL. Return whether a simplification was made. */
1195 static bool
1196 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1198 gimple *stmt = gsi_stmt (*gsi);
1199 tree etype;
1200 unsigned HOST_WIDE_INT length, cval;
1202 /* If the LEN parameter is zero, return DEST. */
1203 if (integer_zerop (len))
1205 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1206 return true;
1209 if (! tree_fits_uhwi_p (len))
1210 return false;
1212 if (TREE_CODE (c) != INTEGER_CST)
1213 return false;
1215 tree dest = gimple_call_arg (stmt, 0);
1216 tree var = dest;
1217 if (TREE_CODE (var) != ADDR_EXPR)
1218 return false;
1220 var = TREE_OPERAND (var, 0);
1221 if (TREE_THIS_VOLATILE (var))
1222 return false;
1224 etype = TREE_TYPE (var);
1225 if (TREE_CODE (etype) == ARRAY_TYPE)
1226 etype = TREE_TYPE (etype);
1228 if (!INTEGRAL_TYPE_P (etype)
1229 && !POINTER_TYPE_P (etype))
1230 return NULL_TREE;
1232 if (! var_decl_component_p (var))
1233 return NULL_TREE;
1235 length = tree_to_uhwi (len);
1236 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1237 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1238 return NULL_TREE;
1240 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1241 return NULL_TREE;
1243 if (integer_zerop (c))
1244 cval = 0;
1245 else
1247 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1248 return NULL_TREE;
1250 cval = TREE_INT_CST_LOW (c);
1251 cval &= 0xff;
1252 cval |= cval << 8;
1253 cval |= cval << 16;
1254 cval |= (cval << 31) << 1;
1257 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1258 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1259 gimple_set_vuse (store, gimple_vuse (stmt));
1260 tree vdef = gimple_vdef (stmt);
1261 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1263 gimple_set_vdef (store, gimple_vdef (stmt));
1264 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1266 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1267 if (gimple_call_lhs (stmt))
1269 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1270 gsi_replace (gsi, asgn, false);
1272 else
1274 gimple_stmt_iterator gsi2 = *gsi;
1275 gsi_prev (gsi);
1276 gsi_remove (&gsi2, true);
1279 return true;
1283 /* Obtain the minimum and maximum string length or minimum and maximum
1284 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1285 If ARG is an SSA name variable, follow its use-def chains. When
1286 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1287 if we are unable to determine the length or value, return false.
1288 VISITED is a bitmap of visited variables.
1289 TYPE is 0 if string length should be obtained, 1 for maximum string
1290 length and 2 for maximum value ARG can have.
1291 When FUZZY is non-zero and the length of a string cannot be determined,
1292 the function instead considers as the maximum possible length the
1293 size of a character array it may refer to. If FUZZY is 2, it will handle
1294 PHIs and COND_EXPRs optimistically, if we can determine string length
1295 minimum and maximum, it will use the minimum from the ones where it
1296 can be determined.
1297 Set *FLEXP to true if the range of the string lengths has been
1298 obtained from the upper bound of an array at the end of a struct.
1299 Such an array may hold a string that's longer than its upper bound
1300 due to it being used as a poor-man's flexible array member. */
1302 static bool
1303 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1304 int fuzzy, bool *flexp)
1306 tree var, val = NULL_TREE;
1307 gimple *def_stmt;
1309 /* The minimum and maximum length. */
1310 tree *const minlen = length;
1311 tree *const maxlen = length + 1;
1313 if (TREE_CODE (arg) != SSA_NAME)
1315 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1316 if (TREE_CODE (arg) == ADDR_EXPR
1317 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1319 tree op = TREE_OPERAND (arg, 0);
1320 if (integer_zerop (TREE_OPERAND (op, 1)))
1322 tree aop0 = TREE_OPERAND (op, 0);
1323 if (TREE_CODE (aop0) == INDIRECT_REF
1324 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1325 return get_range_strlen (TREE_OPERAND (aop0, 0),
1326 length, visited, type, fuzzy, flexp);
1328 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1330 /* Fail if an array is the last member of a struct object
1331 since it could be treated as a (fake) flexible array
1332 member. */
1333 tree idx = TREE_OPERAND (op, 1);
1335 arg = TREE_OPERAND (op, 0);
1336 tree optype = TREE_TYPE (arg);
1337 if (tree dom = TYPE_DOMAIN (optype))
1338 if (tree bound = TYPE_MAX_VALUE (dom))
1339 if (TREE_CODE (bound) == INTEGER_CST
1340 && TREE_CODE (idx) == INTEGER_CST
1341 && tree_int_cst_lt (bound, idx))
1342 return false;
1346 if (type == 2)
1348 val = arg;
1349 if (TREE_CODE (val) != INTEGER_CST
1350 || tree_int_cst_sgn (val) < 0)
1351 return false;
1353 else
1354 val = c_strlen (arg, 1);
1356 if (!val && fuzzy)
1358 if (TREE_CODE (arg) == ADDR_EXPR)
1359 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1360 visited, type, fuzzy, flexp);
1362 if (TREE_CODE (arg) == ARRAY_REF)
1364 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1366 /* Determine the "innermost" array type. */
1367 while (TREE_CODE (type) == ARRAY_TYPE
1368 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1369 type = TREE_TYPE (type);
1371 /* Avoid arrays of pointers. */
1372 tree eltype = TREE_TYPE (type);
1373 if (TREE_CODE (type) != ARRAY_TYPE
1374 || !INTEGRAL_TYPE_P (eltype))
1375 return false;
1377 val = TYPE_SIZE_UNIT (type);
1378 if (!val || integer_zerop (val))
1379 return false;
1381 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1382 integer_one_node);
1383 /* Set the minimum size to zero since the string in
1384 the array could have zero length. */
1385 *minlen = ssize_int (0);
1387 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1388 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1389 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1390 *flexp = true;
1392 else if (TREE_CODE (arg) == COMPONENT_REF
1393 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1394 == ARRAY_TYPE))
1396 /* Use the type of the member array to determine the upper
1397 bound on the length of the array. This may be overly
1398 optimistic if the array itself isn't NUL-terminated and
1399 the caller relies on the subsequent member to contain
1400 the NUL but that would only be considered valid if
1401 the array were the last member of a struct.
1402 Set *FLEXP to true if the array whose bound is being
1403 used is at the end of a struct. */
1404 if (array_at_struct_end_p (arg))
1405 *flexp = true;
1407 arg = TREE_OPERAND (arg, 1);
1409 tree type = TREE_TYPE (arg);
1411 while (TREE_CODE (type) == ARRAY_TYPE
1412 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1413 type = TREE_TYPE (type);
1415 /* Fail when the array bound is unknown or zero. */
1416 val = TYPE_SIZE_UNIT (type);
1417 if (!val || integer_zerop (val))
1418 return false;
1419 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1420 integer_one_node);
1421 /* Set the minimum size to zero since the string in
1422 the array could have zero length. */
1423 *minlen = ssize_int (0);
1426 if (VAR_P (arg))
1428 tree type = TREE_TYPE (arg);
1429 if (POINTER_TYPE_P (type))
1430 type = TREE_TYPE (type);
1432 if (TREE_CODE (type) == ARRAY_TYPE)
1434 val = TYPE_SIZE_UNIT (type);
1435 if (!val
1436 || TREE_CODE (val) != INTEGER_CST
1437 || integer_zerop (val))
1438 return false;
1439 val = wide_int_to_tree (TREE_TYPE (val),
1440 wi::sub (wi::to_wide (val), 1));
1441 /* Set the minimum size to zero since the string in
1442 the array could have zero length. */
1443 *minlen = ssize_int (0);
1448 if (!val)
1449 return false;
1451 if (!*minlen
1452 || (type > 0
1453 && TREE_CODE (*minlen) == INTEGER_CST
1454 && TREE_CODE (val) == INTEGER_CST
1455 && tree_int_cst_lt (val, *minlen)))
1456 *minlen = val;
1458 if (*maxlen)
1460 if (type > 0)
1462 if (TREE_CODE (*maxlen) != INTEGER_CST
1463 || TREE_CODE (val) != INTEGER_CST)
1464 return false;
1466 if (tree_int_cst_lt (*maxlen, val))
1467 *maxlen = val;
1468 return true;
1470 else if (simple_cst_equal (val, *maxlen) != 1)
1471 return false;
1474 *maxlen = val;
1475 return true;
1478 /* If ARG is registered for SSA update we cannot look at its defining
1479 statement. */
1480 if (name_registered_for_update_p (arg))
1481 return false;
1483 /* If we were already here, break the infinite cycle. */
1484 if (!*visited)
1485 *visited = BITMAP_ALLOC (NULL);
1486 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1487 return true;
1489 var = arg;
1490 def_stmt = SSA_NAME_DEF_STMT (var);
1492 switch (gimple_code (def_stmt))
1494 case GIMPLE_ASSIGN:
1495 /* The RHS of the statement defining VAR must either have a
1496 constant length or come from another SSA_NAME with a constant
1497 length. */
1498 if (gimple_assign_single_p (def_stmt)
1499 || gimple_assign_unary_nop_p (def_stmt))
1501 tree rhs = gimple_assign_rhs1 (def_stmt);
1502 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1504 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1506 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1507 gimple_assign_rhs3 (def_stmt) };
1509 for (unsigned int i = 0; i < 2; i++)
1510 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1511 flexp))
1513 if (fuzzy == 2)
1514 *maxlen = build_all_ones_cst (size_type_node);
1515 else
1516 return false;
1518 return true;
1520 return false;
1522 case GIMPLE_PHI:
1523 /* All the arguments of the PHI node must have the same constant
1524 length. */
1525 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1527 tree arg = gimple_phi_arg (def_stmt, i)->def;
1529 /* If this PHI has itself as an argument, we cannot
1530 determine the string length of this argument. However,
1531 if we can find a constant string length for the other
1532 PHI args then we can still be sure that this is a
1533 constant string length. So be optimistic and just
1534 continue with the next argument. */
1535 if (arg == gimple_phi_result (def_stmt))
1536 continue;
1538 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1540 if (fuzzy == 2)
1541 *maxlen = build_all_ones_cst (size_type_node);
1542 else
1543 return false;
1546 return true;
1548 default:
1549 return false;
1553 /* Determine the minimum and maximum value or string length that ARG
1554 refers to and store each in the first two elements of MINMAXLEN.
1555 For expressions that point to strings of unknown lengths that are
1556 character arrays, use the upper bound of the array as the maximum
1557 length. For example, given an expression like 'x ? array : "xyz"'
1558 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1559 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1560 stored in array.
1561 Return true if the range of the string lengths has been obtained
1562 from the upper bound of an array at the end of a struct. Such
1563 an array may hold a string that's longer than its upper bound
1564 due to it being used as a poor-man's flexible array member.
1566 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1567 and false if PHIs and COND_EXPRs are to be handled optimistically,
1568 if we can determine string length minimum and maximum; it will use
1569 the minimum from the ones where it can be determined.
1570 STRICT false should be only used for warning code. */
1572 bool
1573 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1575 bitmap visited = NULL;
1577 minmaxlen[0] = NULL_TREE;
1578 minmaxlen[1] = NULL_TREE;
1580 bool flexarray = false;
1581 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1582 &flexarray))
1584 minmaxlen[0] = NULL_TREE;
1585 minmaxlen[1] = NULL_TREE;
1588 if (visited)
1589 BITMAP_FREE (visited);
1591 return flexarray;
1594 tree
1595 get_maxval_strlen (tree arg, int type)
1597 bitmap visited = NULL;
1598 tree len[2] = { NULL_TREE, NULL_TREE };
1600 bool dummy;
1601 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1602 len[1] = NULL_TREE;
1603 if (visited)
1604 BITMAP_FREE (visited);
1606 return len[1];
1610 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1611 If LEN is not NULL, it represents the length of the string to be
1612 copied. Return NULL_TREE if no simplification can be made. */
1614 static bool
1615 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1616 tree dest, tree src)
1618 gimple *stmt = gsi_stmt (*gsi);
1619 location_t loc = gimple_location (stmt);
1620 tree fn;
1622 /* If SRC and DEST are the same (and not volatile), return DEST. */
1623 if (operand_equal_p (src, dest, 0))
1625 tree func = gimple_call_fndecl (stmt);
1627 warning_at (loc, OPT_Wrestrict,
1628 "%qD source argument is the same as destination",
1629 func);
1631 replace_call_with_value (gsi, dest);
1632 return true;
1635 if (optimize_function_for_size_p (cfun))
1636 return false;
1638 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1639 if (!fn)
1640 return false;
1642 tree len = get_maxval_strlen (src, 0);
1643 if (!len)
1644 return false;
1646 len = fold_convert_loc (loc, size_type_node, len);
1647 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1648 len = force_gimple_operand_gsi (gsi, len, true,
1649 NULL_TREE, true, GSI_SAME_STMT);
1650 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1651 replace_call_with_call_and_fold (gsi, repl);
1652 return true;
1655 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1656 If SLEN is not NULL, it represents the length of the source string.
1657 Return NULL_TREE if no simplification can be made. */
1659 static bool
1660 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1661 tree dest, tree src, tree len)
1663 gimple *stmt = gsi_stmt (*gsi);
1664 location_t loc = gimple_location (stmt);
1665 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1667 /* If the LEN parameter is zero, return DEST. */
1668 if (integer_zerop (len))
1670 /* Avoid warning if the destination refers to a an array/pointer
1671 decorate with attribute nonstring. */
1672 if (!nonstring)
1674 tree fndecl = gimple_call_fndecl (stmt);
1675 gcall *call = as_a <gcall *> (stmt);
1677 /* Warn about the lack of nul termination: the result is not
1678 a (nul-terminated) string. */
1679 tree slen = get_maxval_strlen (src, 0);
1680 if (slen && !integer_zerop (slen))
1681 warning_at (loc, OPT_Wstringop_truncation,
1682 "%G%qD destination unchanged after copying no bytes "
1683 "from a string of length %E",
1684 call, fndecl, slen);
1685 else
1686 warning_at (loc, OPT_Wstringop_truncation,
1687 "%G%qD destination unchanged after copying no bytes",
1688 call, fndecl);
1691 replace_call_with_value (gsi, dest);
1692 return true;
1695 /* We can't compare slen with len as constants below if len is not a
1696 constant. */
1697 if (TREE_CODE (len) != INTEGER_CST)
1698 return false;
1700 /* Now, we must be passed a constant src ptr parameter. */
1701 tree slen = get_maxval_strlen (src, 0);
1702 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1703 return false;
1705 /* The size of the source string including the terminating nul. */
1706 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1708 /* We do not support simplification of this case, though we do
1709 support it when expanding trees into RTL. */
1710 /* FIXME: generate a call to __builtin_memset. */
1711 if (tree_int_cst_lt (ssize, len))
1712 return false;
1714 /* Diagnose truncation that leaves the copy unterminated. */
1715 maybe_diag_stxncpy_trunc (*gsi, src, len);
1717 /* OK transform into builtin memcpy. */
1718 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1719 if (!fn)
1720 return false;
1722 len = fold_convert_loc (loc, size_type_node, len);
1723 len = force_gimple_operand_gsi (gsi, len, true,
1724 NULL_TREE, true, GSI_SAME_STMT);
1725 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1726 replace_call_with_call_and_fold (gsi, repl);
1728 return true;
1731 /* Fold function call to builtin strchr or strrchr.
1732 If both arguments are constant, evaluate and fold the result,
1733 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1734 In general strlen is significantly faster than strchr
1735 due to being a simpler operation. */
1736 static bool
1737 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1739 gimple *stmt = gsi_stmt (*gsi);
1740 tree str = gimple_call_arg (stmt, 0);
1741 tree c = gimple_call_arg (stmt, 1);
1742 location_t loc = gimple_location (stmt);
1743 const char *p;
1744 char ch;
1746 if (!gimple_call_lhs (stmt))
1747 return false;
1749 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1751 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1753 if (p1 == NULL)
1755 replace_call_with_value (gsi, integer_zero_node);
1756 return true;
1759 tree len = build_int_cst (size_type_node, p1 - p);
1760 gimple_seq stmts = NULL;
1761 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1762 POINTER_PLUS_EXPR, str, len);
1763 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1764 gsi_replace_with_seq_vops (gsi, stmts);
1765 return true;
1768 if (!integer_zerop (c))
1769 return false;
1771 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1772 if (is_strrchr && optimize_function_for_size_p (cfun))
1774 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1776 if (strchr_fn)
1778 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1779 replace_call_with_call_and_fold (gsi, repl);
1780 return true;
1783 return false;
1786 tree len;
1787 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1789 if (!strlen_fn)
1790 return false;
1792 /* Create newstr = strlen (str). */
1793 gimple_seq stmts = NULL;
1794 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1795 gimple_set_location (new_stmt, loc);
1796 len = create_tmp_reg_or_ssa_name (size_type_node);
1797 gimple_call_set_lhs (new_stmt, len);
1798 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1800 /* Create (str p+ strlen (str)). */
1801 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1802 POINTER_PLUS_EXPR, str, len);
1803 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1804 gsi_replace_with_seq_vops (gsi, stmts);
1805 /* gsi now points at the assignment to the lhs, get a
1806 stmt iterator to the strlen.
1807 ??? We can't use gsi_for_stmt as that doesn't work when the
1808 CFG isn't built yet. */
1809 gimple_stmt_iterator gsi2 = *gsi;
1810 gsi_prev (&gsi2);
1811 fold_stmt (&gsi2);
1812 return true;
1815 /* Fold function call to builtin strstr.
1816 If both arguments are constant, evaluate and fold the result,
1817 additionally fold strstr (x, "") into x and strstr (x, "c")
1818 into strchr (x, 'c'). */
1819 static bool
1820 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1822 gimple *stmt = gsi_stmt (*gsi);
1823 tree haystack = gimple_call_arg (stmt, 0);
1824 tree needle = gimple_call_arg (stmt, 1);
1825 const char *p, *q;
1827 if (!gimple_call_lhs (stmt))
1828 return false;
1830 q = c_getstr (needle);
1831 if (q == NULL)
1832 return false;
1834 if ((p = c_getstr (haystack)))
1836 const char *r = strstr (p, q);
1838 if (r == NULL)
1840 replace_call_with_value (gsi, integer_zero_node);
1841 return true;
1844 tree len = build_int_cst (size_type_node, r - p);
1845 gimple_seq stmts = NULL;
1846 gimple *new_stmt
1847 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1848 haystack, len);
1849 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1850 gsi_replace_with_seq_vops (gsi, stmts);
1851 return true;
1854 /* For strstr (x, "") return x. */
1855 if (q[0] == '\0')
1857 replace_call_with_value (gsi, haystack);
1858 return true;
1861 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1862 if (q[1] == '\0')
1864 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1865 if (strchr_fn)
1867 tree c = build_int_cst (integer_type_node, q[0]);
1868 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1869 replace_call_with_call_and_fold (gsi, repl);
1870 return true;
1874 return false;
1877 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1878 to the call.
1880 Return NULL_TREE if no simplification was possible, otherwise return the
1881 simplified form of the call as a tree.
1883 The simplified form may be a constant or other expression which
1884 computes the same value, but in a more efficient manner (including
1885 calls to other builtin functions).
1887 The call may contain arguments which need to be evaluated, but
1888 which are not useful to determine the result of the call. In
1889 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1890 COMPOUND_EXPR will be an argument which must be evaluated.
1891 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1892 COMPOUND_EXPR in the chain will contain the tree for the simplified
1893 form of the builtin function call. */
1895 static bool
1896 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1898 gimple *stmt = gsi_stmt (*gsi);
1899 location_t loc = gimple_location (stmt);
1901 const char *p = c_getstr (src);
1903 /* If the string length is zero, return the dst parameter. */
1904 if (p && *p == '\0')
1906 replace_call_with_value (gsi, dst);
1907 return true;
1910 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1911 return false;
1913 /* See if we can store by pieces into (dst + strlen(dst)). */
1914 tree newdst;
1915 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1916 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1918 if (!strlen_fn || !memcpy_fn)
1919 return false;
1921 /* If the length of the source string isn't computable don't
1922 split strcat into strlen and memcpy. */
1923 tree len = get_maxval_strlen (src, 0);
1924 if (! len)
1925 return false;
1927 /* Create strlen (dst). */
1928 gimple_seq stmts = NULL, stmts2;
1929 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1930 gimple_set_location (repl, loc);
1931 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1932 gimple_call_set_lhs (repl, newdst);
1933 gimple_seq_add_stmt_without_update (&stmts, repl);
1935 /* Create (dst p+ strlen (dst)). */
1936 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1937 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1938 gimple_seq_add_seq_without_update (&stmts, stmts2);
1940 len = fold_convert_loc (loc, size_type_node, len);
1941 len = size_binop_loc (loc, PLUS_EXPR, len,
1942 build_int_cst (size_type_node, 1));
1943 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1944 gimple_seq_add_seq_without_update (&stmts, stmts2);
1946 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1947 gimple_seq_add_stmt_without_update (&stmts, repl);
1948 if (gimple_call_lhs (stmt))
1950 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1951 gimple_seq_add_stmt_without_update (&stmts, repl);
1952 gsi_replace_with_seq_vops (gsi, stmts);
1953 /* gsi now points at the assignment to the lhs, get a
1954 stmt iterator to the memcpy call.
1955 ??? We can't use gsi_for_stmt as that doesn't work when the
1956 CFG isn't built yet. */
1957 gimple_stmt_iterator gsi2 = *gsi;
1958 gsi_prev (&gsi2);
1959 fold_stmt (&gsi2);
1961 else
1963 gsi_replace_with_seq_vops (gsi, stmts);
1964 fold_stmt (gsi);
1966 return true;
1969 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1970 are the arguments to the call. */
1972 static bool
1973 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1975 gimple *stmt = gsi_stmt (*gsi);
1976 tree dest = gimple_call_arg (stmt, 0);
1977 tree src = gimple_call_arg (stmt, 1);
1978 tree size = gimple_call_arg (stmt, 2);
1979 tree fn;
1980 const char *p;
1983 p = c_getstr (src);
1984 /* If the SRC parameter is "", return DEST. */
1985 if (p && *p == '\0')
1987 replace_call_with_value (gsi, dest);
1988 return true;
1991 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1992 return false;
1994 /* If __builtin_strcat_chk is used, assume strcat is available. */
1995 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1996 if (!fn)
1997 return false;
1999 gimple *repl = gimple_build_call (fn, 2, dest, src);
2000 replace_call_with_call_and_fold (gsi, repl);
2001 return true;
2004 /* Simplify a call to the strncat builtin. */
2006 static bool
2007 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2009 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2010 tree dst = gimple_call_arg (stmt, 0);
2011 tree src = gimple_call_arg (stmt, 1);
2012 tree len = gimple_call_arg (stmt, 2);
2014 const char *p = c_getstr (src);
2016 /* If the requested length is zero, or the src parameter string
2017 length is zero, return the dst parameter. */
2018 if (integer_zerop (len) || (p && *p == '\0'))
2020 replace_call_with_value (gsi, dst);
2021 return true;
2024 if (TREE_CODE (len) != INTEGER_CST || !p)
2025 return false;
2027 unsigned srclen = strlen (p);
2029 int cmpsrc = compare_tree_int (len, srclen);
2031 /* Return early if the requested len is less than the string length.
2032 Warnings will be issued elsewhere later. */
2033 if (cmpsrc < 0)
2034 return false;
2036 unsigned HOST_WIDE_INT dstsize;
2038 bool nowarn = gimple_no_warning_p (stmt);
2040 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2042 int cmpdst = compare_tree_int (len, dstsize);
2044 if (cmpdst >= 0)
2046 tree fndecl = gimple_call_fndecl (stmt);
2048 /* Strncat copies (at most) LEN bytes and always appends
2049 the terminating NUL so the specified bound should never
2050 be equal to (or greater than) the size of the destination.
2051 If it is, the copy could overflow. */
2052 location_t loc = gimple_location (stmt);
2053 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2054 cmpdst == 0
2055 ? G_("%G%qD specified bound %E equals "
2056 "destination size")
2057 : G_("%G%qD specified bound %E exceeds "
2058 "destination size %wu"),
2059 stmt, fndecl, len, dstsize);
2060 if (nowarn)
2061 gimple_set_no_warning (stmt, true);
2065 if (!nowarn && cmpsrc == 0)
2067 tree fndecl = gimple_call_fndecl (stmt);
2069 /* To avoid certain truncation the specified bound should also
2070 not be equal to (or less than) the length of the source. */
2071 location_t loc = gimple_location (stmt);
2072 if (warning_at (loc, OPT_Wstringop_overflow_,
2073 "%G%qD specified bound %E equals source length",
2074 stmt, fndecl, len))
2075 gimple_set_no_warning (stmt, true);
2078 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2080 /* If the replacement _DECL isn't initialized, don't do the
2081 transformation. */
2082 if (!fn)
2083 return false;
2085 /* Otherwise, emit a call to strcat. */
2086 gcall *repl = gimple_build_call (fn, 2, dst, src);
2087 replace_call_with_call_and_fold (gsi, repl);
2088 return true;
2091 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2092 LEN, and SIZE. */
2094 static bool
2095 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2097 gimple *stmt = gsi_stmt (*gsi);
2098 tree dest = gimple_call_arg (stmt, 0);
2099 tree src = gimple_call_arg (stmt, 1);
2100 tree len = gimple_call_arg (stmt, 2);
2101 tree size = gimple_call_arg (stmt, 3);
2102 tree fn;
2103 const char *p;
2105 p = c_getstr (src);
2106 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2107 if ((p && *p == '\0')
2108 || integer_zerop (len))
2110 replace_call_with_value (gsi, dest);
2111 return true;
2114 if (! tree_fits_uhwi_p (size))
2115 return false;
2117 if (! integer_all_onesp (size))
2119 tree src_len = c_strlen (src, 1);
2120 if (src_len
2121 && tree_fits_uhwi_p (src_len)
2122 && tree_fits_uhwi_p (len)
2123 && ! tree_int_cst_lt (len, src_len))
2125 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2126 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2127 if (!fn)
2128 return false;
2130 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2131 replace_call_with_call_and_fold (gsi, repl);
2132 return true;
2134 return false;
2137 /* If __builtin_strncat_chk is used, assume strncat is available. */
2138 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2139 if (!fn)
2140 return false;
2142 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2143 replace_call_with_call_and_fold (gsi, repl);
2144 return true;
2147 /* Build and append gimple statements to STMTS that would load a first
2148 character of a memory location identified by STR. LOC is location
2149 of the statement. */
2151 static tree
2152 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2154 tree var;
2156 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2157 tree cst_uchar_ptr_node
2158 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2159 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2161 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2162 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2163 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2165 gimple_assign_set_lhs (stmt, var);
2166 gimple_seq_add_stmt_without_update (stmts, stmt);
2168 return var;
2171 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2172 FCODE is the name of the builtin. */
2174 static bool
2175 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2177 gimple *stmt = gsi_stmt (*gsi);
2178 tree callee = gimple_call_fndecl (stmt);
2179 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2181 tree type = integer_type_node;
2182 tree str1 = gimple_call_arg (stmt, 0);
2183 tree str2 = gimple_call_arg (stmt, 1);
2184 tree lhs = gimple_call_lhs (stmt);
2185 HOST_WIDE_INT length = -1;
2187 /* Handle strncmp and strncasecmp functions. */
2188 if (gimple_call_num_args (stmt) == 3)
2190 tree len = gimple_call_arg (stmt, 2);
2191 if (tree_fits_uhwi_p (len))
2192 length = tree_to_uhwi (len);
2195 /* If the LEN parameter is zero, return zero. */
2196 if (length == 0)
2198 replace_call_with_value (gsi, integer_zero_node);
2199 return true;
2202 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2203 if (operand_equal_p (str1, str2, 0))
2205 replace_call_with_value (gsi, integer_zero_node);
2206 return true;
2209 const char *p1 = c_getstr (str1);
2210 const char *p2 = c_getstr (str2);
2212 /* For known strings, return an immediate value. */
2213 if (p1 && p2)
2215 int r = 0;
2216 bool known_result = false;
2218 switch (fcode)
2220 case BUILT_IN_STRCMP:
2222 r = strcmp (p1, p2);
2223 known_result = true;
2224 break;
2226 case BUILT_IN_STRNCMP:
2228 if (length == -1)
2229 break;
2230 r = strncmp (p1, p2, length);
2231 known_result = true;
2232 break;
2234 /* Only handleable situation is where the string are equal (result 0),
2235 which is already handled by operand_equal_p case. */
2236 case BUILT_IN_STRCASECMP:
2237 break;
2238 case BUILT_IN_STRNCASECMP:
2240 if (length == -1)
2241 break;
2242 r = strncmp (p1, p2, length);
2243 if (r == 0)
2244 known_result = true;
2245 break;
2247 default:
2248 gcc_unreachable ();
2251 if (known_result)
2253 replace_call_with_value (gsi, build_cmp_result (type, r));
2254 return true;
2258 bool nonzero_length = length >= 1
2259 || fcode == BUILT_IN_STRCMP
2260 || fcode == BUILT_IN_STRCASECMP;
2262 location_t loc = gimple_location (stmt);
2264 /* If the second arg is "", return *(const unsigned char*)arg1. */
2265 if (p2 && *p2 == '\0' && nonzero_length)
2267 gimple_seq stmts = NULL;
2268 tree var = gimple_load_first_char (loc, str1, &stmts);
2269 if (lhs)
2271 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2272 gimple_seq_add_stmt_without_update (&stmts, stmt);
2275 gsi_replace_with_seq_vops (gsi, stmts);
2276 return true;
2279 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2280 if (p1 && *p1 == '\0' && nonzero_length)
2282 gimple_seq stmts = NULL;
2283 tree var = gimple_load_first_char (loc, str2, &stmts);
2285 if (lhs)
2287 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2288 stmt = gimple_build_assign (c, NOP_EXPR, var);
2289 gimple_seq_add_stmt_without_update (&stmts, stmt);
2291 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2292 gimple_seq_add_stmt_without_update (&stmts, stmt);
2295 gsi_replace_with_seq_vops (gsi, stmts);
2296 return true;
2299 /* If len parameter is one, return an expression corresponding to
2300 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2301 if (fcode == BUILT_IN_STRNCMP && length == 1)
2303 gimple_seq stmts = NULL;
2304 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2305 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2307 if (lhs)
2309 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2310 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2311 gimple_seq_add_stmt_without_update (&stmts, convert1);
2313 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2314 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2315 gimple_seq_add_stmt_without_update (&stmts, convert2);
2317 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2318 gimple_seq_add_stmt_without_update (&stmts, stmt);
2321 gsi_replace_with_seq_vops (gsi, stmts);
2322 return true;
2325 /* If length is larger than the length of one constant string,
2326 replace strncmp with corresponding strcmp */
2327 if (fcode == BUILT_IN_STRNCMP
2328 && length > 0
2329 && ((p2 && (size_t) length > strlen (p2))
2330 || (p1 && (size_t) length > strlen (p1))))
2332 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2333 if (!fn)
2334 return false;
2335 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2336 replace_call_with_call_and_fold (gsi, repl);
2337 return true;
2340 return false;
2343 /* Fold a call to the memchr pointed by GSI iterator. */
2345 static bool
2346 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2348 gimple *stmt = gsi_stmt (*gsi);
2349 tree lhs = gimple_call_lhs (stmt);
2350 tree arg1 = gimple_call_arg (stmt, 0);
2351 tree arg2 = gimple_call_arg (stmt, 1);
2352 tree len = gimple_call_arg (stmt, 2);
2354 /* If the LEN parameter is zero, return zero. */
2355 if (integer_zerop (len))
2357 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2358 return true;
2361 char c;
2362 if (TREE_CODE (arg2) != INTEGER_CST
2363 || !tree_fits_uhwi_p (len)
2364 || !target_char_cst_p (arg2, &c))
2365 return false;
2367 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2368 unsigned HOST_WIDE_INT string_length;
2369 const char *p1 = c_getstr (arg1, &string_length);
2371 if (p1)
2373 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2374 if (r == NULL)
2376 if (length <= string_length)
2378 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2379 return true;
2382 else
2384 unsigned HOST_WIDE_INT offset = r - p1;
2385 gimple_seq stmts = NULL;
2386 if (lhs != NULL_TREE)
2388 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2389 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2390 arg1, offset_cst);
2391 gimple_seq_add_stmt_without_update (&stmts, stmt);
2393 else
2394 gimple_seq_add_stmt_without_update (&stmts,
2395 gimple_build_nop ());
2397 gsi_replace_with_seq_vops (gsi, stmts);
2398 return true;
2402 return false;
2405 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2406 to the call. IGNORE is true if the value returned
2407 by the builtin will be ignored. UNLOCKED is true is true if this
2408 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2409 the known length of the string. Return NULL_TREE if no simplification
2410 was possible. */
2412 static bool
2413 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2414 tree arg0, tree arg1,
2415 bool unlocked)
2417 gimple *stmt = gsi_stmt (*gsi);
2419 /* If we're using an unlocked function, assume the other unlocked
2420 functions exist explicitly. */
2421 tree const fn_fputc = (unlocked
2422 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2423 : builtin_decl_implicit (BUILT_IN_FPUTC));
2424 tree const fn_fwrite = (unlocked
2425 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2426 : builtin_decl_implicit (BUILT_IN_FWRITE));
2428 /* If the return value is used, don't do the transformation. */
2429 if (gimple_call_lhs (stmt))
2430 return false;
2432 /* Get the length of the string passed to fputs. If the length
2433 can't be determined, punt. */
2434 tree len = get_maxval_strlen (arg0, 0);
2435 if (!len
2436 || TREE_CODE (len) != INTEGER_CST)
2437 return false;
2439 switch (compare_tree_int (len, 1))
2441 case -1: /* length is 0, delete the call entirely . */
2442 replace_call_with_value (gsi, integer_zero_node);
2443 return true;
2445 case 0: /* length is 1, call fputc. */
2447 const char *p = c_getstr (arg0);
2448 if (p != NULL)
2450 if (!fn_fputc)
2451 return false;
2453 gimple *repl = gimple_build_call (fn_fputc, 2,
2454 build_int_cst
2455 (integer_type_node, p[0]), arg1);
2456 replace_call_with_call_and_fold (gsi, repl);
2457 return true;
2460 /* FALLTHROUGH */
2461 case 1: /* length is greater than 1, call fwrite. */
2463 /* If optimizing for size keep fputs. */
2464 if (optimize_function_for_size_p (cfun))
2465 return false;
2466 /* New argument list transforming fputs(string, stream) to
2467 fwrite(string, 1, len, stream). */
2468 if (!fn_fwrite)
2469 return false;
2471 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2472 size_one_node, len, arg1);
2473 replace_call_with_call_and_fold (gsi, repl);
2474 return true;
2476 default:
2477 gcc_unreachable ();
2479 return false;
2482 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2483 DEST, SRC, LEN, and SIZE are the arguments to the call.
2484 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2485 code of the builtin. If MAXLEN is not NULL, it is maximum length
2486 passed as third argument. */
2488 static bool
2489 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2490 tree dest, tree src, tree len, tree size,
2491 enum built_in_function fcode)
2493 gimple *stmt = gsi_stmt (*gsi);
2494 location_t loc = gimple_location (stmt);
2495 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2496 tree fn;
2498 /* If SRC and DEST are the same (and not volatile), return DEST
2499 (resp. DEST+LEN for __mempcpy_chk). */
2500 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2502 if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
2504 tree func = gimple_call_fndecl (stmt);
2506 warning_at (loc, OPT_Wrestrict,
2507 "%qD source argument is the same as destination",
2508 func);
2511 if (fcode != BUILT_IN_MEMPCPY_CHK)
2513 replace_call_with_value (gsi, dest);
2514 return true;
2516 else
2518 gimple_seq stmts = NULL;
2519 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2520 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2521 TREE_TYPE (dest), dest, len);
2522 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2523 replace_call_with_value (gsi, temp);
2524 return true;
2528 if (! tree_fits_uhwi_p (size))
2529 return false;
2531 tree maxlen = get_maxval_strlen (len, 2);
2532 if (! integer_all_onesp (size))
2534 if (! tree_fits_uhwi_p (len))
2536 /* If LEN is not constant, try MAXLEN too.
2537 For MAXLEN only allow optimizing into non-_ocs function
2538 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2539 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2541 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2543 /* (void) __mempcpy_chk () can be optimized into
2544 (void) __memcpy_chk (). */
2545 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2546 if (!fn)
2547 return false;
2549 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2550 replace_call_with_call_and_fold (gsi, repl);
2551 return true;
2553 return false;
2556 else
2557 maxlen = len;
2559 if (tree_int_cst_lt (size, maxlen))
2560 return false;
2563 fn = NULL_TREE;
2564 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2565 mem{cpy,pcpy,move,set} is available. */
2566 switch (fcode)
2568 case BUILT_IN_MEMCPY_CHK:
2569 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2570 break;
2571 case BUILT_IN_MEMPCPY_CHK:
2572 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2573 break;
2574 case BUILT_IN_MEMMOVE_CHK:
2575 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2576 break;
2577 case BUILT_IN_MEMSET_CHK:
2578 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2579 break;
2580 default:
2581 break;
2584 if (!fn)
2585 return false;
2587 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2588 replace_call_with_call_and_fold (gsi, repl);
2589 return true;
2592 /* Fold a call to the __st[rp]cpy_chk builtin.
2593 DEST, SRC, and SIZE are the arguments to the call.
2594 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2595 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2596 strings passed as second argument. */
2598 static bool
2599 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2600 tree dest,
2601 tree src, tree size,
2602 enum built_in_function fcode)
2604 gimple *stmt = gsi_stmt (*gsi);
2605 location_t loc = gimple_location (stmt);
2606 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2607 tree len, fn;
2609 /* If SRC and DEST are the same (and not volatile), return DEST. */
2610 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2612 tree func = gimple_call_fndecl (stmt);
2614 warning_at (loc, OPT_Wrestrict,
2615 "%qD source argument is the same as destination",
2616 func);
2618 replace_call_with_value (gsi, dest);
2619 return true;
2622 if (! tree_fits_uhwi_p (size))
2623 return false;
2625 tree maxlen = get_maxval_strlen (src, 1);
2626 if (! integer_all_onesp (size))
2628 len = c_strlen (src, 1);
2629 if (! len || ! tree_fits_uhwi_p (len))
2631 /* If LEN is not constant, try MAXLEN too.
2632 For MAXLEN only allow optimizing into non-_ocs function
2633 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2634 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2636 if (fcode == BUILT_IN_STPCPY_CHK)
2638 if (! ignore)
2639 return false;
2641 /* If return value of __stpcpy_chk is ignored,
2642 optimize into __strcpy_chk. */
2643 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2644 if (!fn)
2645 return false;
2647 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2648 replace_call_with_call_and_fold (gsi, repl);
2649 return true;
2652 if (! len || TREE_SIDE_EFFECTS (len))
2653 return false;
2655 /* If c_strlen returned something, but not a constant,
2656 transform __strcpy_chk into __memcpy_chk. */
2657 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2658 if (!fn)
2659 return false;
2661 gimple_seq stmts = NULL;
2662 len = gimple_convert (&stmts, loc, size_type_node, len);
2663 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2664 build_int_cst (size_type_node, 1));
2665 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2666 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2667 replace_call_with_call_and_fold (gsi, repl);
2668 return true;
2671 else
2672 maxlen = len;
2674 if (! tree_int_cst_lt (maxlen, size))
2675 return false;
2678 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2679 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2680 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2681 if (!fn)
2682 return false;
2684 gimple *repl = gimple_build_call (fn, 2, dest, src);
2685 replace_call_with_call_and_fold (gsi, repl);
2686 return true;
2689 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2690 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2691 length passed as third argument. IGNORE is true if return value can be
2692 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2694 static bool
2695 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2696 tree dest, tree src,
2697 tree len, tree size,
2698 enum built_in_function fcode)
2700 gimple *stmt = gsi_stmt (*gsi);
2701 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2702 tree fn;
2704 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2706 /* If return value of __stpncpy_chk is ignored,
2707 optimize into __strncpy_chk. */
2708 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2709 if (fn)
2711 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2712 replace_call_with_call_and_fold (gsi, repl);
2713 return true;
2717 if (! tree_fits_uhwi_p (size))
2718 return false;
2720 tree maxlen = get_maxval_strlen (len, 2);
2721 if (! integer_all_onesp (size))
2723 if (! tree_fits_uhwi_p (len))
2725 /* If LEN is not constant, try MAXLEN too.
2726 For MAXLEN only allow optimizing into non-_ocs function
2727 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2728 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2729 return false;
2731 else
2732 maxlen = len;
2734 if (tree_int_cst_lt (size, maxlen))
2735 return false;
2738 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2739 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2740 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2741 if (!fn)
2742 return false;
2744 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2745 replace_call_with_call_and_fold (gsi, repl);
2746 return true;
2749 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2750 Return NULL_TREE if no simplification can be made. */
2752 static bool
2753 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2755 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2756 location_t loc = gimple_location (stmt);
2757 tree dest = gimple_call_arg (stmt, 0);
2758 tree src = gimple_call_arg (stmt, 1);
2759 tree fn, len, lenp1;
2761 /* If the result is unused, replace stpcpy with strcpy. */
2762 if (gimple_call_lhs (stmt) == NULL_TREE)
2764 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2765 if (!fn)
2766 return false;
2767 gimple_call_set_fndecl (stmt, fn);
2768 fold_stmt (gsi);
2769 return true;
2772 len = c_strlen (src, 1);
2773 if (!len
2774 || TREE_CODE (len) != INTEGER_CST)
2775 return false;
2777 if (optimize_function_for_size_p (cfun)
2778 /* If length is zero it's small enough. */
2779 && !integer_zerop (len))
2780 return false;
2782 /* If the source has a known length replace stpcpy with memcpy. */
2783 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2784 if (!fn)
2785 return false;
2787 gimple_seq stmts = NULL;
2788 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2789 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2790 tem, build_int_cst (size_type_node, 1));
2791 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2792 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2793 gimple_set_vuse (repl, gimple_vuse (stmt));
2794 gimple_set_vdef (repl, gimple_vdef (stmt));
2795 if (gimple_vdef (repl)
2796 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2797 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2798 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2799 /* Replace the result with dest + len. */
2800 stmts = NULL;
2801 tem = gimple_convert (&stmts, loc, sizetype, len);
2802 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2803 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2804 POINTER_PLUS_EXPR, dest, tem);
2805 gsi_replace (gsi, ret, false);
2806 /* Finally fold the memcpy call. */
2807 gimple_stmt_iterator gsi2 = *gsi;
2808 gsi_prev (&gsi2);
2809 fold_stmt (&gsi2);
2810 return true;
2813 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2814 NULL_TREE if a normal call should be emitted rather than expanding
2815 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2816 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2817 passed as second argument. */
2819 static bool
2820 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2821 enum built_in_function fcode)
2823 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2824 tree dest, size, len, fn, fmt, flag;
2825 const char *fmt_str;
2827 /* Verify the required arguments in the original call. */
2828 if (gimple_call_num_args (stmt) < 5)
2829 return false;
2831 dest = gimple_call_arg (stmt, 0);
2832 len = gimple_call_arg (stmt, 1);
2833 flag = gimple_call_arg (stmt, 2);
2834 size = gimple_call_arg (stmt, 3);
2835 fmt = gimple_call_arg (stmt, 4);
2837 if (! tree_fits_uhwi_p (size))
2838 return false;
2840 if (! integer_all_onesp (size))
2842 tree maxlen = get_maxval_strlen (len, 2);
2843 if (! tree_fits_uhwi_p (len))
2845 /* If LEN is not constant, try MAXLEN too.
2846 For MAXLEN only allow optimizing into non-_ocs function
2847 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2848 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2849 return false;
2851 else
2852 maxlen = len;
2854 if (tree_int_cst_lt (size, maxlen))
2855 return false;
2858 if (!init_target_chars ())
2859 return false;
2861 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2862 or if format doesn't contain % chars or is "%s". */
2863 if (! integer_zerop (flag))
2865 fmt_str = c_getstr (fmt);
2866 if (fmt_str == NULL)
2867 return false;
2868 if (strchr (fmt_str, target_percent) != NULL
2869 && strcmp (fmt_str, target_percent_s))
2870 return false;
2873 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2874 available. */
2875 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2876 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2877 if (!fn)
2878 return false;
2880 /* Replace the called function and the first 5 argument by 3 retaining
2881 trailing varargs. */
2882 gimple_call_set_fndecl (stmt, fn);
2883 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2884 gimple_call_set_arg (stmt, 0, dest);
2885 gimple_call_set_arg (stmt, 1, len);
2886 gimple_call_set_arg (stmt, 2, fmt);
2887 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2888 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2889 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2890 fold_stmt (gsi);
2891 return true;
2894 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2895 Return NULL_TREE if a normal call should be emitted rather than
2896 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2897 or BUILT_IN_VSPRINTF_CHK. */
2899 static bool
2900 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2901 enum built_in_function fcode)
2903 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2904 tree dest, size, len, fn, fmt, flag;
2905 const char *fmt_str;
2906 unsigned nargs = gimple_call_num_args (stmt);
2908 /* Verify the required arguments in the original call. */
2909 if (nargs < 4)
2910 return false;
2911 dest = gimple_call_arg (stmt, 0);
2912 flag = gimple_call_arg (stmt, 1);
2913 size = gimple_call_arg (stmt, 2);
2914 fmt = gimple_call_arg (stmt, 3);
2916 if (! tree_fits_uhwi_p (size))
2917 return false;
2919 len = NULL_TREE;
2921 if (!init_target_chars ())
2922 return false;
2924 /* Check whether the format is a literal string constant. */
2925 fmt_str = c_getstr (fmt);
2926 if (fmt_str != NULL)
2928 /* If the format doesn't contain % args or %%, we know the size. */
2929 if (strchr (fmt_str, target_percent) == 0)
2931 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2932 len = build_int_cstu (size_type_node, strlen (fmt_str));
2934 /* If the format is "%s" and first ... argument is a string literal,
2935 we know the size too. */
2936 else if (fcode == BUILT_IN_SPRINTF_CHK
2937 && strcmp (fmt_str, target_percent_s) == 0)
2939 tree arg;
2941 if (nargs == 5)
2943 arg = gimple_call_arg (stmt, 4);
2944 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2946 len = c_strlen (arg, 1);
2947 if (! len || ! tree_fits_uhwi_p (len))
2948 len = NULL_TREE;
2954 if (! integer_all_onesp (size))
2956 if (! len || ! tree_int_cst_lt (len, size))
2957 return false;
2960 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2961 or if format doesn't contain % chars or is "%s". */
2962 if (! integer_zerop (flag))
2964 if (fmt_str == NULL)
2965 return false;
2966 if (strchr (fmt_str, target_percent) != NULL
2967 && strcmp (fmt_str, target_percent_s))
2968 return false;
2971 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2972 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2973 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2974 if (!fn)
2975 return false;
2977 /* Replace the called function and the first 4 argument by 2 retaining
2978 trailing varargs. */
2979 gimple_call_set_fndecl (stmt, fn);
2980 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2981 gimple_call_set_arg (stmt, 0, dest);
2982 gimple_call_set_arg (stmt, 1, fmt);
2983 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2984 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2985 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2986 fold_stmt (gsi);
2987 return true;
2990 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2991 ORIG may be null if this is a 2-argument call. We don't attempt to
2992 simplify calls with more than 3 arguments.
2994 Return true if simplification was possible, otherwise false. */
2996 bool
2997 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2999 gimple *stmt = gsi_stmt (*gsi);
3000 tree dest = gimple_call_arg (stmt, 0);
3001 tree fmt = gimple_call_arg (stmt, 1);
3002 tree orig = NULL_TREE;
3003 const char *fmt_str = NULL;
3005 /* Verify the required arguments in the original call. We deal with two
3006 types of sprintf() calls: 'sprintf (str, fmt)' and
3007 'sprintf (dest, "%s", orig)'. */
3008 if (gimple_call_num_args (stmt) > 3)
3009 return false;
3011 if (gimple_call_num_args (stmt) == 3)
3012 orig = gimple_call_arg (stmt, 2);
3014 /* Check whether the format is a literal string constant. */
3015 fmt_str = c_getstr (fmt);
3016 if (fmt_str == NULL)
3017 return false;
3019 if (!init_target_chars ())
3020 return false;
3022 /* If the format doesn't contain % args or %%, use strcpy. */
3023 if (strchr (fmt_str, target_percent) == NULL)
3025 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3027 if (!fn)
3028 return false;
3030 /* Don't optimize sprintf (buf, "abc", ptr++). */
3031 if (orig)
3032 return false;
3034 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3035 'format' is known to contain no % formats. */
3036 gimple_seq stmts = NULL;
3037 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3038 gimple_seq_add_stmt_without_update (&stmts, repl);
3039 if (gimple_call_lhs (stmt))
3041 repl = gimple_build_assign (gimple_call_lhs (stmt),
3042 build_int_cst (integer_type_node,
3043 strlen (fmt_str)));
3044 gimple_seq_add_stmt_without_update (&stmts, repl);
3045 gsi_replace_with_seq_vops (gsi, stmts);
3046 /* gsi now points at the assignment to the lhs, get a
3047 stmt iterator to the memcpy call.
3048 ??? We can't use gsi_for_stmt as that doesn't work when the
3049 CFG isn't built yet. */
3050 gimple_stmt_iterator gsi2 = *gsi;
3051 gsi_prev (&gsi2);
3052 fold_stmt (&gsi2);
3054 else
3056 gsi_replace_with_seq_vops (gsi, stmts);
3057 fold_stmt (gsi);
3059 return true;
3062 /* If the format is "%s", use strcpy if the result isn't used. */
3063 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3065 tree fn;
3066 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3068 if (!fn)
3069 return false;
3071 /* Don't crash on sprintf (str1, "%s"). */
3072 if (!orig)
3073 return false;
3075 tree orig_len = NULL_TREE;
3076 if (gimple_call_lhs (stmt))
3078 orig_len = get_maxval_strlen (orig, 0);
3079 if (!orig_len)
3080 return false;
3083 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3084 gimple_seq stmts = NULL;
3085 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3086 gimple_seq_add_stmt_without_update (&stmts, repl);
3087 if (gimple_call_lhs (stmt))
3089 if (!useless_type_conversion_p (integer_type_node,
3090 TREE_TYPE (orig_len)))
3091 orig_len = fold_convert (integer_type_node, orig_len);
3092 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3093 gimple_seq_add_stmt_without_update (&stmts, repl);
3094 gsi_replace_with_seq_vops (gsi, stmts);
3095 /* gsi now points at the assignment to the lhs, get a
3096 stmt iterator to the memcpy call.
3097 ??? We can't use gsi_for_stmt as that doesn't work when the
3098 CFG isn't built yet. */
3099 gimple_stmt_iterator gsi2 = *gsi;
3100 gsi_prev (&gsi2);
3101 fold_stmt (&gsi2);
3103 else
3105 gsi_replace_with_seq_vops (gsi, stmts);
3106 fold_stmt (gsi);
3108 return true;
3110 return false;
3113 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3114 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3115 attempt to simplify calls with more than 4 arguments.
3117 Return true if simplification was possible, otherwise false. */
3119 bool
3120 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3122 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3123 tree dest = gimple_call_arg (stmt, 0);
3124 tree destsize = gimple_call_arg (stmt, 1);
3125 tree fmt = gimple_call_arg (stmt, 2);
3126 tree orig = NULL_TREE;
3127 const char *fmt_str = NULL;
3129 if (gimple_call_num_args (stmt) > 4)
3130 return false;
3132 if (gimple_call_num_args (stmt) == 4)
3133 orig = gimple_call_arg (stmt, 3);
3135 if (!tree_fits_uhwi_p (destsize))
3136 return false;
3137 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3139 /* Check whether the format is a literal string constant. */
3140 fmt_str = c_getstr (fmt);
3141 if (fmt_str == NULL)
3142 return false;
3144 if (!init_target_chars ())
3145 return false;
3147 /* If the format doesn't contain % args or %%, use strcpy. */
3148 if (strchr (fmt_str, target_percent) == NULL)
3150 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3151 if (!fn)
3152 return false;
3154 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3155 if (orig)
3156 return false;
3158 /* We could expand this as
3159 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3160 or to
3161 memcpy (str, fmt_with_nul_at_cstm1, cst);
3162 but in the former case that might increase code size
3163 and in the latter case grow .rodata section too much.
3164 So punt for now. */
3165 size_t len = strlen (fmt_str);
3166 if (len >= destlen)
3167 return false;
3169 gimple_seq stmts = NULL;
3170 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3171 gimple_seq_add_stmt_without_update (&stmts, repl);
3172 if (gimple_call_lhs (stmt))
3174 repl = gimple_build_assign (gimple_call_lhs (stmt),
3175 build_int_cst (integer_type_node, len));
3176 gimple_seq_add_stmt_without_update (&stmts, repl);
3177 gsi_replace_with_seq_vops (gsi, stmts);
3178 /* gsi now points at the assignment to the lhs, get a
3179 stmt iterator to the memcpy call.
3180 ??? We can't use gsi_for_stmt as that doesn't work when the
3181 CFG isn't built yet. */
3182 gimple_stmt_iterator gsi2 = *gsi;
3183 gsi_prev (&gsi2);
3184 fold_stmt (&gsi2);
3186 else
3188 gsi_replace_with_seq_vops (gsi, stmts);
3189 fold_stmt (gsi);
3191 return true;
3194 /* If the format is "%s", use strcpy if the result isn't used. */
3195 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3197 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3198 if (!fn)
3199 return false;
3201 /* Don't crash on snprintf (str1, cst, "%s"). */
3202 if (!orig)
3203 return false;
3205 tree orig_len = get_maxval_strlen (orig, 0);
3206 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3207 return false;
3209 /* We could expand this as
3210 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3211 or to
3212 memcpy (str1, str2_with_nul_at_cstm1, cst);
3213 but in the former case that might increase code size
3214 and in the latter case grow .rodata section too much.
3215 So punt for now. */
3216 if (compare_tree_int (orig_len, destlen) >= 0)
3217 return false;
3219 /* Convert snprintf (str1, cst, "%s", str2) into
3220 strcpy (str1, str2) if strlen (str2) < cst. */
3221 gimple_seq stmts = NULL;
3222 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3223 gimple_seq_add_stmt_without_update (&stmts, repl);
3224 if (gimple_call_lhs (stmt))
3226 if (!useless_type_conversion_p (integer_type_node,
3227 TREE_TYPE (orig_len)))
3228 orig_len = fold_convert (integer_type_node, orig_len);
3229 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3230 gimple_seq_add_stmt_without_update (&stmts, repl);
3231 gsi_replace_with_seq_vops (gsi, stmts);
3232 /* gsi now points at the assignment to the lhs, get a
3233 stmt iterator to the memcpy call.
3234 ??? We can't use gsi_for_stmt as that doesn't work when the
3235 CFG isn't built yet. */
3236 gimple_stmt_iterator gsi2 = *gsi;
3237 gsi_prev (&gsi2);
3238 fold_stmt (&gsi2);
3240 else
3242 gsi_replace_with_seq_vops (gsi, stmts);
3243 fold_stmt (gsi);
3245 return true;
3247 return false;
3250 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3251 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3252 more than 3 arguments, and ARG may be null in the 2-argument case.
3254 Return NULL_TREE if no simplification was possible, otherwise return the
3255 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3256 code of the function to be simplified. */
3258 static bool
3259 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3260 tree fp, tree fmt, tree arg,
3261 enum built_in_function fcode)
3263 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3264 tree fn_fputc, fn_fputs;
3265 const char *fmt_str = NULL;
3267 /* If the return value is used, don't do the transformation. */
3268 if (gimple_call_lhs (stmt) != NULL_TREE)
3269 return false;
3271 /* Check whether the format is a literal string constant. */
3272 fmt_str = c_getstr (fmt);
3273 if (fmt_str == NULL)
3274 return false;
3276 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3278 /* If we're using an unlocked function, assume the other
3279 unlocked functions exist explicitly. */
3280 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3281 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3283 else
3285 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3286 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3289 if (!init_target_chars ())
3290 return false;
3292 /* If the format doesn't contain % args or %%, use strcpy. */
3293 if (strchr (fmt_str, target_percent) == NULL)
3295 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3296 && arg)
3297 return false;
3299 /* If the format specifier was "", fprintf does nothing. */
3300 if (fmt_str[0] == '\0')
3302 replace_call_with_value (gsi, NULL_TREE);
3303 return true;
3306 /* When "string" doesn't contain %, replace all cases of
3307 fprintf (fp, string) with fputs (string, fp). The fputs
3308 builtin will take care of special cases like length == 1. */
3309 if (fn_fputs)
3311 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3312 replace_call_with_call_and_fold (gsi, repl);
3313 return true;
3317 /* The other optimizations can be done only on the non-va_list variants. */
3318 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3319 return false;
3321 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3322 else if (strcmp (fmt_str, target_percent_s) == 0)
3324 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3325 return false;
3326 if (fn_fputs)
3328 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3329 replace_call_with_call_and_fold (gsi, repl);
3330 return true;
3334 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3335 else if (strcmp (fmt_str, target_percent_c) == 0)
3337 if (!arg
3338 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3339 return false;
3340 if (fn_fputc)
3342 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3343 replace_call_with_call_and_fold (gsi, repl);
3344 return true;
3348 return false;
3351 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3352 FMT and ARG are the arguments to the call; we don't fold cases with
3353 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3355 Return NULL_TREE if no simplification was possible, otherwise return the
3356 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3357 code of the function to be simplified. */
3359 static bool
3360 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3361 tree arg, enum built_in_function fcode)
3363 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3364 tree fn_putchar, fn_puts, newarg;
3365 const char *fmt_str = NULL;
3367 /* If the return value is used, don't do the transformation. */
3368 if (gimple_call_lhs (stmt) != NULL_TREE)
3369 return false;
3371 /* Check whether the format is a literal string constant. */
3372 fmt_str = c_getstr (fmt);
3373 if (fmt_str == NULL)
3374 return false;
3376 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3378 /* If we're using an unlocked function, assume the other
3379 unlocked functions exist explicitly. */
3380 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3381 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3383 else
3385 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3386 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3389 if (!init_target_chars ())
3390 return false;
3392 if (strcmp (fmt_str, target_percent_s) == 0
3393 || strchr (fmt_str, target_percent) == NULL)
3395 const char *str;
3397 if (strcmp (fmt_str, target_percent_s) == 0)
3399 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3400 return false;
3402 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3403 return false;
3405 str = c_getstr (arg);
3406 if (str == NULL)
3407 return false;
3409 else
3411 /* The format specifier doesn't contain any '%' characters. */
3412 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3413 && arg)
3414 return false;
3415 str = fmt_str;
3418 /* If the string was "", printf does nothing. */
3419 if (str[0] == '\0')
3421 replace_call_with_value (gsi, NULL_TREE);
3422 return true;
3425 /* If the string has length of 1, call putchar. */
3426 if (str[1] == '\0')
3428 /* Given printf("c"), (where c is any one character,)
3429 convert "c"[0] to an int and pass that to the replacement
3430 function. */
3431 newarg = build_int_cst (integer_type_node, str[0]);
3432 if (fn_putchar)
3434 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3435 replace_call_with_call_and_fold (gsi, repl);
3436 return true;
3439 else
3441 /* If the string was "string\n", call puts("string"). */
3442 size_t len = strlen (str);
3443 if ((unsigned char)str[len - 1] == target_newline
3444 && (size_t) (int) len == len
3445 && (int) len > 0)
3447 char *newstr;
3448 tree offset_node, string_cst;
3450 /* Create a NUL-terminated string that's one char shorter
3451 than the original, stripping off the trailing '\n'. */
3452 newarg = build_string_literal (len, str);
3453 string_cst = string_constant (newarg, &offset_node);
3454 gcc_checking_assert (string_cst
3455 && (TREE_STRING_LENGTH (string_cst)
3456 == (int) len)
3457 && integer_zerop (offset_node)
3458 && (unsigned char)
3459 TREE_STRING_POINTER (string_cst)[len - 1]
3460 == target_newline);
3461 /* build_string_literal creates a new STRING_CST,
3462 modify it in place to avoid double copying. */
3463 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3464 newstr[len - 1] = '\0';
3465 if (fn_puts)
3467 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3468 replace_call_with_call_and_fold (gsi, repl);
3469 return true;
3472 else
3473 /* We'd like to arrange to call fputs(string,stdout) here,
3474 but we need stdout and don't have a way to get it yet. */
3475 return false;
3479 /* The other optimizations can be done only on the non-va_list variants. */
3480 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3481 return false;
3483 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3484 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3486 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3487 return false;
3488 if (fn_puts)
3490 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3491 replace_call_with_call_and_fold (gsi, repl);
3492 return true;
3496 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3497 else if (strcmp (fmt_str, target_percent_c) == 0)
3499 if (!arg || ! useless_type_conversion_p (integer_type_node,
3500 TREE_TYPE (arg)))
3501 return false;
3502 if (fn_putchar)
3504 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3505 replace_call_with_call_and_fold (gsi, repl);
3506 return true;
3510 return false;
3515 /* Fold a call to __builtin_strlen with known length LEN. */
3517 static bool
3518 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3520 gimple *stmt = gsi_stmt (*gsi);
3522 wide_int minlen;
3523 wide_int maxlen;
3525 tree lenrange[2];
3526 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
3527 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3528 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3530 /* The range of lengths refers to either a single constant
3531 string or to the longest and shortest constant string
3532 referenced by the argument of the strlen() call, or to
3533 the strings that can possibly be stored in the arrays
3534 the argument refers to. */
3535 minlen = wi::to_wide (lenrange[0]);
3536 maxlen = wi::to_wide (lenrange[1]);
3538 else
3540 unsigned prec = TYPE_PRECISION (sizetype);
3542 minlen = wi::shwi (0, prec);
3543 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3546 if (minlen == maxlen)
3548 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3549 true, GSI_SAME_STMT);
3550 replace_call_with_value (gsi, lenrange[0]);
3551 return true;
3554 tree lhs = gimple_call_lhs (stmt);
3555 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3556 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3558 return false;
3561 /* Fold a call to __builtin_acc_on_device. */
3563 static bool
3564 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3566 /* Defer folding until we know which compiler we're in. */
3567 if (symtab->state != EXPANSION)
3568 return false;
3570 unsigned val_host = GOMP_DEVICE_HOST;
3571 unsigned val_dev = GOMP_DEVICE_NONE;
3573 #ifdef ACCEL_COMPILER
3574 val_host = GOMP_DEVICE_NOT_HOST;
3575 val_dev = ACCEL_COMPILER_acc_device;
3576 #endif
3578 location_t loc = gimple_location (gsi_stmt (*gsi));
3580 tree host_eq = make_ssa_name (boolean_type_node);
3581 gimple *host_ass = gimple_build_assign
3582 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3583 gimple_set_location (host_ass, loc);
3584 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3586 tree dev_eq = make_ssa_name (boolean_type_node);
3587 gimple *dev_ass = gimple_build_assign
3588 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3589 gimple_set_location (dev_ass, loc);
3590 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3592 tree result = make_ssa_name (boolean_type_node);
3593 gimple *result_ass = gimple_build_assign
3594 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3595 gimple_set_location (result_ass, loc);
3596 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3598 replace_call_with_value (gsi, result);
3600 return true;
3603 /* Fold realloc (0, n) -> malloc (n). */
3605 static bool
3606 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3608 gimple *stmt = gsi_stmt (*gsi);
3609 tree arg = gimple_call_arg (stmt, 0);
3610 tree size = gimple_call_arg (stmt, 1);
3612 if (operand_equal_p (arg, null_pointer_node, 0))
3614 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3615 if (fn_malloc)
3617 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3618 replace_call_with_call_and_fold (gsi, repl);
3619 return true;
3622 return false;
3625 /* Fold the non-target builtin at *GSI and return whether any simplification
3626 was made. */
3628 static bool
3629 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3631 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3632 tree callee = gimple_call_fndecl (stmt);
3634 /* Give up for always_inline inline builtins until they are
3635 inlined. */
3636 if (avoid_folding_inline_builtin (callee))
3637 return false;
3639 unsigned n = gimple_call_num_args (stmt);
3640 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3641 switch (fcode)
3643 case BUILT_IN_BCMP:
3644 return gimple_fold_builtin_bcmp (gsi);
3645 case BUILT_IN_BCOPY:
3646 return gimple_fold_builtin_bcopy (gsi);
3647 case BUILT_IN_BZERO:
3648 return gimple_fold_builtin_bzero (gsi);
3650 case BUILT_IN_MEMSET:
3651 return gimple_fold_builtin_memset (gsi,
3652 gimple_call_arg (stmt, 1),
3653 gimple_call_arg (stmt, 2));
3654 case BUILT_IN_MEMCPY:
3655 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3656 gimple_call_arg (stmt, 1), 0);
3657 case BUILT_IN_MEMPCPY:
3658 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3659 gimple_call_arg (stmt, 1), 1);
3660 case BUILT_IN_MEMMOVE:
3661 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3662 gimple_call_arg (stmt, 1), 3);
3663 case BUILT_IN_SPRINTF_CHK:
3664 case BUILT_IN_VSPRINTF_CHK:
3665 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3666 case BUILT_IN_STRCAT_CHK:
3667 return gimple_fold_builtin_strcat_chk (gsi);
3668 case BUILT_IN_STRNCAT_CHK:
3669 return gimple_fold_builtin_strncat_chk (gsi);
3670 case BUILT_IN_STRLEN:
3671 return gimple_fold_builtin_strlen (gsi);
3672 case BUILT_IN_STRCPY:
3673 return gimple_fold_builtin_strcpy (gsi,
3674 gimple_call_arg (stmt, 0),
3675 gimple_call_arg (stmt, 1));
3676 case BUILT_IN_STRNCPY:
3677 return gimple_fold_builtin_strncpy (gsi,
3678 gimple_call_arg (stmt, 0),
3679 gimple_call_arg (stmt, 1),
3680 gimple_call_arg (stmt, 2));
3681 case BUILT_IN_STRCAT:
3682 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3683 gimple_call_arg (stmt, 1));
3684 case BUILT_IN_STRNCAT:
3685 return gimple_fold_builtin_strncat (gsi);
3686 case BUILT_IN_INDEX:
3687 case BUILT_IN_STRCHR:
3688 return gimple_fold_builtin_strchr (gsi, false);
3689 case BUILT_IN_RINDEX:
3690 case BUILT_IN_STRRCHR:
3691 return gimple_fold_builtin_strchr (gsi, true);
3692 case BUILT_IN_STRSTR:
3693 return gimple_fold_builtin_strstr (gsi);
3694 case BUILT_IN_STRCMP:
3695 case BUILT_IN_STRCASECMP:
3696 case BUILT_IN_STRNCMP:
3697 case BUILT_IN_STRNCASECMP:
3698 return gimple_fold_builtin_string_compare (gsi);
3699 case BUILT_IN_MEMCHR:
3700 return gimple_fold_builtin_memchr (gsi);
3701 case BUILT_IN_FPUTS:
3702 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3703 gimple_call_arg (stmt, 1), false);
3704 case BUILT_IN_FPUTS_UNLOCKED:
3705 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3706 gimple_call_arg (stmt, 1), true);
3707 case BUILT_IN_MEMCPY_CHK:
3708 case BUILT_IN_MEMPCPY_CHK:
3709 case BUILT_IN_MEMMOVE_CHK:
3710 case BUILT_IN_MEMSET_CHK:
3711 return gimple_fold_builtin_memory_chk (gsi,
3712 gimple_call_arg (stmt, 0),
3713 gimple_call_arg (stmt, 1),
3714 gimple_call_arg (stmt, 2),
3715 gimple_call_arg (stmt, 3),
3716 fcode);
3717 case BUILT_IN_STPCPY:
3718 return gimple_fold_builtin_stpcpy (gsi);
3719 case BUILT_IN_STRCPY_CHK:
3720 case BUILT_IN_STPCPY_CHK:
3721 return gimple_fold_builtin_stxcpy_chk (gsi,
3722 gimple_call_arg (stmt, 0),
3723 gimple_call_arg (stmt, 1),
3724 gimple_call_arg (stmt, 2),
3725 fcode);
3726 case BUILT_IN_STRNCPY_CHK:
3727 case BUILT_IN_STPNCPY_CHK:
3728 return gimple_fold_builtin_stxncpy_chk (gsi,
3729 gimple_call_arg (stmt, 0),
3730 gimple_call_arg (stmt, 1),
3731 gimple_call_arg (stmt, 2),
3732 gimple_call_arg (stmt, 3),
3733 fcode);
3734 case BUILT_IN_SNPRINTF_CHK:
3735 case BUILT_IN_VSNPRINTF_CHK:
3736 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3738 case BUILT_IN_FPRINTF:
3739 case BUILT_IN_FPRINTF_UNLOCKED:
3740 case BUILT_IN_VFPRINTF:
3741 if (n == 2 || n == 3)
3742 return gimple_fold_builtin_fprintf (gsi,
3743 gimple_call_arg (stmt, 0),
3744 gimple_call_arg (stmt, 1),
3745 n == 3
3746 ? gimple_call_arg (stmt, 2)
3747 : NULL_TREE,
3748 fcode);
3749 break;
3750 case BUILT_IN_FPRINTF_CHK:
3751 case BUILT_IN_VFPRINTF_CHK:
3752 if (n == 3 || n == 4)
3753 return gimple_fold_builtin_fprintf (gsi,
3754 gimple_call_arg (stmt, 0),
3755 gimple_call_arg (stmt, 2),
3756 n == 4
3757 ? gimple_call_arg (stmt, 3)
3758 : NULL_TREE,
3759 fcode);
3760 break;
3761 case BUILT_IN_PRINTF:
3762 case BUILT_IN_PRINTF_UNLOCKED:
3763 case BUILT_IN_VPRINTF:
3764 if (n == 1 || n == 2)
3765 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3766 n == 2
3767 ? gimple_call_arg (stmt, 1)
3768 : NULL_TREE, fcode);
3769 break;
3770 case BUILT_IN_PRINTF_CHK:
3771 case BUILT_IN_VPRINTF_CHK:
3772 if (n == 2 || n == 3)
3773 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3774 n == 3
3775 ? gimple_call_arg (stmt, 2)
3776 : NULL_TREE, fcode);
3777 break;
3778 case BUILT_IN_ACC_ON_DEVICE:
3779 return gimple_fold_builtin_acc_on_device (gsi,
3780 gimple_call_arg (stmt, 0));
3781 case BUILT_IN_REALLOC:
3782 return gimple_fold_builtin_realloc (gsi);
3784 default:;
3787 /* Try the generic builtin folder. */
3788 bool ignore = (gimple_call_lhs (stmt) == NULL);
3789 tree result = fold_call_stmt (stmt, ignore);
3790 if (result)
3792 if (ignore)
3793 STRIP_NOPS (result);
3794 else
3795 result = fold_convert (gimple_call_return_type (stmt), result);
3796 if (!update_call_from_tree (gsi, result))
3797 gimplify_and_update_call_from_tree (gsi, result);
3798 return true;
3801 return false;
3804 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3805 function calls to constants, where possible. */
3807 static tree
3808 fold_internal_goacc_dim (const gimple *call)
3810 int axis = oacc_get_ifn_dim_arg (call);
3811 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3812 tree result = NULL_TREE;
3813 tree type = TREE_TYPE (gimple_call_lhs (call));
3815 switch (gimple_call_internal_fn (call))
3817 case IFN_GOACC_DIM_POS:
3818 /* If the size is 1, we know the answer. */
3819 if (size == 1)
3820 result = build_int_cst (type, 0);
3821 break;
3822 case IFN_GOACC_DIM_SIZE:
3823 /* If the size is not dynamic, we know the answer. */
3824 if (size)
3825 result = build_int_cst (type, size);
3826 break;
3827 default:
3828 break;
3831 return result;
3834 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3835 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3836 &var where var is only addressable because of such calls. */
3838 bool
3839 optimize_atomic_compare_exchange_p (gimple *stmt)
3841 if (gimple_call_num_args (stmt) != 6
3842 || !flag_inline_atomics
3843 || !optimize
3844 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3845 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3846 || !gimple_vdef (stmt)
3847 || !gimple_vuse (stmt))
3848 return false;
3850 tree fndecl = gimple_call_fndecl (stmt);
3851 switch (DECL_FUNCTION_CODE (fndecl))
3853 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3854 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3855 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3856 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3857 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3858 break;
3859 default:
3860 return false;
3863 tree expected = gimple_call_arg (stmt, 1);
3864 if (TREE_CODE (expected) != ADDR_EXPR
3865 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3866 return false;
3868 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3869 if (!is_gimple_reg_type (etype)
3870 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3871 || TREE_THIS_VOLATILE (etype)
3872 || VECTOR_TYPE_P (etype)
3873 || TREE_CODE (etype) == COMPLEX_TYPE
3874 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3875 might not preserve all the bits. See PR71716. */
3876 || SCALAR_FLOAT_TYPE_P (etype)
3877 || maybe_ne (TYPE_PRECISION (etype),
3878 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3879 return false;
3881 tree weak = gimple_call_arg (stmt, 3);
3882 if (!integer_zerop (weak) && !integer_onep (weak))
3883 return false;
3885 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3886 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3887 machine_mode mode = TYPE_MODE (itype);
3889 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3890 == CODE_FOR_nothing
3891 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3892 return false;
3894 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3895 return false;
3897 return true;
3900 /* Fold
3901 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3902 into
3903 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3904 i = IMAGPART_EXPR <t>;
3905 r = (_Bool) i;
3906 e = REALPART_EXPR <t>; */
3908 void
3909 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3911 gimple *stmt = gsi_stmt (*gsi);
3912 tree fndecl = gimple_call_fndecl (stmt);
3913 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3914 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3915 tree ctype = build_complex_type (itype);
3916 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3917 bool throws = false;
3918 edge e = NULL;
3919 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3920 expected);
3921 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3922 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3923 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3925 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3926 build1 (VIEW_CONVERT_EXPR, itype,
3927 gimple_assign_lhs (g)));
3928 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3930 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3931 + int_size_in_bytes (itype);
3932 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3933 gimple_call_arg (stmt, 0),
3934 gimple_assign_lhs (g),
3935 gimple_call_arg (stmt, 2),
3936 build_int_cst (integer_type_node, flag),
3937 gimple_call_arg (stmt, 4),
3938 gimple_call_arg (stmt, 5));
3939 tree lhs = make_ssa_name (ctype);
3940 gimple_call_set_lhs (g, lhs);
3941 gimple_set_vdef (g, gimple_vdef (stmt));
3942 gimple_set_vuse (g, gimple_vuse (stmt));
3943 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3944 tree oldlhs = gimple_call_lhs (stmt);
3945 if (stmt_can_throw_internal (stmt))
3947 throws = true;
3948 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3950 gimple_call_set_nothrow (as_a <gcall *> (g),
3951 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3952 gimple_call_set_lhs (stmt, NULL_TREE);
3953 gsi_replace (gsi, g, true);
3954 if (oldlhs)
3956 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3957 build1 (IMAGPART_EXPR, itype, lhs));
3958 if (throws)
3960 gsi_insert_on_edge_immediate (e, g);
3961 *gsi = gsi_for_stmt (g);
3963 else
3964 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3965 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3966 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3968 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3969 build1 (REALPART_EXPR, itype, lhs));
3970 if (throws && oldlhs == NULL_TREE)
3972 gsi_insert_on_edge_immediate (e, g);
3973 *gsi = gsi_for_stmt (g);
3975 else
3976 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3977 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3979 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3980 VIEW_CONVERT_EXPR,
3981 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3982 gimple_assign_lhs (g)));
3983 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3985 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3986 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3987 *gsi = gsiret;
3990 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3991 doesn't fit into TYPE. The test for overflow should be regardless of
3992 -fwrapv, and even for unsigned types. */
3994 bool
3995 arith_overflowed_p (enum tree_code code, const_tree type,
3996 const_tree arg0, const_tree arg1)
3998 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3999 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
4000 widest2_int_cst;
4001 widest2_int warg0 = widest2_int_cst (arg0);
4002 widest2_int warg1 = widest2_int_cst (arg1);
4003 widest2_int wres;
4004 switch (code)
4006 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4007 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4008 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4009 default: gcc_unreachable ();
4011 signop sign = TYPE_SIGN (type);
4012 if (sign == UNSIGNED && wi::neg_p (wres))
4013 return true;
4014 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4017 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4018 The statement may be replaced by another statement, e.g., if the call
4019 simplifies to a constant value. Return true if any changes were made.
4020 It is assumed that the operands have been previously folded. */
4022 static bool
4023 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4025 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4026 tree callee;
4027 bool changed = false;
4028 unsigned i;
4030 /* Fold *& in call arguments. */
4031 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4032 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4034 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4035 if (tmp)
4037 gimple_call_set_arg (stmt, i, tmp);
4038 changed = true;
4042 /* Check for virtual calls that became direct calls. */
4043 callee = gimple_call_fn (stmt);
4044 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4046 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4048 if (dump_file && virtual_method_call_p (callee)
4049 && !possible_polymorphic_call_target_p
4050 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4051 (OBJ_TYPE_REF_EXPR (callee)))))
4053 fprintf (dump_file,
4054 "Type inheritance inconsistent devirtualization of ");
4055 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4056 fprintf (dump_file, " to ");
4057 print_generic_expr (dump_file, callee, TDF_SLIM);
4058 fprintf (dump_file, "\n");
4061 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4062 changed = true;
4064 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4066 bool final;
4067 vec <cgraph_node *>targets
4068 = possible_polymorphic_call_targets (callee, stmt, &final);
4069 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4071 tree lhs = gimple_call_lhs (stmt);
4072 if (dump_enabled_p ())
4074 location_t loc = gimple_location_safe (stmt);
4075 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4076 "folding virtual function call to %s\n",
4077 targets.length () == 1
4078 ? targets[0]->name ()
4079 : "__builtin_unreachable");
4081 if (targets.length () == 1)
4083 tree fndecl = targets[0]->decl;
4084 gimple_call_set_fndecl (stmt, fndecl);
4085 changed = true;
4086 /* If changing the call to __cxa_pure_virtual
4087 or similar noreturn function, adjust gimple_call_fntype
4088 too. */
4089 if (gimple_call_noreturn_p (stmt)
4090 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4091 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4092 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4093 == void_type_node))
4094 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4095 /* If the call becomes noreturn, remove the lhs. */
4096 if (lhs
4097 && gimple_call_noreturn_p (stmt)
4098 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4099 || should_remove_lhs_p (lhs)))
4101 if (TREE_CODE (lhs) == SSA_NAME)
4103 tree var = create_tmp_var (TREE_TYPE (lhs));
4104 tree def = get_or_create_ssa_default_def (cfun, var);
4105 gimple *new_stmt = gimple_build_assign (lhs, def);
4106 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4108 gimple_call_set_lhs (stmt, NULL_TREE);
4110 maybe_remove_unused_call_args (cfun, stmt);
4112 else
4114 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4115 gimple *new_stmt = gimple_build_call (fndecl, 0);
4116 gimple_set_location (new_stmt, gimple_location (stmt));
4117 /* If the call had a SSA name as lhs morph that into
4118 an uninitialized value. */
4119 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4121 tree var = create_tmp_var (TREE_TYPE (lhs));
4122 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4123 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4124 set_ssa_default_def (cfun, var, lhs);
4126 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4127 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4128 gsi_replace (gsi, new_stmt, false);
4129 return true;
4135 /* Check for indirect calls that became direct calls, and then
4136 no longer require a static chain. */
4137 if (gimple_call_chain (stmt))
4139 tree fn = gimple_call_fndecl (stmt);
4140 if (fn && !DECL_STATIC_CHAIN (fn))
4142 gimple_call_set_chain (stmt, NULL);
4143 changed = true;
4145 else
4147 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4148 if (tmp)
4150 gimple_call_set_chain (stmt, tmp);
4151 changed = true;
4156 if (inplace)
4157 return changed;
4159 /* Check for builtins that CCP can handle using information not
4160 available in the generic fold routines. */
4161 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4163 if (gimple_fold_builtin (gsi))
4164 changed = true;
4166 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4168 changed |= targetm.gimple_fold_builtin (gsi);
4170 else if (gimple_call_internal_p (stmt))
4172 enum tree_code subcode = ERROR_MARK;
4173 tree result = NULL_TREE;
4174 bool cplx_result = false;
4175 tree overflow = NULL_TREE;
4176 switch (gimple_call_internal_fn (stmt))
4178 case IFN_BUILTIN_EXPECT:
4179 result = fold_builtin_expect (gimple_location (stmt),
4180 gimple_call_arg (stmt, 0),
4181 gimple_call_arg (stmt, 1),
4182 gimple_call_arg (stmt, 2));
4183 break;
4184 case IFN_UBSAN_OBJECT_SIZE:
4186 tree offset = gimple_call_arg (stmt, 1);
4187 tree objsize = gimple_call_arg (stmt, 2);
4188 if (integer_all_onesp (objsize)
4189 || (TREE_CODE (offset) == INTEGER_CST
4190 && TREE_CODE (objsize) == INTEGER_CST
4191 && tree_int_cst_le (offset, objsize)))
4193 replace_call_with_value (gsi, NULL_TREE);
4194 return true;
4197 break;
4198 case IFN_UBSAN_PTR:
4199 if (integer_zerop (gimple_call_arg (stmt, 1)))
4201 replace_call_with_value (gsi, NULL_TREE);
4202 return true;
4204 break;
4205 case IFN_UBSAN_BOUNDS:
4207 tree index = gimple_call_arg (stmt, 1);
4208 tree bound = gimple_call_arg (stmt, 2);
4209 if (TREE_CODE (index) == INTEGER_CST
4210 && TREE_CODE (bound) == INTEGER_CST)
4212 index = fold_convert (TREE_TYPE (bound), index);
4213 if (TREE_CODE (index) == INTEGER_CST
4214 && tree_int_cst_le (index, bound))
4216 replace_call_with_value (gsi, NULL_TREE);
4217 return true;
4221 break;
4222 case IFN_GOACC_DIM_SIZE:
4223 case IFN_GOACC_DIM_POS:
4224 result = fold_internal_goacc_dim (stmt);
4225 break;
4226 case IFN_UBSAN_CHECK_ADD:
4227 subcode = PLUS_EXPR;
4228 break;
4229 case IFN_UBSAN_CHECK_SUB:
4230 subcode = MINUS_EXPR;
4231 break;
4232 case IFN_UBSAN_CHECK_MUL:
4233 subcode = MULT_EXPR;
4234 break;
4235 case IFN_ADD_OVERFLOW:
4236 subcode = PLUS_EXPR;
4237 cplx_result = true;
4238 break;
4239 case IFN_SUB_OVERFLOW:
4240 subcode = MINUS_EXPR;
4241 cplx_result = true;
4242 break;
4243 case IFN_MUL_OVERFLOW:
4244 subcode = MULT_EXPR;
4245 cplx_result = true;
4246 break;
4247 default:
4248 break;
4250 if (subcode != ERROR_MARK)
4252 tree arg0 = gimple_call_arg (stmt, 0);
4253 tree arg1 = gimple_call_arg (stmt, 1);
4254 tree type = TREE_TYPE (arg0);
4255 if (cplx_result)
4257 tree lhs = gimple_call_lhs (stmt);
4258 if (lhs == NULL_TREE)
4259 type = NULL_TREE;
4260 else
4261 type = TREE_TYPE (TREE_TYPE (lhs));
4263 if (type == NULL_TREE)
4265 /* x = y + 0; x = y - 0; x = y * 0; */
4266 else if (integer_zerop (arg1))
4267 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4268 /* x = 0 + y; x = 0 * y; */
4269 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4270 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4271 /* x = y - y; */
4272 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4273 result = integer_zero_node;
4274 /* x = y * 1; x = 1 * y; */
4275 else if (subcode == MULT_EXPR && integer_onep (arg1))
4276 result = arg0;
4277 else if (subcode == MULT_EXPR && integer_onep (arg0))
4278 result = arg1;
4279 else if (TREE_CODE (arg0) == INTEGER_CST
4280 && TREE_CODE (arg1) == INTEGER_CST)
4282 if (cplx_result)
4283 result = int_const_binop (subcode, fold_convert (type, arg0),
4284 fold_convert (type, arg1));
4285 else
4286 result = int_const_binop (subcode, arg0, arg1);
4287 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4289 if (cplx_result)
4290 overflow = build_one_cst (type);
4291 else
4292 result = NULL_TREE;
4295 if (result)
4297 if (result == integer_zero_node)
4298 result = build_zero_cst (type);
4299 else if (cplx_result && TREE_TYPE (result) != type)
4301 if (TREE_CODE (result) == INTEGER_CST)
4303 if (arith_overflowed_p (PLUS_EXPR, type, result,
4304 integer_zero_node))
4305 overflow = build_one_cst (type);
4307 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4308 && TYPE_UNSIGNED (type))
4309 || (TYPE_PRECISION (type)
4310 < (TYPE_PRECISION (TREE_TYPE (result))
4311 + (TYPE_UNSIGNED (TREE_TYPE (result))
4312 && !TYPE_UNSIGNED (type)))))
4313 result = NULL_TREE;
4314 if (result)
4315 result = fold_convert (type, result);
4320 if (result)
4322 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4323 result = drop_tree_overflow (result);
4324 if (cplx_result)
4326 if (overflow == NULL_TREE)
4327 overflow = build_zero_cst (TREE_TYPE (result));
4328 tree ctype = build_complex_type (TREE_TYPE (result));
4329 if (TREE_CODE (result) == INTEGER_CST
4330 && TREE_CODE (overflow) == INTEGER_CST)
4331 result = build_complex (ctype, result, overflow);
4332 else
4333 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4334 ctype, result, overflow);
4336 if (!update_call_from_tree (gsi, result))
4337 gimplify_and_update_call_from_tree (gsi, result);
4338 changed = true;
4342 return changed;
4346 /* Return true whether NAME has a use on STMT. */
4348 static bool
4349 has_use_on_stmt (tree name, gimple *stmt)
4351 imm_use_iterator iter;
4352 use_operand_p use_p;
4353 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4354 if (USE_STMT (use_p) == stmt)
4355 return true;
4356 return false;
4359 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4360 gimple_simplify.
4362 Replaces *GSI with the simplification result in RCODE and OPS
4363 and the associated statements in *SEQ. Does the replacement
4364 according to INPLACE and returns true if the operation succeeded. */
4366 static bool
4367 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4368 code_helper rcode, tree *ops,
4369 gimple_seq *seq, bool inplace)
4371 gimple *stmt = gsi_stmt (*gsi);
4373 /* Play safe and do not allow abnormals to be mentioned in
4374 newly created statements. See also maybe_push_res_to_seq.
4375 As an exception allow such uses if there was a use of the
4376 same SSA name on the old stmt. */
4377 if ((TREE_CODE (ops[0]) == SSA_NAME
4378 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4379 && !has_use_on_stmt (ops[0], stmt))
4380 || (ops[1]
4381 && TREE_CODE (ops[1]) == SSA_NAME
4382 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4383 && !has_use_on_stmt (ops[1], stmt))
4384 || (ops[2]
4385 && TREE_CODE (ops[2]) == SSA_NAME
4386 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4387 && !has_use_on_stmt (ops[2], stmt))
4388 || (COMPARISON_CLASS_P (ops[0])
4389 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4390 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4391 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4392 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4393 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4394 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4395 return false;
4397 /* Don't insert new statements when INPLACE is true, even if we could
4398 reuse STMT for the final statement. */
4399 if (inplace && !gimple_seq_empty_p (*seq))
4400 return false;
4402 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4404 gcc_assert (rcode.is_tree_code ());
4405 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4406 /* GIMPLE_CONDs condition may not throw. */
4407 && (!flag_exceptions
4408 || !cfun->can_throw_non_call_exceptions
4409 || !operation_could_trap_p (rcode,
4410 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4411 false, NULL_TREE)))
4412 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4413 else if (rcode == SSA_NAME)
4414 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4415 build_zero_cst (TREE_TYPE (ops[0])));
4416 else if (rcode == INTEGER_CST)
4418 if (integer_zerop (ops[0]))
4419 gimple_cond_make_false (cond_stmt);
4420 else
4421 gimple_cond_make_true (cond_stmt);
4423 else if (!inplace)
4425 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4426 ops, seq);
4427 if (!res)
4428 return false;
4429 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4430 build_zero_cst (TREE_TYPE (res)));
4432 else
4433 return false;
4434 if (dump_file && (dump_flags & TDF_DETAILS))
4436 fprintf (dump_file, "gimple_simplified to ");
4437 if (!gimple_seq_empty_p (*seq))
4438 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4439 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4440 0, TDF_SLIM);
4442 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4443 return true;
4445 else if (is_gimple_assign (stmt)
4446 && rcode.is_tree_code ())
4448 if (!inplace
4449 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4451 maybe_build_generic_op (rcode,
4452 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4453 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4454 if (dump_file && (dump_flags & TDF_DETAILS))
4456 fprintf (dump_file, "gimple_simplified to ");
4457 if (!gimple_seq_empty_p (*seq))
4458 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4459 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4460 0, TDF_SLIM);
4462 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4463 return true;
4466 else if (rcode.is_fn_code ()
4467 && gimple_call_combined_fn (stmt) == rcode)
4469 unsigned i;
4470 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4472 gcc_assert (ops[i] != NULL_TREE);
4473 gimple_call_set_arg (stmt, i, ops[i]);
4475 if (i < 3)
4476 gcc_assert (ops[i] == NULL_TREE);
4477 if (dump_file && (dump_flags & TDF_DETAILS))
4479 fprintf (dump_file, "gimple_simplified to ");
4480 if (!gimple_seq_empty_p (*seq))
4481 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4482 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4484 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4485 return true;
4487 else if (!inplace)
4489 if (gimple_has_lhs (stmt))
4491 tree lhs = gimple_get_lhs (stmt);
4492 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4493 ops, seq, lhs))
4494 return false;
4495 if (dump_file && (dump_flags & TDF_DETAILS))
4497 fprintf (dump_file, "gimple_simplified to ");
4498 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4500 gsi_replace_with_seq_vops (gsi, *seq);
4501 return true;
4503 else
4504 gcc_unreachable ();
4507 return false;
4510 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4512 static bool
4513 maybe_canonicalize_mem_ref_addr (tree *t)
4515 bool res = false;
4517 if (TREE_CODE (*t) == ADDR_EXPR)
4518 t = &TREE_OPERAND (*t, 0);
4520 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4521 generic vector extension. The actual vector referenced is
4522 view-converted to an array type for this purpose. If the index
4523 is constant the canonical representation in the middle-end is a
4524 BIT_FIELD_REF so re-write the former to the latter here. */
4525 if (TREE_CODE (*t) == ARRAY_REF
4526 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4527 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4528 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4530 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4531 if (VECTOR_TYPE_P (vtype))
4533 tree low = array_ref_low_bound (*t);
4534 if (TREE_CODE (low) == INTEGER_CST)
4536 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4538 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4539 wi::to_widest (low));
4540 idx = wi::mul (idx, wi::to_widest
4541 (TYPE_SIZE (TREE_TYPE (*t))));
4542 widest_int ext
4543 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4544 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4546 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4547 TREE_TYPE (*t),
4548 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4549 TYPE_SIZE (TREE_TYPE (*t)),
4550 wide_int_to_tree (bitsizetype, idx));
4551 res = true;
4558 while (handled_component_p (*t))
4559 t = &TREE_OPERAND (*t, 0);
4561 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4562 of invariant addresses into a SSA name MEM_REF address. */
4563 if (TREE_CODE (*t) == MEM_REF
4564 || TREE_CODE (*t) == TARGET_MEM_REF)
4566 tree addr = TREE_OPERAND (*t, 0);
4567 if (TREE_CODE (addr) == ADDR_EXPR
4568 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4569 || handled_component_p (TREE_OPERAND (addr, 0))))
4571 tree base;
4572 poly_int64 coffset;
4573 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4574 &coffset);
4575 if (!base)
4576 gcc_unreachable ();
4578 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4579 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4580 TREE_OPERAND (*t, 1),
4581 size_int (coffset));
4582 res = true;
4584 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4585 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4588 /* Canonicalize back MEM_REFs to plain reference trees if the object
4589 accessed is a decl that has the same access semantics as the MEM_REF. */
4590 if (TREE_CODE (*t) == MEM_REF
4591 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4592 && integer_zerop (TREE_OPERAND (*t, 1))
4593 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4595 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4596 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4597 if (/* Same volatile qualification. */
4598 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4599 /* Same TBAA behavior with -fstrict-aliasing. */
4600 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4601 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4602 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4603 /* Same alignment. */
4604 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4605 /* We have to look out here to not drop a required conversion
4606 from the rhs to the lhs if *t appears on the lhs or vice-versa
4607 if it appears on the rhs. Thus require strict type
4608 compatibility. */
4609 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4611 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4612 res = true;
4616 /* Canonicalize TARGET_MEM_REF in particular with respect to
4617 the indexes becoming constant. */
4618 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4620 tree tem = maybe_fold_tmr (*t);
4621 if (tem)
4623 *t = tem;
4624 res = true;
4628 return res;
4631 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4632 distinguishes both cases. */
4634 static bool
4635 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4637 bool changed = false;
4638 gimple *stmt = gsi_stmt (*gsi);
4639 bool nowarning = gimple_no_warning_p (stmt);
4640 unsigned i;
4641 fold_defer_overflow_warnings ();
4643 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4644 after propagation.
4645 ??? This shouldn't be done in generic folding but in the
4646 propagation helpers which also know whether an address was
4647 propagated.
4648 Also canonicalize operand order. */
4649 switch (gimple_code (stmt))
4651 case GIMPLE_ASSIGN:
4652 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4654 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4655 if ((REFERENCE_CLASS_P (*rhs)
4656 || TREE_CODE (*rhs) == ADDR_EXPR)
4657 && maybe_canonicalize_mem_ref_addr (rhs))
4658 changed = true;
4659 tree *lhs = gimple_assign_lhs_ptr (stmt);
4660 if (REFERENCE_CLASS_P (*lhs)
4661 && maybe_canonicalize_mem_ref_addr (lhs))
4662 changed = true;
4664 else
4666 /* Canonicalize operand order. */
4667 enum tree_code code = gimple_assign_rhs_code (stmt);
4668 if (TREE_CODE_CLASS (code) == tcc_comparison
4669 || commutative_tree_code (code)
4670 || commutative_ternary_tree_code (code))
4672 tree rhs1 = gimple_assign_rhs1 (stmt);
4673 tree rhs2 = gimple_assign_rhs2 (stmt);
4674 if (tree_swap_operands_p (rhs1, rhs2))
4676 gimple_assign_set_rhs1 (stmt, rhs2);
4677 gimple_assign_set_rhs2 (stmt, rhs1);
4678 if (TREE_CODE_CLASS (code) == tcc_comparison)
4679 gimple_assign_set_rhs_code (stmt,
4680 swap_tree_comparison (code));
4681 changed = true;
4685 break;
4686 case GIMPLE_CALL:
4688 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4690 tree *arg = gimple_call_arg_ptr (stmt, i);
4691 if (REFERENCE_CLASS_P (*arg)
4692 && maybe_canonicalize_mem_ref_addr (arg))
4693 changed = true;
4695 tree *lhs = gimple_call_lhs_ptr (stmt);
4696 if (*lhs
4697 && REFERENCE_CLASS_P (*lhs)
4698 && maybe_canonicalize_mem_ref_addr (lhs))
4699 changed = true;
4700 break;
4702 case GIMPLE_ASM:
4704 gasm *asm_stmt = as_a <gasm *> (stmt);
4705 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4707 tree link = gimple_asm_output_op (asm_stmt, i);
4708 tree op = TREE_VALUE (link);
4709 if (REFERENCE_CLASS_P (op)
4710 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4711 changed = true;
4713 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4715 tree link = gimple_asm_input_op (asm_stmt, i);
4716 tree op = TREE_VALUE (link);
4717 if ((REFERENCE_CLASS_P (op)
4718 || TREE_CODE (op) == ADDR_EXPR)
4719 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4720 changed = true;
4723 break;
4724 case GIMPLE_DEBUG:
4725 if (gimple_debug_bind_p (stmt))
4727 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4728 if (*val
4729 && (REFERENCE_CLASS_P (*val)
4730 || TREE_CODE (*val) == ADDR_EXPR)
4731 && maybe_canonicalize_mem_ref_addr (val))
4732 changed = true;
4734 break;
4735 case GIMPLE_COND:
4737 /* Canonicalize operand order. */
4738 tree lhs = gimple_cond_lhs (stmt);
4739 tree rhs = gimple_cond_rhs (stmt);
4740 if (tree_swap_operands_p (lhs, rhs))
4742 gcond *gc = as_a <gcond *> (stmt);
4743 gimple_cond_set_lhs (gc, rhs);
4744 gimple_cond_set_rhs (gc, lhs);
4745 gimple_cond_set_code (gc,
4746 swap_tree_comparison (gimple_cond_code (gc)));
4747 changed = true;
4750 default:;
4753 /* Dispatch to pattern-based folding. */
4754 if (!inplace
4755 || is_gimple_assign (stmt)
4756 || gimple_code (stmt) == GIMPLE_COND)
4758 gimple_seq seq = NULL;
4759 code_helper rcode;
4760 tree ops[3] = {};
4761 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4762 valueize, valueize))
4764 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4765 changed = true;
4766 else
4767 gimple_seq_discard (seq);
4771 stmt = gsi_stmt (*gsi);
4773 /* Fold the main computation performed by the statement. */
4774 switch (gimple_code (stmt))
4776 case GIMPLE_ASSIGN:
4778 /* Try to canonicalize for boolean-typed X the comparisons
4779 X == 0, X == 1, X != 0, and X != 1. */
4780 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4781 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4783 tree lhs = gimple_assign_lhs (stmt);
4784 tree op1 = gimple_assign_rhs1 (stmt);
4785 tree op2 = gimple_assign_rhs2 (stmt);
4786 tree type = TREE_TYPE (op1);
4788 /* Check whether the comparison operands are of the same boolean
4789 type as the result type is.
4790 Check that second operand is an integer-constant with value
4791 one or zero. */
4792 if (TREE_CODE (op2) == INTEGER_CST
4793 && (integer_zerop (op2) || integer_onep (op2))
4794 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4796 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4797 bool is_logical_not = false;
4799 /* X == 0 and X != 1 is a logical-not.of X
4800 X == 1 and X != 0 is X */
4801 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4802 || (cmp_code == NE_EXPR && integer_onep (op2)))
4803 is_logical_not = true;
4805 if (is_logical_not == false)
4806 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4807 /* Only for one-bit precision typed X the transformation
4808 !X -> ~X is valied. */
4809 else if (TYPE_PRECISION (type) == 1)
4810 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4811 /* Otherwise we use !X -> X ^ 1. */
4812 else
4813 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4814 build_int_cst (type, 1));
4815 changed = true;
4816 break;
4820 unsigned old_num_ops = gimple_num_ops (stmt);
4821 tree lhs = gimple_assign_lhs (stmt);
4822 tree new_rhs = fold_gimple_assign (gsi);
4823 if (new_rhs
4824 && !useless_type_conversion_p (TREE_TYPE (lhs),
4825 TREE_TYPE (new_rhs)))
4826 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4827 if (new_rhs
4828 && (!inplace
4829 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4831 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4832 changed = true;
4834 break;
4837 case GIMPLE_CALL:
4838 changed |= gimple_fold_call (gsi, inplace);
4839 break;
4841 case GIMPLE_ASM:
4842 /* Fold *& in asm operands. */
4844 gasm *asm_stmt = as_a <gasm *> (stmt);
4845 size_t noutputs;
4846 const char **oconstraints;
4847 const char *constraint;
4848 bool allows_mem, allows_reg;
4850 noutputs = gimple_asm_noutputs (asm_stmt);
4851 oconstraints = XALLOCAVEC (const char *, noutputs);
4853 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4855 tree link = gimple_asm_output_op (asm_stmt, i);
4856 tree op = TREE_VALUE (link);
4857 oconstraints[i]
4858 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4859 if (REFERENCE_CLASS_P (op)
4860 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4862 TREE_VALUE (link) = op;
4863 changed = true;
4866 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4868 tree link = gimple_asm_input_op (asm_stmt, i);
4869 tree op = TREE_VALUE (link);
4870 constraint
4871 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4872 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4873 oconstraints, &allows_mem, &allows_reg);
4874 if (REFERENCE_CLASS_P (op)
4875 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4876 != NULL_TREE)
4878 TREE_VALUE (link) = op;
4879 changed = true;
4883 break;
4885 case GIMPLE_DEBUG:
4886 if (gimple_debug_bind_p (stmt))
4888 tree val = gimple_debug_bind_get_value (stmt);
4889 if (val
4890 && REFERENCE_CLASS_P (val))
4892 tree tem = maybe_fold_reference (val, false);
4893 if (tem)
4895 gimple_debug_bind_set_value (stmt, tem);
4896 changed = true;
4899 else if (val
4900 && TREE_CODE (val) == ADDR_EXPR)
4902 tree ref = TREE_OPERAND (val, 0);
4903 tree tem = maybe_fold_reference (ref, false);
4904 if (tem)
4906 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4907 gimple_debug_bind_set_value (stmt, tem);
4908 changed = true;
4912 break;
4914 case GIMPLE_RETURN:
4916 greturn *ret_stmt = as_a<greturn *> (stmt);
4917 tree ret = gimple_return_retval(ret_stmt);
4919 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4921 tree val = valueize (ret);
4922 if (val && val != ret
4923 && may_propagate_copy (ret, val))
4925 gimple_return_set_retval (ret_stmt, val);
4926 changed = true;
4930 break;
4932 default:;
4935 stmt = gsi_stmt (*gsi);
4937 /* Fold *& on the lhs. */
4938 if (gimple_has_lhs (stmt))
4940 tree lhs = gimple_get_lhs (stmt);
4941 if (lhs && REFERENCE_CLASS_P (lhs))
4943 tree new_lhs = maybe_fold_reference (lhs, true);
4944 if (new_lhs)
4946 gimple_set_lhs (stmt, new_lhs);
4947 changed = true;
4952 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4953 return changed;
4956 /* Valueziation callback that ends up not following SSA edges. */
4958 tree
4959 no_follow_ssa_edges (tree)
4961 return NULL_TREE;
4964 /* Valueization callback that ends up following single-use SSA edges only. */
4966 tree
4967 follow_single_use_edges (tree val)
4969 if (TREE_CODE (val) == SSA_NAME
4970 && !has_single_use (val))
4971 return NULL_TREE;
4972 return val;
4975 /* Fold the statement pointed to by GSI. In some cases, this function may
4976 replace the whole statement with a new one. Returns true iff folding
4977 makes any changes.
4978 The statement pointed to by GSI should be in valid gimple form but may
4979 be in unfolded state as resulting from for example constant propagation
4980 which can produce *&x = 0. */
4982 bool
4983 fold_stmt (gimple_stmt_iterator *gsi)
4985 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4988 bool
4989 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4991 return fold_stmt_1 (gsi, false, valueize);
4994 /* Perform the minimal folding on statement *GSI. Only operations like
4995 *&x created by constant propagation are handled. The statement cannot
4996 be replaced with a new one. Return true if the statement was
4997 changed, false otherwise.
4998 The statement *GSI should be in valid gimple form but may
4999 be in unfolded state as resulting from for example constant propagation
5000 which can produce *&x = 0. */
5002 bool
5003 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5005 gimple *stmt = gsi_stmt (*gsi);
5006 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5007 gcc_assert (gsi_stmt (*gsi) == stmt);
5008 return changed;
5011 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5012 if EXPR is null or we don't know how.
5013 If non-null, the result always has boolean type. */
5015 static tree
5016 canonicalize_bool (tree expr, bool invert)
5018 if (!expr)
5019 return NULL_TREE;
5020 else if (invert)
5022 if (integer_nonzerop (expr))
5023 return boolean_false_node;
5024 else if (integer_zerop (expr))
5025 return boolean_true_node;
5026 else if (TREE_CODE (expr) == SSA_NAME)
5027 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5028 build_int_cst (TREE_TYPE (expr), 0));
5029 else if (COMPARISON_CLASS_P (expr))
5030 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5031 boolean_type_node,
5032 TREE_OPERAND (expr, 0),
5033 TREE_OPERAND (expr, 1));
5034 else
5035 return NULL_TREE;
5037 else
5039 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5040 return expr;
5041 if (integer_nonzerop (expr))
5042 return boolean_true_node;
5043 else if (integer_zerop (expr))
5044 return boolean_false_node;
5045 else if (TREE_CODE (expr) == SSA_NAME)
5046 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5047 build_int_cst (TREE_TYPE (expr), 0));
5048 else if (COMPARISON_CLASS_P (expr))
5049 return fold_build2 (TREE_CODE (expr),
5050 boolean_type_node,
5051 TREE_OPERAND (expr, 0),
5052 TREE_OPERAND (expr, 1));
5053 else
5054 return NULL_TREE;
5058 /* Check to see if a boolean expression EXPR is logically equivalent to the
5059 comparison (OP1 CODE OP2). Check for various identities involving
5060 SSA_NAMEs. */
5062 static bool
5063 same_bool_comparison_p (const_tree expr, enum tree_code code,
5064 const_tree op1, const_tree op2)
5066 gimple *s;
5068 /* The obvious case. */
5069 if (TREE_CODE (expr) == code
5070 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5071 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5072 return true;
5074 /* Check for comparing (name, name != 0) and the case where expr
5075 is an SSA_NAME with a definition matching the comparison. */
5076 if (TREE_CODE (expr) == SSA_NAME
5077 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5079 if (operand_equal_p (expr, op1, 0))
5080 return ((code == NE_EXPR && integer_zerop (op2))
5081 || (code == EQ_EXPR && integer_nonzerop (op2)));
5082 s = SSA_NAME_DEF_STMT (expr);
5083 if (is_gimple_assign (s)
5084 && gimple_assign_rhs_code (s) == code
5085 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5086 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5087 return true;
5090 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5091 of name is a comparison, recurse. */
5092 if (TREE_CODE (op1) == SSA_NAME
5093 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5095 s = SSA_NAME_DEF_STMT (op1);
5096 if (is_gimple_assign (s)
5097 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5099 enum tree_code c = gimple_assign_rhs_code (s);
5100 if ((c == NE_EXPR && integer_zerop (op2))
5101 || (c == EQ_EXPR && integer_nonzerop (op2)))
5102 return same_bool_comparison_p (expr, c,
5103 gimple_assign_rhs1 (s),
5104 gimple_assign_rhs2 (s));
5105 if ((c == EQ_EXPR && integer_zerop (op2))
5106 || (c == NE_EXPR && integer_nonzerop (op2)))
5107 return same_bool_comparison_p (expr,
5108 invert_tree_comparison (c, false),
5109 gimple_assign_rhs1 (s),
5110 gimple_assign_rhs2 (s));
5113 return false;
5116 /* Check to see if two boolean expressions OP1 and OP2 are logically
5117 equivalent. */
5119 static bool
5120 same_bool_result_p (const_tree op1, const_tree op2)
5122 /* Simple cases first. */
5123 if (operand_equal_p (op1, op2, 0))
5124 return true;
5126 /* Check the cases where at least one of the operands is a comparison.
5127 These are a bit smarter than operand_equal_p in that they apply some
5128 identifies on SSA_NAMEs. */
5129 if (COMPARISON_CLASS_P (op2)
5130 && same_bool_comparison_p (op1, TREE_CODE (op2),
5131 TREE_OPERAND (op2, 0),
5132 TREE_OPERAND (op2, 1)))
5133 return true;
5134 if (COMPARISON_CLASS_P (op1)
5135 && same_bool_comparison_p (op2, TREE_CODE (op1),
5136 TREE_OPERAND (op1, 0),
5137 TREE_OPERAND (op1, 1)))
5138 return true;
5140 /* Default case. */
5141 return false;
5144 /* Forward declarations for some mutually recursive functions. */
5146 static tree
5147 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5148 enum tree_code code2, tree op2a, tree op2b);
5149 static tree
5150 and_var_with_comparison (tree var, bool invert,
5151 enum tree_code code2, tree op2a, tree op2b);
5152 static tree
5153 and_var_with_comparison_1 (gimple *stmt,
5154 enum tree_code code2, tree op2a, tree op2b);
5155 static tree
5156 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5157 enum tree_code code2, tree op2a, tree op2b);
5158 static tree
5159 or_var_with_comparison (tree var, bool invert,
5160 enum tree_code code2, tree op2a, tree op2b);
5161 static tree
5162 or_var_with_comparison_1 (gimple *stmt,
5163 enum tree_code code2, tree op2a, tree op2b);
5165 /* Helper function for and_comparisons_1: try to simplify the AND of the
5166 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5167 If INVERT is true, invert the value of the VAR before doing the AND.
5168 Return NULL_EXPR if we can't simplify this to a single expression. */
5170 static tree
5171 and_var_with_comparison (tree var, bool invert,
5172 enum tree_code code2, tree op2a, tree op2b)
5174 tree t;
5175 gimple *stmt = SSA_NAME_DEF_STMT (var);
5177 /* We can only deal with variables whose definitions are assignments. */
5178 if (!is_gimple_assign (stmt))
5179 return NULL_TREE;
5181 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5182 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5183 Then we only have to consider the simpler non-inverted cases. */
5184 if (invert)
5185 t = or_var_with_comparison_1 (stmt,
5186 invert_tree_comparison (code2, false),
5187 op2a, op2b);
5188 else
5189 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5190 return canonicalize_bool (t, invert);
5193 /* Try to simplify the AND of the ssa variable defined by the assignment
5194 STMT with the comparison specified by (OP2A CODE2 OP2B).
5195 Return NULL_EXPR if we can't simplify this to a single expression. */
5197 static tree
5198 and_var_with_comparison_1 (gimple *stmt,
5199 enum tree_code code2, tree op2a, tree op2b)
5201 tree var = gimple_assign_lhs (stmt);
5202 tree true_test_var = NULL_TREE;
5203 tree false_test_var = NULL_TREE;
5204 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5206 /* Check for identities like (var AND (var == 0)) => false. */
5207 if (TREE_CODE (op2a) == SSA_NAME
5208 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5210 if ((code2 == NE_EXPR && integer_zerop (op2b))
5211 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5213 true_test_var = op2a;
5214 if (var == true_test_var)
5215 return var;
5217 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5218 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5220 false_test_var = op2a;
5221 if (var == false_test_var)
5222 return boolean_false_node;
5226 /* If the definition is a comparison, recurse on it. */
5227 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5229 tree t = and_comparisons_1 (innercode,
5230 gimple_assign_rhs1 (stmt),
5231 gimple_assign_rhs2 (stmt),
5232 code2,
5233 op2a,
5234 op2b);
5235 if (t)
5236 return t;
5239 /* If the definition is an AND or OR expression, we may be able to
5240 simplify by reassociating. */
5241 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5242 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5244 tree inner1 = gimple_assign_rhs1 (stmt);
5245 tree inner2 = gimple_assign_rhs2 (stmt);
5246 gimple *s;
5247 tree t;
5248 tree partial = NULL_TREE;
5249 bool is_and = (innercode == BIT_AND_EXPR);
5251 /* Check for boolean identities that don't require recursive examination
5252 of inner1/inner2:
5253 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5254 inner1 AND (inner1 OR inner2) => inner1
5255 !inner1 AND (inner1 AND inner2) => false
5256 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5257 Likewise for similar cases involving inner2. */
5258 if (inner1 == true_test_var)
5259 return (is_and ? var : inner1);
5260 else if (inner2 == true_test_var)
5261 return (is_and ? var : inner2);
5262 else if (inner1 == false_test_var)
5263 return (is_and
5264 ? boolean_false_node
5265 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5266 else if (inner2 == false_test_var)
5267 return (is_and
5268 ? boolean_false_node
5269 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5271 /* Next, redistribute/reassociate the AND across the inner tests.
5272 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5273 if (TREE_CODE (inner1) == SSA_NAME
5274 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5275 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5276 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5277 gimple_assign_rhs1 (s),
5278 gimple_assign_rhs2 (s),
5279 code2, op2a, op2b)))
5281 /* Handle the AND case, where we are reassociating:
5282 (inner1 AND inner2) AND (op2a code2 op2b)
5283 => (t AND inner2)
5284 If the partial result t is a constant, we win. Otherwise
5285 continue on to try reassociating with the other inner test. */
5286 if (is_and)
5288 if (integer_onep (t))
5289 return inner2;
5290 else if (integer_zerop (t))
5291 return boolean_false_node;
5294 /* Handle the OR case, where we are redistributing:
5295 (inner1 OR inner2) AND (op2a code2 op2b)
5296 => (t OR (inner2 AND (op2a code2 op2b))) */
5297 else if (integer_onep (t))
5298 return boolean_true_node;
5300 /* Save partial result for later. */
5301 partial = t;
5304 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5305 if (TREE_CODE (inner2) == SSA_NAME
5306 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5307 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5308 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5309 gimple_assign_rhs1 (s),
5310 gimple_assign_rhs2 (s),
5311 code2, op2a, op2b)))
5313 /* Handle the AND case, where we are reassociating:
5314 (inner1 AND inner2) AND (op2a code2 op2b)
5315 => (inner1 AND t) */
5316 if (is_and)
5318 if (integer_onep (t))
5319 return inner1;
5320 else if (integer_zerop (t))
5321 return boolean_false_node;
5322 /* If both are the same, we can apply the identity
5323 (x AND x) == x. */
5324 else if (partial && same_bool_result_p (t, partial))
5325 return t;
5328 /* Handle the OR case. where we are redistributing:
5329 (inner1 OR inner2) AND (op2a code2 op2b)
5330 => (t OR (inner1 AND (op2a code2 op2b)))
5331 => (t OR partial) */
5332 else
5334 if (integer_onep (t))
5335 return boolean_true_node;
5336 else if (partial)
5338 /* We already got a simplification for the other
5339 operand to the redistributed OR expression. The
5340 interesting case is when at least one is false.
5341 Or, if both are the same, we can apply the identity
5342 (x OR x) == x. */
5343 if (integer_zerop (partial))
5344 return t;
5345 else if (integer_zerop (t))
5346 return partial;
5347 else if (same_bool_result_p (t, partial))
5348 return t;
5353 return NULL_TREE;
5356 /* Try to simplify the AND of two comparisons defined by
5357 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5358 If this can be done without constructing an intermediate value,
5359 return the resulting tree; otherwise NULL_TREE is returned.
5360 This function is deliberately asymmetric as it recurses on SSA_DEFs
5361 in the first comparison but not the second. */
5363 static tree
5364 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5365 enum tree_code code2, tree op2a, tree op2b)
5367 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5369 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5370 if (operand_equal_p (op1a, op2a, 0)
5371 && operand_equal_p (op1b, op2b, 0))
5373 /* Result will be either NULL_TREE, or a combined comparison. */
5374 tree t = combine_comparisons (UNKNOWN_LOCATION,
5375 TRUTH_ANDIF_EXPR, code1, code2,
5376 truth_type, op1a, op1b);
5377 if (t)
5378 return t;
5381 /* Likewise the swapped case of the above. */
5382 if (operand_equal_p (op1a, op2b, 0)
5383 && operand_equal_p (op1b, op2a, 0))
5385 /* Result will be either NULL_TREE, or a combined comparison. */
5386 tree t = combine_comparisons (UNKNOWN_LOCATION,
5387 TRUTH_ANDIF_EXPR, code1,
5388 swap_tree_comparison (code2),
5389 truth_type, op1a, op1b);
5390 if (t)
5391 return t;
5394 /* If both comparisons are of the same value against constants, we might
5395 be able to merge them. */
5396 if (operand_equal_p (op1a, op2a, 0)
5397 && TREE_CODE (op1b) == INTEGER_CST
5398 && TREE_CODE (op2b) == INTEGER_CST)
5400 int cmp = tree_int_cst_compare (op1b, op2b);
5402 /* If we have (op1a == op1b), we should either be able to
5403 return that or FALSE, depending on whether the constant op1b
5404 also satisfies the other comparison against op2b. */
5405 if (code1 == EQ_EXPR)
5407 bool done = true;
5408 bool val;
5409 switch (code2)
5411 case EQ_EXPR: val = (cmp == 0); break;
5412 case NE_EXPR: val = (cmp != 0); break;
5413 case LT_EXPR: val = (cmp < 0); break;
5414 case GT_EXPR: val = (cmp > 0); break;
5415 case LE_EXPR: val = (cmp <= 0); break;
5416 case GE_EXPR: val = (cmp >= 0); break;
5417 default: done = false;
5419 if (done)
5421 if (val)
5422 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5423 else
5424 return boolean_false_node;
5427 /* Likewise if the second comparison is an == comparison. */
5428 else if (code2 == EQ_EXPR)
5430 bool done = true;
5431 bool val;
5432 switch (code1)
5434 case EQ_EXPR: val = (cmp == 0); break;
5435 case NE_EXPR: val = (cmp != 0); break;
5436 case LT_EXPR: val = (cmp > 0); break;
5437 case GT_EXPR: val = (cmp < 0); break;
5438 case LE_EXPR: val = (cmp >= 0); break;
5439 case GE_EXPR: val = (cmp <= 0); break;
5440 default: done = false;
5442 if (done)
5444 if (val)
5445 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5446 else
5447 return boolean_false_node;
5451 /* Same business with inequality tests. */
5452 else if (code1 == NE_EXPR)
5454 bool val;
5455 switch (code2)
5457 case EQ_EXPR: val = (cmp != 0); break;
5458 case NE_EXPR: val = (cmp == 0); break;
5459 case LT_EXPR: val = (cmp >= 0); break;
5460 case GT_EXPR: val = (cmp <= 0); break;
5461 case LE_EXPR: val = (cmp > 0); break;
5462 case GE_EXPR: val = (cmp < 0); break;
5463 default:
5464 val = false;
5466 if (val)
5467 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5469 else if (code2 == NE_EXPR)
5471 bool val;
5472 switch (code1)
5474 case EQ_EXPR: val = (cmp == 0); break;
5475 case NE_EXPR: val = (cmp != 0); break;
5476 case LT_EXPR: val = (cmp <= 0); break;
5477 case GT_EXPR: val = (cmp >= 0); break;
5478 case LE_EXPR: val = (cmp < 0); break;
5479 case GE_EXPR: val = (cmp > 0); break;
5480 default:
5481 val = false;
5483 if (val)
5484 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5487 /* Chose the more restrictive of two < or <= comparisons. */
5488 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5489 && (code2 == LT_EXPR || code2 == LE_EXPR))
5491 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5492 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5493 else
5494 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5497 /* Likewise chose the more restrictive of two > or >= comparisons. */
5498 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5499 && (code2 == GT_EXPR || code2 == GE_EXPR))
5501 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5502 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5503 else
5504 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5507 /* Check for singleton ranges. */
5508 else if (cmp == 0
5509 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5510 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5511 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5513 /* Check for disjoint ranges. */
5514 else if (cmp <= 0
5515 && (code1 == LT_EXPR || code1 == LE_EXPR)
5516 && (code2 == GT_EXPR || code2 == GE_EXPR))
5517 return boolean_false_node;
5518 else if (cmp >= 0
5519 && (code1 == GT_EXPR || code1 == GE_EXPR)
5520 && (code2 == LT_EXPR || code2 == LE_EXPR))
5521 return boolean_false_node;
5524 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5525 NAME's definition is a truth value. See if there are any simplifications
5526 that can be done against the NAME's definition. */
5527 if (TREE_CODE (op1a) == SSA_NAME
5528 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5529 && (integer_zerop (op1b) || integer_onep (op1b)))
5531 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5532 || (code1 == NE_EXPR && integer_onep (op1b)));
5533 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5534 switch (gimple_code (stmt))
5536 case GIMPLE_ASSIGN:
5537 /* Try to simplify by copy-propagating the definition. */
5538 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5540 case GIMPLE_PHI:
5541 /* If every argument to the PHI produces the same result when
5542 ANDed with the second comparison, we win.
5543 Do not do this unless the type is bool since we need a bool
5544 result here anyway. */
5545 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5547 tree result = NULL_TREE;
5548 unsigned i;
5549 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5551 tree arg = gimple_phi_arg_def (stmt, i);
5553 /* If this PHI has itself as an argument, ignore it.
5554 If all the other args produce the same result,
5555 we're still OK. */
5556 if (arg == gimple_phi_result (stmt))
5557 continue;
5558 else if (TREE_CODE (arg) == INTEGER_CST)
5560 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5562 if (!result)
5563 result = boolean_false_node;
5564 else if (!integer_zerop (result))
5565 return NULL_TREE;
5567 else if (!result)
5568 result = fold_build2 (code2, boolean_type_node,
5569 op2a, op2b);
5570 else if (!same_bool_comparison_p (result,
5571 code2, op2a, op2b))
5572 return NULL_TREE;
5574 else if (TREE_CODE (arg) == SSA_NAME
5575 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5577 tree temp;
5578 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5579 /* In simple cases we can look through PHI nodes,
5580 but we have to be careful with loops.
5581 See PR49073. */
5582 if (! dom_info_available_p (CDI_DOMINATORS)
5583 || gimple_bb (def_stmt) == gimple_bb (stmt)
5584 || dominated_by_p (CDI_DOMINATORS,
5585 gimple_bb (def_stmt),
5586 gimple_bb (stmt)))
5587 return NULL_TREE;
5588 temp = and_var_with_comparison (arg, invert, code2,
5589 op2a, op2b);
5590 if (!temp)
5591 return NULL_TREE;
5592 else if (!result)
5593 result = temp;
5594 else if (!same_bool_result_p (result, temp))
5595 return NULL_TREE;
5597 else
5598 return NULL_TREE;
5600 return result;
5603 default:
5604 break;
5607 return NULL_TREE;
5610 /* Try to simplify the AND of two comparisons, specified by
5611 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5612 If this can be simplified to a single expression (without requiring
5613 introducing more SSA variables to hold intermediate values),
5614 return the resulting tree. Otherwise return NULL_TREE.
5615 If the result expression is non-null, it has boolean type. */
5617 tree
5618 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5619 enum tree_code code2, tree op2a, tree op2b)
5621 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5622 if (t)
5623 return t;
5624 else
5625 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5628 /* Helper function for or_comparisons_1: try to simplify the OR of the
5629 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5630 If INVERT is true, invert the value of VAR before doing the OR.
5631 Return NULL_EXPR if we can't simplify this to a single expression. */
5633 static tree
5634 or_var_with_comparison (tree var, bool invert,
5635 enum tree_code code2, tree op2a, tree op2b)
5637 tree t;
5638 gimple *stmt = SSA_NAME_DEF_STMT (var);
5640 /* We can only deal with variables whose definitions are assignments. */
5641 if (!is_gimple_assign (stmt))
5642 return NULL_TREE;
5644 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5645 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5646 Then we only have to consider the simpler non-inverted cases. */
5647 if (invert)
5648 t = and_var_with_comparison_1 (stmt,
5649 invert_tree_comparison (code2, false),
5650 op2a, op2b);
5651 else
5652 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5653 return canonicalize_bool (t, invert);
5656 /* Try to simplify the OR of the ssa variable defined by the assignment
5657 STMT with the comparison specified by (OP2A CODE2 OP2B).
5658 Return NULL_EXPR if we can't simplify this to a single expression. */
5660 static tree
5661 or_var_with_comparison_1 (gimple *stmt,
5662 enum tree_code code2, tree op2a, tree op2b)
5664 tree var = gimple_assign_lhs (stmt);
5665 tree true_test_var = NULL_TREE;
5666 tree false_test_var = NULL_TREE;
5667 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5669 /* Check for identities like (var OR (var != 0)) => true . */
5670 if (TREE_CODE (op2a) == SSA_NAME
5671 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5673 if ((code2 == NE_EXPR && integer_zerop (op2b))
5674 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5676 true_test_var = op2a;
5677 if (var == true_test_var)
5678 return var;
5680 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5681 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5683 false_test_var = op2a;
5684 if (var == false_test_var)
5685 return boolean_true_node;
5689 /* If the definition is a comparison, recurse on it. */
5690 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5692 tree t = or_comparisons_1 (innercode,
5693 gimple_assign_rhs1 (stmt),
5694 gimple_assign_rhs2 (stmt),
5695 code2,
5696 op2a,
5697 op2b);
5698 if (t)
5699 return t;
5702 /* If the definition is an AND or OR expression, we may be able to
5703 simplify by reassociating. */
5704 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5705 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5707 tree inner1 = gimple_assign_rhs1 (stmt);
5708 tree inner2 = gimple_assign_rhs2 (stmt);
5709 gimple *s;
5710 tree t;
5711 tree partial = NULL_TREE;
5712 bool is_or = (innercode == BIT_IOR_EXPR);
5714 /* Check for boolean identities that don't require recursive examination
5715 of inner1/inner2:
5716 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5717 inner1 OR (inner1 AND inner2) => inner1
5718 !inner1 OR (inner1 OR inner2) => true
5719 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5721 if (inner1 == true_test_var)
5722 return (is_or ? var : inner1);
5723 else if (inner2 == true_test_var)
5724 return (is_or ? var : inner2);
5725 else if (inner1 == false_test_var)
5726 return (is_or
5727 ? boolean_true_node
5728 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5729 else if (inner2 == false_test_var)
5730 return (is_or
5731 ? boolean_true_node
5732 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5734 /* Next, redistribute/reassociate the OR across the inner tests.
5735 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5736 if (TREE_CODE (inner1) == SSA_NAME
5737 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5738 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5739 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5740 gimple_assign_rhs1 (s),
5741 gimple_assign_rhs2 (s),
5742 code2, op2a, op2b)))
5744 /* Handle the OR case, where we are reassociating:
5745 (inner1 OR inner2) OR (op2a code2 op2b)
5746 => (t OR inner2)
5747 If the partial result t is a constant, we win. Otherwise
5748 continue on to try reassociating with the other inner test. */
5749 if (is_or)
5751 if (integer_onep (t))
5752 return boolean_true_node;
5753 else if (integer_zerop (t))
5754 return inner2;
5757 /* Handle the AND case, where we are redistributing:
5758 (inner1 AND inner2) OR (op2a code2 op2b)
5759 => (t AND (inner2 OR (op2a code op2b))) */
5760 else if (integer_zerop (t))
5761 return boolean_false_node;
5763 /* Save partial result for later. */
5764 partial = t;
5767 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5768 if (TREE_CODE (inner2) == SSA_NAME
5769 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5770 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5771 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5772 gimple_assign_rhs1 (s),
5773 gimple_assign_rhs2 (s),
5774 code2, op2a, op2b)))
5776 /* Handle the OR case, where we are reassociating:
5777 (inner1 OR inner2) OR (op2a code2 op2b)
5778 => (inner1 OR t)
5779 => (t OR partial) */
5780 if (is_or)
5782 if (integer_zerop (t))
5783 return inner1;
5784 else if (integer_onep (t))
5785 return boolean_true_node;
5786 /* If both are the same, we can apply the identity
5787 (x OR x) == x. */
5788 else if (partial && same_bool_result_p (t, partial))
5789 return t;
5792 /* Handle the AND case, where we are redistributing:
5793 (inner1 AND inner2) OR (op2a code2 op2b)
5794 => (t AND (inner1 OR (op2a code2 op2b)))
5795 => (t AND partial) */
5796 else
5798 if (integer_zerop (t))
5799 return boolean_false_node;
5800 else if (partial)
5802 /* We already got a simplification for the other
5803 operand to the redistributed AND expression. The
5804 interesting case is when at least one is true.
5805 Or, if both are the same, we can apply the identity
5806 (x AND x) == x. */
5807 if (integer_onep (partial))
5808 return t;
5809 else if (integer_onep (t))
5810 return partial;
5811 else if (same_bool_result_p (t, partial))
5812 return t;
5817 return NULL_TREE;
5820 /* Try to simplify the OR of two comparisons defined by
5821 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5822 If this can be done without constructing an intermediate value,
5823 return the resulting tree; otherwise NULL_TREE is returned.
5824 This function is deliberately asymmetric as it recurses on SSA_DEFs
5825 in the first comparison but not the second. */
5827 static tree
5828 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5829 enum tree_code code2, tree op2a, tree op2b)
5831 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5833 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5834 if (operand_equal_p (op1a, op2a, 0)
5835 && operand_equal_p (op1b, op2b, 0))
5837 /* Result will be either NULL_TREE, or a combined comparison. */
5838 tree t = combine_comparisons (UNKNOWN_LOCATION,
5839 TRUTH_ORIF_EXPR, code1, code2,
5840 truth_type, op1a, op1b);
5841 if (t)
5842 return t;
5845 /* Likewise the swapped case of the above. */
5846 if (operand_equal_p (op1a, op2b, 0)
5847 && operand_equal_p (op1b, op2a, 0))
5849 /* Result will be either NULL_TREE, or a combined comparison. */
5850 tree t = combine_comparisons (UNKNOWN_LOCATION,
5851 TRUTH_ORIF_EXPR, code1,
5852 swap_tree_comparison (code2),
5853 truth_type, op1a, op1b);
5854 if (t)
5855 return t;
5858 /* If both comparisons are of the same value against constants, we might
5859 be able to merge them. */
5860 if (operand_equal_p (op1a, op2a, 0)
5861 && TREE_CODE (op1b) == INTEGER_CST
5862 && TREE_CODE (op2b) == INTEGER_CST)
5864 int cmp = tree_int_cst_compare (op1b, op2b);
5866 /* If we have (op1a != op1b), we should either be able to
5867 return that or TRUE, depending on whether the constant op1b
5868 also satisfies the other comparison against op2b. */
5869 if (code1 == NE_EXPR)
5871 bool done = true;
5872 bool val;
5873 switch (code2)
5875 case EQ_EXPR: val = (cmp == 0); break;
5876 case NE_EXPR: val = (cmp != 0); break;
5877 case LT_EXPR: val = (cmp < 0); break;
5878 case GT_EXPR: val = (cmp > 0); break;
5879 case LE_EXPR: val = (cmp <= 0); break;
5880 case GE_EXPR: val = (cmp >= 0); break;
5881 default: done = false;
5883 if (done)
5885 if (val)
5886 return boolean_true_node;
5887 else
5888 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5891 /* Likewise if the second comparison is a != comparison. */
5892 else if (code2 == NE_EXPR)
5894 bool done = true;
5895 bool val;
5896 switch (code1)
5898 case EQ_EXPR: val = (cmp == 0); break;
5899 case NE_EXPR: val = (cmp != 0); break;
5900 case LT_EXPR: val = (cmp > 0); break;
5901 case GT_EXPR: val = (cmp < 0); break;
5902 case LE_EXPR: val = (cmp >= 0); break;
5903 case GE_EXPR: val = (cmp <= 0); break;
5904 default: done = false;
5906 if (done)
5908 if (val)
5909 return boolean_true_node;
5910 else
5911 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5915 /* See if an equality test is redundant with the other comparison. */
5916 else if (code1 == EQ_EXPR)
5918 bool val;
5919 switch (code2)
5921 case EQ_EXPR: val = (cmp == 0); break;
5922 case NE_EXPR: val = (cmp != 0); break;
5923 case LT_EXPR: val = (cmp < 0); break;
5924 case GT_EXPR: val = (cmp > 0); break;
5925 case LE_EXPR: val = (cmp <= 0); break;
5926 case GE_EXPR: val = (cmp >= 0); break;
5927 default:
5928 val = false;
5930 if (val)
5931 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5933 else if (code2 == EQ_EXPR)
5935 bool val;
5936 switch (code1)
5938 case EQ_EXPR: val = (cmp == 0); break;
5939 case NE_EXPR: val = (cmp != 0); break;
5940 case LT_EXPR: val = (cmp > 0); break;
5941 case GT_EXPR: val = (cmp < 0); break;
5942 case LE_EXPR: val = (cmp >= 0); break;
5943 case GE_EXPR: val = (cmp <= 0); break;
5944 default:
5945 val = false;
5947 if (val)
5948 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5951 /* Chose the less restrictive of two < or <= comparisons. */
5952 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5953 && (code2 == LT_EXPR || code2 == LE_EXPR))
5955 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5956 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5957 else
5958 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5961 /* Likewise chose the less restrictive of two > or >= comparisons. */
5962 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5963 && (code2 == GT_EXPR || code2 == GE_EXPR))
5965 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5966 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5967 else
5968 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5971 /* Check for singleton ranges. */
5972 else if (cmp == 0
5973 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5974 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5975 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5977 /* Check for less/greater pairs that don't restrict the range at all. */
5978 else if (cmp >= 0
5979 && (code1 == LT_EXPR || code1 == LE_EXPR)
5980 && (code2 == GT_EXPR || code2 == GE_EXPR))
5981 return boolean_true_node;
5982 else if (cmp <= 0
5983 && (code1 == GT_EXPR || code1 == GE_EXPR)
5984 && (code2 == LT_EXPR || code2 == LE_EXPR))
5985 return boolean_true_node;
5988 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5989 NAME's definition is a truth value. See if there are any simplifications
5990 that can be done against the NAME's definition. */
5991 if (TREE_CODE (op1a) == SSA_NAME
5992 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5993 && (integer_zerop (op1b) || integer_onep (op1b)))
5995 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5996 || (code1 == NE_EXPR && integer_onep (op1b)));
5997 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5998 switch (gimple_code (stmt))
6000 case GIMPLE_ASSIGN:
6001 /* Try to simplify by copy-propagating the definition. */
6002 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6004 case GIMPLE_PHI:
6005 /* If every argument to the PHI produces the same result when
6006 ORed with the second comparison, we win.
6007 Do not do this unless the type is bool since we need a bool
6008 result here anyway. */
6009 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6011 tree result = NULL_TREE;
6012 unsigned i;
6013 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6015 tree arg = gimple_phi_arg_def (stmt, i);
6017 /* If this PHI has itself as an argument, ignore it.
6018 If all the other args produce the same result,
6019 we're still OK. */
6020 if (arg == gimple_phi_result (stmt))
6021 continue;
6022 else if (TREE_CODE (arg) == INTEGER_CST)
6024 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6026 if (!result)
6027 result = boolean_true_node;
6028 else if (!integer_onep (result))
6029 return NULL_TREE;
6031 else if (!result)
6032 result = fold_build2 (code2, boolean_type_node,
6033 op2a, op2b);
6034 else if (!same_bool_comparison_p (result,
6035 code2, op2a, op2b))
6036 return NULL_TREE;
6038 else if (TREE_CODE (arg) == SSA_NAME
6039 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6041 tree temp;
6042 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6043 /* In simple cases we can look through PHI nodes,
6044 but we have to be careful with loops.
6045 See PR49073. */
6046 if (! dom_info_available_p (CDI_DOMINATORS)
6047 || gimple_bb (def_stmt) == gimple_bb (stmt)
6048 || dominated_by_p (CDI_DOMINATORS,
6049 gimple_bb (def_stmt),
6050 gimple_bb (stmt)))
6051 return NULL_TREE;
6052 temp = or_var_with_comparison (arg, invert, code2,
6053 op2a, op2b);
6054 if (!temp)
6055 return NULL_TREE;
6056 else if (!result)
6057 result = temp;
6058 else if (!same_bool_result_p (result, temp))
6059 return NULL_TREE;
6061 else
6062 return NULL_TREE;
6064 return result;
6067 default:
6068 break;
6071 return NULL_TREE;
6074 /* Try to simplify the OR of two comparisons, specified by
6075 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6076 If this can be simplified to a single expression (without requiring
6077 introducing more SSA variables to hold intermediate values),
6078 return the resulting tree. Otherwise return NULL_TREE.
6079 If the result expression is non-null, it has boolean type. */
6081 tree
6082 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6083 enum tree_code code2, tree op2a, tree op2b)
6085 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6086 if (t)
6087 return t;
6088 else
6089 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6093 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6095 Either NULL_TREE, a simplified but non-constant or a constant
6096 is returned.
6098 ??? This should go into a gimple-fold-inline.h file to be eventually
6099 privatized with the single valueize function used in the various TUs
6100 to avoid the indirect function call overhead. */
6102 tree
6103 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6104 tree (*gvalueize) (tree))
6106 code_helper rcode;
6107 tree ops[3] = {};
6108 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6109 edges if there are intermediate VARYING defs. For this reason
6110 do not follow SSA edges here even though SCCVN can technically
6111 just deal fine with that. */
6112 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6114 tree res = NULL_TREE;
6115 if (gimple_simplified_result_is_gimple_val (rcode, ops))
6116 res = ops[0];
6117 else if (mprts_hook)
6118 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6119 if (res)
6121 if (dump_file && dump_flags & TDF_DETAILS)
6123 fprintf (dump_file, "Match-and-simplified ");
6124 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6125 fprintf (dump_file, " to ");
6126 print_generic_expr (dump_file, res);
6127 fprintf (dump_file, "\n");
6129 return res;
6133 location_t loc = gimple_location (stmt);
6134 switch (gimple_code (stmt))
6136 case GIMPLE_ASSIGN:
6138 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6140 switch (get_gimple_rhs_class (subcode))
6142 case GIMPLE_SINGLE_RHS:
6144 tree rhs = gimple_assign_rhs1 (stmt);
6145 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6147 if (TREE_CODE (rhs) == SSA_NAME)
6149 /* If the RHS is an SSA_NAME, return its known constant value,
6150 if any. */
6151 return (*valueize) (rhs);
6153 /* Handle propagating invariant addresses into address
6154 operations. */
6155 else if (TREE_CODE (rhs) == ADDR_EXPR
6156 && !is_gimple_min_invariant (rhs))
6158 poly_int64 offset = 0;
6159 tree base;
6160 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6161 &offset,
6162 valueize);
6163 if (base
6164 && (CONSTANT_CLASS_P (base)
6165 || decl_address_invariant_p (base)))
6166 return build_invariant_address (TREE_TYPE (rhs),
6167 base, offset);
6169 else if (TREE_CODE (rhs) == CONSTRUCTOR
6170 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6171 && known_eq (CONSTRUCTOR_NELTS (rhs),
6172 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6174 unsigned i, nelts;
6175 tree val;
6177 nelts = CONSTRUCTOR_NELTS (rhs);
6178 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6179 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6181 val = (*valueize) (val);
6182 if (TREE_CODE (val) == INTEGER_CST
6183 || TREE_CODE (val) == REAL_CST
6184 || TREE_CODE (val) == FIXED_CST)
6185 vec.quick_push (val);
6186 else
6187 return NULL_TREE;
6190 return vec.build ();
6192 if (subcode == OBJ_TYPE_REF)
6194 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6195 /* If callee is constant, we can fold away the wrapper. */
6196 if (is_gimple_min_invariant (val))
6197 return val;
6200 if (kind == tcc_reference)
6202 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6203 || TREE_CODE (rhs) == REALPART_EXPR
6204 || TREE_CODE (rhs) == IMAGPART_EXPR)
6205 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6207 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6208 return fold_unary_loc (EXPR_LOCATION (rhs),
6209 TREE_CODE (rhs),
6210 TREE_TYPE (rhs), val);
6212 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6213 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6215 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6216 return fold_ternary_loc (EXPR_LOCATION (rhs),
6217 TREE_CODE (rhs),
6218 TREE_TYPE (rhs), val,
6219 TREE_OPERAND (rhs, 1),
6220 TREE_OPERAND (rhs, 2));
6222 else if (TREE_CODE (rhs) == MEM_REF
6223 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6225 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6226 if (TREE_CODE (val) == ADDR_EXPR
6227 && is_gimple_min_invariant (val))
6229 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6230 unshare_expr (val),
6231 TREE_OPERAND (rhs, 1));
6232 if (tem)
6233 rhs = tem;
6236 return fold_const_aggregate_ref_1 (rhs, valueize);
6238 else if (kind == tcc_declaration)
6239 return get_symbol_constant_value (rhs);
6240 return rhs;
6243 case GIMPLE_UNARY_RHS:
6244 return NULL_TREE;
6246 case GIMPLE_BINARY_RHS:
6247 /* Translate &x + CST into an invariant form suitable for
6248 further propagation. */
6249 if (subcode == POINTER_PLUS_EXPR)
6251 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6252 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6253 if (TREE_CODE (op0) == ADDR_EXPR
6254 && TREE_CODE (op1) == INTEGER_CST)
6256 tree off = fold_convert (ptr_type_node, op1);
6257 return build_fold_addr_expr_loc
6258 (loc,
6259 fold_build2 (MEM_REF,
6260 TREE_TYPE (TREE_TYPE (op0)),
6261 unshare_expr (op0), off));
6264 /* Canonicalize bool != 0 and bool == 0 appearing after
6265 valueization. While gimple_simplify handles this
6266 it can get confused by the ~X == 1 -> X == 0 transform
6267 which we cant reduce to a SSA name or a constant
6268 (and we have no way to tell gimple_simplify to not
6269 consider those transforms in the first place). */
6270 else if (subcode == EQ_EXPR
6271 || subcode == NE_EXPR)
6273 tree lhs = gimple_assign_lhs (stmt);
6274 tree op0 = gimple_assign_rhs1 (stmt);
6275 if (useless_type_conversion_p (TREE_TYPE (lhs),
6276 TREE_TYPE (op0)))
6278 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6279 op0 = (*valueize) (op0);
6280 if (TREE_CODE (op0) == INTEGER_CST)
6281 std::swap (op0, op1);
6282 if (TREE_CODE (op1) == INTEGER_CST
6283 && ((subcode == NE_EXPR && integer_zerop (op1))
6284 || (subcode == EQ_EXPR && integer_onep (op1))))
6285 return op0;
6288 return NULL_TREE;
6290 case GIMPLE_TERNARY_RHS:
6292 /* Handle ternary operators that can appear in GIMPLE form. */
6293 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6294 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6295 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6296 return fold_ternary_loc (loc, subcode,
6297 gimple_expr_type (stmt), op0, op1, op2);
6300 default:
6301 gcc_unreachable ();
6305 case GIMPLE_CALL:
6307 tree fn;
6308 gcall *call_stmt = as_a <gcall *> (stmt);
6310 if (gimple_call_internal_p (stmt))
6312 enum tree_code subcode = ERROR_MARK;
6313 switch (gimple_call_internal_fn (stmt))
6315 case IFN_UBSAN_CHECK_ADD:
6316 subcode = PLUS_EXPR;
6317 break;
6318 case IFN_UBSAN_CHECK_SUB:
6319 subcode = MINUS_EXPR;
6320 break;
6321 case IFN_UBSAN_CHECK_MUL:
6322 subcode = MULT_EXPR;
6323 break;
6324 case IFN_BUILTIN_EXPECT:
6326 tree arg0 = gimple_call_arg (stmt, 0);
6327 tree op0 = (*valueize) (arg0);
6328 if (TREE_CODE (op0) == INTEGER_CST)
6329 return op0;
6330 return NULL_TREE;
6332 default:
6333 return NULL_TREE;
6335 tree arg0 = gimple_call_arg (stmt, 0);
6336 tree arg1 = gimple_call_arg (stmt, 1);
6337 tree op0 = (*valueize) (arg0);
6338 tree op1 = (*valueize) (arg1);
6340 if (TREE_CODE (op0) != INTEGER_CST
6341 || TREE_CODE (op1) != INTEGER_CST)
6343 switch (subcode)
6345 case MULT_EXPR:
6346 /* x * 0 = 0 * x = 0 without overflow. */
6347 if (integer_zerop (op0) || integer_zerop (op1))
6348 return build_zero_cst (TREE_TYPE (arg0));
6349 break;
6350 case MINUS_EXPR:
6351 /* y - y = 0 without overflow. */
6352 if (operand_equal_p (op0, op1, 0))
6353 return build_zero_cst (TREE_TYPE (arg0));
6354 break;
6355 default:
6356 break;
6359 tree res
6360 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6361 if (res
6362 && TREE_CODE (res) == INTEGER_CST
6363 && !TREE_OVERFLOW (res))
6364 return res;
6365 return NULL_TREE;
6368 fn = (*valueize) (gimple_call_fn (stmt));
6369 if (TREE_CODE (fn) == ADDR_EXPR
6370 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6371 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6372 && gimple_builtin_call_types_compatible_p (stmt,
6373 TREE_OPERAND (fn, 0)))
6375 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6376 tree retval;
6377 unsigned i;
6378 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6379 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6380 retval = fold_builtin_call_array (loc,
6381 gimple_call_return_type (call_stmt),
6382 fn, gimple_call_num_args (stmt), args);
6383 if (retval)
6385 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6386 STRIP_NOPS (retval);
6387 retval = fold_convert (gimple_call_return_type (call_stmt),
6388 retval);
6390 return retval;
6392 return NULL_TREE;
6395 default:
6396 return NULL_TREE;
6400 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6401 Returns NULL_TREE if folding to a constant is not possible, otherwise
6402 returns a constant according to is_gimple_min_invariant. */
6404 tree
6405 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6407 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6408 if (res && is_gimple_min_invariant (res))
6409 return res;
6410 return NULL_TREE;
6414 /* The following set of functions are supposed to fold references using
6415 their constant initializers. */
6417 /* See if we can find constructor defining value of BASE.
6418 When we know the consructor with constant offset (such as
6419 base is array[40] and we do know constructor of array), then
6420 BIT_OFFSET is adjusted accordingly.
6422 As a special case, return error_mark_node when constructor
6423 is not explicitly available, but it is known to be zero
6424 such as 'static const int a;'. */
6425 static tree
6426 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6427 tree (*valueize)(tree))
6429 poly_int64 bit_offset2, size, max_size;
6430 bool reverse;
6432 if (TREE_CODE (base) == MEM_REF)
6434 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6435 if (!boff.to_shwi (bit_offset))
6436 return NULL_TREE;
6438 if (valueize
6439 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6440 base = valueize (TREE_OPERAND (base, 0));
6441 if (!base || TREE_CODE (base) != ADDR_EXPR)
6442 return NULL_TREE;
6443 base = TREE_OPERAND (base, 0);
6445 else if (valueize
6446 && TREE_CODE (base) == SSA_NAME)
6447 base = valueize (base);
6449 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6450 DECL_INITIAL. If BASE is a nested reference into another
6451 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6452 the inner reference. */
6453 switch (TREE_CODE (base))
6455 case VAR_DECL:
6456 case CONST_DECL:
6458 tree init = ctor_for_folding (base);
6460 /* Our semantic is exact opposite of ctor_for_folding;
6461 NULL means unknown, while error_mark_node is 0. */
6462 if (init == error_mark_node)
6463 return NULL_TREE;
6464 if (!init)
6465 return error_mark_node;
6466 return init;
6469 case VIEW_CONVERT_EXPR:
6470 return get_base_constructor (TREE_OPERAND (base, 0),
6471 bit_offset, valueize);
6473 case ARRAY_REF:
6474 case COMPONENT_REF:
6475 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6476 &reverse);
6477 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6478 return NULL_TREE;
6479 *bit_offset += bit_offset2;
6480 return get_base_constructor (base, bit_offset, valueize);
6482 case CONSTRUCTOR:
6483 return base;
6485 default:
6486 if (CONSTANT_CLASS_P (base))
6487 return base;
6489 return NULL_TREE;
6493 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6494 SIZE to the memory at bit OFFSET. */
6496 static tree
6497 fold_array_ctor_reference (tree type, tree ctor,
6498 unsigned HOST_WIDE_INT offset,
6499 unsigned HOST_WIDE_INT size,
6500 tree from_decl)
6502 offset_int low_bound;
6503 offset_int elt_size;
6504 offset_int access_index;
6505 tree domain_type = NULL_TREE;
6506 HOST_WIDE_INT inner_offset;
6508 /* Compute low bound and elt size. */
6509 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6510 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6511 if (domain_type && TYPE_MIN_VALUE (domain_type))
6513 /* Static constructors for variably sized objects makes no sense. */
6514 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6515 return NULL_TREE;
6516 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6518 else
6519 low_bound = 0;
6520 /* Static constructors for variably sized objects makes no sense. */
6521 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6522 return NULL_TREE;
6523 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6525 /* We can handle only constantly sized accesses that are known to not
6526 be larger than size of array element. */
6527 if (!TYPE_SIZE_UNIT (type)
6528 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6529 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6530 || elt_size == 0)
6531 return NULL_TREE;
6533 /* Compute the array index we look for. */
6534 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6535 elt_size);
6536 access_index += low_bound;
6538 /* And offset within the access. */
6539 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6541 /* See if the array field is large enough to span whole access. We do not
6542 care to fold accesses spanning multiple array indexes. */
6543 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6544 return NULL_TREE;
6545 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6546 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6548 /* When memory is not explicitely mentioned in constructor,
6549 it is 0 (or out of range). */
6550 return build_zero_cst (type);
6553 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6554 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6556 static tree
6557 fold_nonarray_ctor_reference (tree type, tree ctor,
6558 unsigned HOST_WIDE_INT offset,
6559 unsigned HOST_WIDE_INT size,
6560 tree from_decl)
6562 unsigned HOST_WIDE_INT cnt;
6563 tree cfield, cval;
6565 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6566 cval)
6568 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6569 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6570 tree field_size = DECL_SIZE (cfield);
6571 offset_int bitoffset;
6572 offset_int bitoffset_end, access_end;
6574 /* Variable sized objects in static constructors makes no sense,
6575 but field_size can be NULL for flexible array members. */
6576 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6577 && TREE_CODE (byte_offset) == INTEGER_CST
6578 && (field_size != NULL_TREE
6579 ? TREE_CODE (field_size) == INTEGER_CST
6580 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6582 /* Compute bit offset of the field. */
6583 bitoffset = (wi::to_offset (field_offset)
6584 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6585 /* Compute bit offset where the field ends. */
6586 if (field_size != NULL_TREE)
6587 bitoffset_end = bitoffset + wi::to_offset (field_size);
6588 else
6589 bitoffset_end = 0;
6591 access_end = offset_int (offset) + size;
6593 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6594 [BITOFFSET, BITOFFSET_END)? */
6595 if (wi::cmps (access_end, bitoffset) > 0
6596 && (field_size == NULL_TREE
6597 || wi::lts_p (offset, bitoffset_end)))
6599 offset_int inner_offset = offset_int (offset) - bitoffset;
6600 /* We do have overlap. Now see if field is large enough to
6601 cover the access. Give up for accesses spanning multiple
6602 fields. */
6603 if (wi::cmps (access_end, bitoffset_end) > 0)
6604 return NULL_TREE;
6605 if (offset < bitoffset)
6606 return NULL_TREE;
6607 return fold_ctor_reference (type, cval,
6608 inner_offset.to_uhwi (), size,
6609 from_decl);
6612 /* When memory is not explicitely mentioned in constructor, it is 0. */
6613 return build_zero_cst (type);
6616 /* CTOR is value initializing memory, fold reference of type TYPE and
6617 size POLY_SIZE to the memory at bit POLY_OFFSET. */
6619 tree
6620 fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
6621 poly_uint64 poly_size, tree from_decl)
6623 tree ret;
6625 /* We found the field with exact match. */
6626 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6627 && known_eq (poly_offset, 0U))
6628 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6630 /* The remaining optimizations need a constant size and offset. */
6631 unsigned HOST_WIDE_INT size, offset;
6632 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6633 return NULL_TREE;
6635 /* We are at the end of walk, see if we can view convert the
6636 result. */
6637 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6638 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6639 && !compare_tree_int (TYPE_SIZE (type), size)
6640 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6642 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6643 if (ret)
6645 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6646 if (ret)
6647 STRIP_USELESS_TYPE_CONVERSION (ret);
6649 return ret;
6651 /* For constants and byte-aligned/sized reads try to go through
6652 native_encode/interpret. */
6653 if (CONSTANT_CLASS_P (ctor)
6654 && BITS_PER_UNIT == 8
6655 && offset % BITS_PER_UNIT == 0
6656 && size % BITS_PER_UNIT == 0
6657 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6659 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6660 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6661 offset / BITS_PER_UNIT);
6662 if (len > 0)
6663 return native_interpret_expr (type, buf, len);
6665 if (TREE_CODE (ctor) == CONSTRUCTOR)
6668 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6669 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6670 return fold_array_ctor_reference (type, ctor, offset, size,
6671 from_decl);
6672 else
6673 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6674 from_decl);
6677 return NULL_TREE;
6680 /* Return the tree representing the element referenced by T if T is an
6681 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6682 names using VALUEIZE. Return NULL_TREE otherwise. */
6684 tree
6685 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6687 tree ctor, idx, base;
6688 poly_int64 offset, size, max_size;
6689 tree tem;
6690 bool reverse;
6692 if (TREE_THIS_VOLATILE (t))
6693 return NULL_TREE;
6695 if (DECL_P (t))
6696 return get_symbol_constant_value (t);
6698 tem = fold_read_from_constant_string (t);
6699 if (tem)
6700 return tem;
6702 switch (TREE_CODE (t))
6704 case ARRAY_REF:
6705 case ARRAY_RANGE_REF:
6706 /* Constant indexes are handled well by get_base_constructor.
6707 Only special case variable offsets.
6708 FIXME: This code can't handle nested references with variable indexes
6709 (they will be handled only by iteration of ccp). Perhaps we can bring
6710 get_ref_base_and_extent here and make it use a valueize callback. */
6711 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6712 && valueize
6713 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6714 && poly_int_tree_p (idx))
6716 tree low_bound, unit_size;
6718 /* If the resulting bit-offset is constant, track it. */
6719 if ((low_bound = array_ref_low_bound (t),
6720 poly_int_tree_p (low_bound))
6721 && (unit_size = array_ref_element_size (t),
6722 tree_fits_uhwi_p (unit_size)))
6724 poly_offset_int woffset
6725 = wi::sext (wi::to_poly_offset (idx)
6726 - wi::to_poly_offset (low_bound),
6727 TYPE_PRECISION (TREE_TYPE (idx)));
6729 if (woffset.to_shwi (&offset))
6731 /* TODO: This code seems wrong, multiply then check
6732 to see if it fits. */
6733 offset *= tree_to_uhwi (unit_size);
6734 offset *= BITS_PER_UNIT;
6736 base = TREE_OPERAND (t, 0);
6737 ctor = get_base_constructor (base, &offset, valueize);
6738 /* Empty constructor. Always fold to 0. */
6739 if (ctor == error_mark_node)
6740 return build_zero_cst (TREE_TYPE (t));
6741 /* Out of bound array access. Value is undefined,
6742 but don't fold. */
6743 if (maybe_lt (offset, 0))
6744 return NULL_TREE;
6745 /* We can not determine ctor. */
6746 if (!ctor)
6747 return NULL_TREE;
6748 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6749 tree_to_uhwi (unit_size)
6750 * BITS_PER_UNIT,
6751 base);
6755 /* Fallthru. */
6757 case COMPONENT_REF:
6758 case BIT_FIELD_REF:
6759 case TARGET_MEM_REF:
6760 case MEM_REF:
6761 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6762 ctor = get_base_constructor (base, &offset, valueize);
6764 /* Empty constructor. Always fold to 0. */
6765 if (ctor == error_mark_node)
6766 return build_zero_cst (TREE_TYPE (t));
6767 /* We do not know precise address. */
6768 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6769 return NULL_TREE;
6770 /* We can not determine ctor. */
6771 if (!ctor)
6772 return NULL_TREE;
6774 /* Out of bound array access. Value is undefined, but don't fold. */
6775 if (maybe_lt (offset, 0))
6776 return NULL_TREE;
6778 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6779 base);
6781 case REALPART_EXPR:
6782 case IMAGPART_EXPR:
6784 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6785 if (c && TREE_CODE (c) == COMPLEX_CST)
6786 return fold_build1_loc (EXPR_LOCATION (t),
6787 TREE_CODE (t), TREE_TYPE (t), c);
6788 break;
6791 default:
6792 break;
6795 return NULL_TREE;
6798 tree
6799 fold_const_aggregate_ref (tree t)
6801 return fold_const_aggregate_ref_1 (t, NULL);
6804 /* Lookup virtual method with index TOKEN in a virtual table V
6805 at OFFSET.
6806 Set CAN_REFER if non-NULL to false if method
6807 is not referable or if the virtual table is ill-formed (such as rewriten
6808 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6810 tree
6811 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6812 tree v,
6813 unsigned HOST_WIDE_INT offset,
6814 bool *can_refer)
6816 tree vtable = v, init, fn;
6817 unsigned HOST_WIDE_INT size;
6818 unsigned HOST_WIDE_INT elt_size, access_index;
6819 tree domain_type;
6821 if (can_refer)
6822 *can_refer = true;
6824 /* First of all double check we have virtual table. */
6825 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6827 /* Pass down that we lost track of the target. */
6828 if (can_refer)
6829 *can_refer = false;
6830 return NULL_TREE;
6833 init = ctor_for_folding (v);
6835 /* The virtual tables should always be born with constructors
6836 and we always should assume that they are avaialble for
6837 folding. At the moment we do not stream them in all cases,
6838 but it should never happen that ctor seem unreachable. */
6839 gcc_assert (init);
6840 if (init == error_mark_node)
6842 /* Pass down that we lost track of the target. */
6843 if (can_refer)
6844 *can_refer = false;
6845 return NULL_TREE;
6847 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6848 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6849 offset *= BITS_PER_UNIT;
6850 offset += token * size;
6852 /* Lookup the value in the constructor that is assumed to be array.
6853 This is equivalent to
6854 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6855 offset, size, NULL);
6856 but in a constant time. We expect that frontend produced a simple
6857 array without indexed initializers. */
6859 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6860 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6861 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6862 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6864 access_index = offset / BITS_PER_UNIT / elt_size;
6865 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6867 /* This code makes an assumption that there are no
6868 indexed fileds produced by C++ FE, so we can directly index the array. */
6869 if (access_index < CONSTRUCTOR_NELTS (init))
6871 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6872 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6873 STRIP_NOPS (fn);
6875 else
6876 fn = NULL;
6878 /* For type inconsistent program we may end up looking up virtual method
6879 in virtual table that does not contain TOKEN entries. We may overrun
6880 the virtual table and pick up a constant or RTTI info pointer.
6881 In any case the call is undefined. */
6882 if (!fn
6883 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6884 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6885 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6886 else
6888 fn = TREE_OPERAND (fn, 0);
6890 /* When cgraph node is missing and function is not public, we cannot
6891 devirtualize. This can happen in WHOPR when the actual method
6892 ends up in other partition, because we found devirtualization
6893 possibility too late. */
6894 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6896 if (can_refer)
6898 *can_refer = false;
6899 return fn;
6901 return NULL_TREE;
6905 /* Make sure we create a cgraph node for functions we'll reference.
6906 They can be non-existent if the reference comes from an entry
6907 of an external vtable for example. */
6908 cgraph_node::get_create (fn);
6910 return fn;
6913 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6914 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6915 KNOWN_BINFO carries the binfo describing the true type of
6916 OBJ_TYPE_REF_OBJECT(REF).
6917 Set CAN_REFER if non-NULL to false if method
6918 is not referable or if the virtual table is ill-formed (such as rewriten
6919 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6921 tree
6922 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6923 bool *can_refer)
6925 unsigned HOST_WIDE_INT offset;
6926 tree v;
6928 v = BINFO_VTABLE (known_binfo);
6929 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6930 if (!v)
6931 return NULL_TREE;
6933 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6935 if (can_refer)
6936 *can_refer = false;
6937 return NULL_TREE;
6939 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6942 /* Given a pointer value T, return a simplified version of an
6943 indirection through T, or NULL_TREE if no simplification is
6944 possible. Note that the resulting type may be different from
6945 the type pointed to in the sense that it is still compatible
6946 from the langhooks point of view. */
6948 tree
6949 gimple_fold_indirect_ref (tree t)
6951 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6952 tree sub = t;
6953 tree subtype;
6955 STRIP_NOPS (sub);
6956 subtype = TREE_TYPE (sub);
6957 if (!POINTER_TYPE_P (subtype)
6958 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6959 return NULL_TREE;
6961 if (TREE_CODE (sub) == ADDR_EXPR)
6963 tree op = TREE_OPERAND (sub, 0);
6964 tree optype = TREE_TYPE (op);
6965 /* *&p => p */
6966 if (useless_type_conversion_p (type, optype))
6967 return op;
6969 /* *(foo *)&fooarray => fooarray[0] */
6970 if (TREE_CODE (optype) == ARRAY_TYPE
6971 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6972 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6974 tree type_domain = TYPE_DOMAIN (optype);
6975 tree min_val = size_zero_node;
6976 if (type_domain && TYPE_MIN_VALUE (type_domain))
6977 min_val = TYPE_MIN_VALUE (type_domain);
6978 if (TREE_CODE (min_val) == INTEGER_CST)
6979 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6981 /* *(foo *)&complexfoo => __real__ complexfoo */
6982 else if (TREE_CODE (optype) == COMPLEX_TYPE
6983 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6984 return fold_build1 (REALPART_EXPR, type, op);
6985 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6986 else if (TREE_CODE (optype) == VECTOR_TYPE
6987 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6989 tree part_width = TYPE_SIZE (type);
6990 tree index = bitsize_int (0);
6991 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6995 /* *(p + CST) -> ... */
6996 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6997 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6999 tree addr = TREE_OPERAND (sub, 0);
7000 tree off = TREE_OPERAND (sub, 1);
7001 tree addrtype;
7003 STRIP_NOPS (addr);
7004 addrtype = TREE_TYPE (addr);
7006 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7007 if (TREE_CODE (addr) == ADDR_EXPR
7008 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7009 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7010 && tree_fits_uhwi_p (off))
7012 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7013 tree part_width = TYPE_SIZE (type);
7014 unsigned HOST_WIDE_INT part_widthi
7015 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7016 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7017 tree index = bitsize_int (indexi);
7018 if (known_lt (offset / part_widthi,
7019 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7020 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7021 part_width, index);
7024 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7025 if (TREE_CODE (addr) == ADDR_EXPR
7026 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7027 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7029 tree size = TYPE_SIZE_UNIT (type);
7030 if (tree_int_cst_equal (size, off))
7031 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7034 /* *(p + CST) -> MEM_REF <p, CST>. */
7035 if (TREE_CODE (addr) != ADDR_EXPR
7036 || DECL_P (TREE_OPERAND (addr, 0)))
7037 return fold_build2 (MEM_REF, type,
7038 addr,
7039 wide_int_to_tree (ptype, wi::to_wide (off)));
7042 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7043 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7044 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7045 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7047 tree type_domain;
7048 tree min_val = size_zero_node;
7049 tree osub = sub;
7050 sub = gimple_fold_indirect_ref (sub);
7051 if (! sub)
7052 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7053 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7054 if (type_domain && TYPE_MIN_VALUE (type_domain))
7055 min_val = TYPE_MIN_VALUE (type_domain);
7056 if (TREE_CODE (min_val) == INTEGER_CST)
7057 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7060 return NULL_TREE;
7063 /* Return true if CODE is an operation that when operating on signed
7064 integer types involves undefined behavior on overflow and the
7065 operation can be expressed with unsigned arithmetic. */
7067 bool
7068 arith_code_with_undefined_signed_overflow (tree_code code)
7070 switch (code)
7072 case PLUS_EXPR:
7073 case MINUS_EXPR:
7074 case MULT_EXPR:
7075 case NEGATE_EXPR:
7076 case POINTER_PLUS_EXPR:
7077 return true;
7078 default:
7079 return false;
7083 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7084 operation that can be transformed to unsigned arithmetic by converting
7085 its operand, carrying out the operation in the corresponding unsigned
7086 type and converting the result back to the original type.
7088 Returns a sequence of statements that replace STMT and also contain
7089 a modified form of STMT itself. */
7091 gimple_seq
7092 rewrite_to_defined_overflow (gimple *stmt)
7094 if (dump_file && (dump_flags & TDF_DETAILS))
7096 fprintf (dump_file, "rewriting stmt with undefined signed "
7097 "overflow ");
7098 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7101 tree lhs = gimple_assign_lhs (stmt);
7102 tree type = unsigned_type_for (TREE_TYPE (lhs));
7103 gimple_seq stmts = NULL;
7104 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7106 tree op = gimple_op (stmt, i);
7107 op = gimple_convert (&stmts, type, op);
7108 gimple_set_op (stmt, i, op);
7110 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7111 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7112 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7113 gimple_seq_add_stmt (&stmts, stmt);
7114 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7115 gimple_seq_add_stmt (&stmts, cvt);
7117 return stmts;
7121 /* The valueization hook we use for the gimple_build API simplification.
7122 This makes us match fold_buildN behavior by only combining with
7123 statements in the sequence(s) we are currently building. */
7125 static tree
7126 gimple_build_valueize (tree op)
7128 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7129 return op;
7130 return NULL_TREE;
7133 /* Build the expression CODE OP0 of type TYPE with location LOC,
7134 simplifying it first if possible. Returns the built
7135 expression value and appends statements possibly defining it
7136 to SEQ. */
7138 tree
7139 gimple_build (gimple_seq *seq, location_t loc,
7140 enum tree_code code, tree type, tree op0)
7142 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7143 if (!res)
7145 res = create_tmp_reg_or_ssa_name (type);
7146 gimple *stmt;
7147 if (code == REALPART_EXPR
7148 || code == IMAGPART_EXPR
7149 || code == VIEW_CONVERT_EXPR)
7150 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7151 else
7152 stmt = gimple_build_assign (res, code, op0);
7153 gimple_set_location (stmt, loc);
7154 gimple_seq_add_stmt_without_update (seq, stmt);
7156 return res;
7159 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7160 simplifying it first if possible. Returns the built
7161 expression value and appends statements possibly defining it
7162 to SEQ. */
7164 tree
7165 gimple_build (gimple_seq *seq, location_t loc,
7166 enum tree_code code, tree type, tree op0, tree op1)
7168 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7169 if (!res)
7171 res = create_tmp_reg_or_ssa_name (type);
7172 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7173 gimple_set_location (stmt, loc);
7174 gimple_seq_add_stmt_without_update (seq, stmt);
7176 return res;
7179 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7180 simplifying it first if possible. Returns the built
7181 expression value and appends statements possibly defining it
7182 to SEQ. */
7184 tree
7185 gimple_build (gimple_seq *seq, location_t loc,
7186 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7188 tree res = gimple_simplify (code, type, op0, op1, op2,
7189 seq, gimple_build_valueize);
7190 if (!res)
7192 res = create_tmp_reg_or_ssa_name (type);
7193 gimple *stmt;
7194 if (code == BIT_FIELD_REF)
7195 stmt = gimple_build_assign (res, code,
7196 build3 (code, type, op0, op1, op2));
7197 else
7198 stmt = gimple_build_assign (res, code, op0, op1, op2);
7199 gimple_set_location (stmt, loc);
7200 gimple_seq_add_stmt_without_update (seq, stmt);
7202 return res;
7205 /* Build the call FN (ARG0) with a result of type TYPE
7206 (or no result if TYPE is void) with location LOC,
7207 simplifying it first if possible. Returns the built
7208 expression value (or NULL_TREE if TYPE is void) and appends
7209 statements possibly defining it to SEQ. */
7211 tree
7212 gimple_build (gimple_seq *seq, location_t loc,
7213 enum built_in_function fn, tree type, tree arg0)
7215 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7216 if (!res)
7218 tree decl = builtin_decl_implicit (fn);
7219 gimple *stmt = gimple_build_call (decl, 1, arg0);
7220 if (!VOID_TYPE_P (type))
7222 res = create_tmp_reg_or_ssa_name (type);
7223 gimple_call_set_lhs (stmt, res);
7225 gimple_set_location (stmt, loc);
7226 gimple_seq_add_stmt_without_update (seq, stmt);
7228 return res;
7231 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7232 (or no result if TYPE is void) with location LOC,
7233 simplifying it first if possible. Returns the built
7234 expression value (or NULL_TREE if TYPE is void) and appends
7235 statements possibly defining it to SEQ. */
7237 tree
7238 gimple_build (gimple_seq *seq, location_t loc,
7239 enum built_in_function fn, tree type, tree arg0, tree arg1)
7241 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7242 if (!res)
7244 tree decl = builtin_decl_implicit (fn);
7245 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7246 if (!VOID_TYPE_P (type))
7248 res = create_tmp_reg_or_ssa_name (type);
7249 gimple_call_set_lhs (stmt, res);
7251 gimple_set_location (stmt, loc);
7252 gimple_seq_add_stmt_without_update (seq, stmt);
7254 return res;
7257 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7258 (or no result if TYPE is void) with location LOC,
7259 simplifying it first if possible. Returns the built
7260 expression value (or NULL_TREE if TYPE is void) and appends
7261 statements possibly defining it to SEQ. */
7263 tree
7264 gimple_build (gimple_seq *seq, location_t loc,
7265 enum built_in_function fn, tree type,
7266 tree arg0, tree arg1, tree arg2)
7268 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7269 seq, gimple_build_valueize);
7270 if (!res)
7272 tree decl = builtin_decl_implicit (fn);
7273 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7274 if (!VOID_TYPE_P (type))
7276 res = create_tmp_reg_or_ssa_name (type);
7277 gimple_call_set_lhs (stmt, res);
7279 gimple_set_location (stmt, loc);
7280 gimple_seq_add_stmt_without_update (seq, stmt);
7282 return res;
7285 /* Build the conversion (TYPE) OP with a result of type TYPE
7286 with location LOC if such conversion is neccesary in GIMPLE,
7287 simplifying it first.
7288 Returns the built expression value and appends
7289 statements possibly defining it to SEQ. */
7291 tree
7292 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7294 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7295 return op;
7296 return gimple_build (seq, loc, NOP_EXPR, type, op);
7299 /* Build the conversion (ptrofftype) OP with a result of a type
7300 compatible with ptrofftype with location LOC if such conversion
7301 is neccesary in GIMPLE, simplifying it first.
7302 Returns the built expression value and appends
7303 statements possibly defining it to SEQ. */
7305 tree
7306 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7308 if (ptrofftype_p (TREE_TYPE (op)))
7309 return op;
7310 return gimple_convert (seq, loc, sizetype, op);
7313 /* Build a vector of type TYPE in which each element has the value OP.
7314 Return a gimple value for the result, appending any new statements
7315 to SEQ. */
7317 tree
7318 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7319 tree op)
7321 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7322 && !CONSTANT_CLASS_P (op))
7323 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7325 tree res, vec = build_vector_from_val (type, op);
7326 if (is_gimple_val (vec))
7327 return vec;
7328 if (gimple_in_ssa_p (cfun))
7329 res = make_ssa_name (type);
7330 else
7331 res = create_tmp_reg (type);
7332 gimple *stmt = gimple_build_assign (res, vec);
7333 gimple_set_location (stmt, loc);
7334 gimple_seq_add_stmt_without_update (seq, stmt);
7335 return res;
7338 /* Build a vector from BUILDER, handling the case in which some elements
7339 are non-constant. Return a gimple value for the result, appending any
7340 new instructions to SEQ.
7342 BUILDER must not have a stepped encoding on entry. This is because
7343 the function is not geared up to handle the arithmetic that would
7344 be needed in the variable case, and any code building a vector that
7345 is known to be constant should use BUILDER->build () directly. */
7347 tree
7348 gimple_build_vector (gimple_seq *seq, location_t loc,
7349 tree_vector_builder *builder)
7351 gcc_assert (builder->nelts_per_pattern () <= 2);
7352 unsigned int encoded_nelts = builder->encoded_nelts ();
7353 for (unsigned int i = 0; i < encoded_nelts; ++i)
7354 if (!TREE_CONSTANT ((*builder)[i]))
7356 tree type = builder->type ();
7357 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7358 vec<constructor_elt, va_gc> *v;
7359 vec_alloc (v, nelts);
7360 for (i = 0; i < nelts; ++i)
7361 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7363 tree res;
7364 if (gimple_in_ssa_p (cfun))
7365 res = make_ssa_name (type);
7366 else
7367 res = create_tmp_reg (type);
7368 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7369 gimple_set_location (stmt, loc);
7370 gimple_seq_add_stmt_without_update (seq, stmt);
7371 return res;
7373 return builder->build ();
7376 /* Return true if the result of assignment STMT is known to be non-negative.
7377 If the return value is based on the assumption that signed overflow is
7378 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7379 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7381 static bool
7382 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7383 int depth)
7385 enum tree_code code = gimple_assign_rhs_code (stmt);
7386 switch (get_gimple_rhs_class (code))
7388 case GIMPLE_UNARY_RHS:
7389 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7390 gimple_expr_type (stmt),
7391 gimple_assign_rhs1 (stmt),
7392 strict_overflow_p, depth);
7393 case GIMPLE_BINARY_RHS:
7394 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7395 gimple_expr_type (stmt),
7396 gimple_assign_rhs1 (stmt),
7397 gimple_assign_rhs2 (stmt),
7398 strict_overflow_p, depth);
7399 case GIMPLE_TERNARY_RHS:
7400 return false;
7401 case GIMPLE_SINGLE_RHS:
7402 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7403 strict_overflow_p, depth);
7404 case GIMPLE_INVALID_RHS:
7405 break;
7407 gcc_unreachable ();
7410 /* Return true if return value of call STMT is known to be non-negative.
7411 If the return value is based on the assumption that signed overflow is
7412 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7413 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7415 static bool
7416 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7417 int depth)
7419 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7420 gimple_call_arg (stmt, 0) : NULL_TREE;
7421 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7422 gimple_call_arg (stmt, 1) : NULL_TREE;
7424 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7425 gimple_call_combined_fn (stmt),
7426 arg0,
7427 arg1,
7428 strict_overflow_p, depth);
7431 /* Return true if return value of call STMT is known to be non-negative.
7432 If the return value is based on the assumption that signed overflow is
7433 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7434 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7436 static bool
7437 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7438 int depth)
7440 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7442 tree arg = gimple_phi_arg_def (stmt, i);
7443 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7444 return false;
7446 return true;
7449 /* Return true if STMT is known to compute a non-negative value.
7450 If the return value is based on the assumption that signed overflow is
7451 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7452 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7454 bool
7455 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7456 int depth)
7458 switch (gimple_code (stmt))
7460 case GIMPLE_ASSIGN:
7461 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7462 depth);
7463 case GIMPLE_CALL:
7464 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7465 depth);
7466 case GIMPLE_PHI:
7467 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7468 depth);
7469 default:
7470 return false;
7474 /* Return true if the floating-point value computed by assignment STMT
7475 is known to have an integer value. We also allow +Inf, -Inf and NaN
7476 to be considered integer values. Return false for signaling NaN.
7478 DEPTH is the current nesting depth of the query. */
7480 static bool
7481 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7483 enum tree_code code = gimple_assign_rhs_code (stmt);
7484 switch (get_gimple_rhs_class (code))
7486 case GIMPLE_UNARY_RHS:
7487 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7488 gimple_assign_rhs1 (stmt), depth);
7489 case GIMPLE_BINARY_RHS:
7490 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7491 gimple_assign_rhs1 (stmt),
7492 gimple_assign_rhs2 (stmt), depth);
7493 case GIMPLE_TERNARY_RHS:
7494 return false;
7495 case GIMPLE_SINGLE_RHS:
7496 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7497 case GIMPLE_INVALID_RHS:
7498 break;
7500 gcc_unreachable ();
7503 /* Return true if the floating-point value computed by call STMT is known
7504 to have an integer value. We also allow +Inf, -Inf and NaN to be
7505 considered integer values. Return false for signaling NaN.
7507 DEPTH is the current nesting depth of the query. */
7509 static bool
7510 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7512 tree arg0 = (gimple_call_num_args (stmt) > 0
7513 ? gimple_call_arg (stmt, 0)
7514 : NULL_TREE);
7515 tree arg1 = (gimple_call_num_args (stmt) > 1
7516 ? gimple_call_arg (stmt, 1)
7517 : NULL_TREE);
7518 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7519 arg0, arg1, depth);
7522 /* Return true if the floating-point result of phi STMT is known to have
7523 an integer value. We also allow +Inf, -Inf and NaN to be considered
7524 integer values. Return false for signaling NaN.
7526 DEPTH is the current nesting depth of the query. */
7528 static bool
7529 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7531 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7533 tree arg = gimple_phi_arg_def (stmt, i);
7534 if (!integer_valued_real_single_p (arg, depth + 1))
7535 return false;
7537 return true;
7540 /* Return true if the floating-point value computed by STMT is known
7541 to have an integer value. We also allow +Inf, -Inf and NaN to be
7542 considered integer values. Return false for signaling NaN.
7544 DEPTH is the current nesting depth of the query. */
7546 bool
7547 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7549 switch (gimple_code (stmt))
7551 case GIMPLE_ASSIGN:
7552 return gimple_assign_integer_valued_real_p (stmt, depth);
7553 case GIMPLE_CALL:
7554 return gimple_call_integer_valued_real_p (stmt, depth);
7555 case GIMPLE_PHI:
7556 return gimple_phi_integer_valued_real_p (stmt, depth);
7557 default:
7558 return false;