PR testsuite/85483: Move aarch64/sve/vcond_1.c test to g++.dg/other/
[official-gcc.git] / gcc / gimple-fold.c
blob7771988b83782d0bdcf64d40fe3a68a9678591d5
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
70 /* Return true when DECL can be referenced from current unit.
71 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
72 We can get declarations that are not possible to reference for various
73 reasons:
75 1) When analyzing C++ virtual tables.
76 C++ virtual tables do have known constructors even
77 when they are keyed to other compilation unit.
78 Those tables can contain pointers to methods and vars
79 in other units. Those methods have both STATIC and EXTERNAL
80 set.
81 2) In WHOPR mode devirtualization might lead to reference
82 to method that was partitioned elsehwere.
83 In this case we have static VAR_DECL or FUNCTION_DECL
84 that has no corresponding callgraph/varpool node
85 declaring the body.
86 3) COMDAT functions referred by external vtables that
87 we devirtualize only during final compilation stage.
88 At this time we already decided that we will not output
89 the function body and thus we can't reference the symbol
90 directly. */
92 static bool
93 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
95 varpool_node *vnode;
96 struct cgraph_node *node;
97 symtab_node *snode;
99 if (DECL_ABSTRACT_P (decl))
100 return false;
102 /* We are concerned only about static/external vars and functions. */
103 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
104 || !VAR_OR_FUNCTION_DECL_P (decl))
105 return true;
107 /* Static objects can be referred only if they was not optimized out yet. */
108 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
110 /* Before we start optimizing unreachable code we can be sure all
111 static objects are defined. */
112 if (symtab->function_flags_ready)
113 return true;
114 snode = symtab_node::get (decl);
115 if (!snode || !snode->definition)
116 return false;
117 node = dyn_cast <cgraph_node *> (snode);
118 return !node || !node->global.inlined_to;
121 /* We will later output the initializer, so we can refer to it.
122 So we are concerned only when DECL comes from initializer of
123 external var or var that has been optimized out. */
124 if (!from_decl
125 || !VAR_P (from_decl)
126 || (!DECL_EXTERNAL (from_decl)
127 && (vnode = varpool_node::get (from_decl)) != NULL
128 && vnode->definition)
129 || (flag_ltrans
130 && (vnode = varpool_node::get (from_decl)) != NULL
131 && vnode->in_other_partition))
132 return true;
133 /* We are folding reference from external vtable. The vtable may reffer
134 to a symbol keyed to other compilation unit. The other compilation
135 unit may be in separate DSO and the symbol may be hidden. */
136 if (DECL_VISIBILITY_SPECIFIED (decl)
137 && DECL_EXTERNAL (decl)
138 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
139 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
140 return false;
141 /* When function is public, we always can introduce new reference.
142 Exception are the COMDAT functions where introducing a direct
143 reference imply need to include function body in the curren tunit. */
144 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
145 return true;
146 /* We have COMDAT. We are going to check if we still have definition
147 or if the definition is going to be output in other partition.
148 Bypass this when gimplifying; all needed functions will be produced.
150 As observed in PR20991 for already optimized out comdat virtual functions
151 it may be tempting to not necessarily give up because the copy will be
152 output elsewhere when corresponding vtable is output.
153 This is however not possible - ABI specify that COMDATs are output in
154 units where they are used and when the other unit was compiled with LTO
155 it is possible that vtable was kept public while the function itself
156 was privatized. */
157 if (!symtab->function_flags_ready)
158 return true;
160 snode = symtab_node::get (decl);
161 if (!snode
162 || ((!snode->definition || DECL_EXTERNAL (decl))
163 && (!snode->in_other_partition
164 || (!snode->forced_by_abi && !snode->force_output))))
165 return false;
166 node = dyn_cast <cgraph_node *> (snode);
167 return !node || !node->global.inlined_to;
170 /* Create a temporary for TYPE for a statement STMT. If the current function
171 is in SSA form, a SSA name is created. Otherwise a temporary register
172 is made. */
174 tree
175 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
177 if (gimple_in_ssa_p (cfun))
178 return make_ssa_name (type, stmt);
179 else
180 return create_tmp_reg (type);
183 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
184 acceptable form for is_gimple_min_invariant.
185 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
187 tree
188 canonicalize_constructor_val (tree cval, tree from_decl)
190 tree orig_cval = cval;
191 STRIP_NOPS (cval);
192 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
193 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
195 tree ptr = TREE_OPERAND (cval, 0);
196 if (is_gimple_min_invariant (ptr))
197 cval = build1_loc (EXPR_LOCATION (cval),
198 ADDR_EXPR, TREE_TYPE (ptr),
199 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
200 ptr,
201 fold_convert (ptr_type_node,
202 TREE_OPERAND (cval, 1))));
204 if (TREE_CODE (cval) == ADDR_EXPR)
206 tree base = NULL_TREE;
207 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
209 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
210 if (base)
211 TREE_OPERAND (cval, 0) = base;
213 else
214 base = get_base_address (TREE_OPERAND (cval, 0));
215 if (!base)
216 return NULL_TREE;
218 if (VAR_OR_FUNCTION_DECL_P (base)
219 && !can_refer_decl_in_current_unit_p (base, from_decl))
220 return NULL_TREE;
221 if (TREE_TYPE (base) == error_mark_node)
222 return NULL_TREE;
223 if (VAR_P (base))
224 TREE_ADDRESSABLE (base) = 1;
225 else if (TREE_CODE (base) == FUNCTION_DECL)
227 /* Make sure we create a cgraph node for functions we'll reference.
228 They can be non-existent if the reference comes from an entry
229 of an external vtable for example. */
230 cgraph_node::get_create (base);
232 /* Fixup types in global initializers. */
233 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
234 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
236 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
237 cval = fold_convert (TREE_TYPE (orig_cval), cval);
238 return cval;
240 if (TREE_OVERFLOW_P (cval))
241 return drop_tree_overflow (cval);
242 return orig_cval;
245 /* If SYM is a constant variable with known value, return the value.
246 NULL_TREE is returned otherwise. */
248 tree
249 get_symbol_constant_value (tree sym)
251 tree val = ctor_for_folding (sym);
252 if (val != error_mark_node)
254 if (val)
256 val = canonicalize_constructor_val (unshare_expr (val), sym);
257 if (val && is_gimple_min_invariant (val))
258 return val;
259 else
260 return NULL_TREE;
262 /* Variables declared 'const' without an initializer
263 have zero as the initializer if they may not be
264 overridden at link or run time. */
265 if (!val
266 && is_gimple_reg_type (TREE_TYPE (sym)))
267 return build_zero_cst (TREE_TYPE (sym));
270 return NULL_TREE;
275 /* Subroutine of fold_stmt. We perform several simplifications of the
276 memory reference tree EXPR and make sure to re-gimplify them properly
277 after propagation of constant addresses. IS_LHS is true if the
278 reference is supposed to be an lvalue. */
280 static tree
281 maybe_fold_reference (tree expr, bool is_lhs)
283 tree result;
285 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
286 || TREE_CODE (expr) == REALPART_EXPR
287 || TREE_CODE (expr) == IMAGPART_EXPR)
288 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
289 return fold_unary_loc (EXPR_LOCATION (expr),
290 TREE_CODE (expr),
291 TREE_TYPE (expr),
292 TREE_OPERAND (expr, 0));
293 else if (TREE_CODE (expr) == BIT_FIELD_REF
294 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
295 return fold_ternary_loc (EXPR_LOCATION (expr),
296 TREE_CODE (expr),
297 TREE_TYPE (expr),
298 TREE_OPERAND (expr, 0),
299 TREE_OPERAND (expr, 1),
300 TREE_OPERAND (expr, 2));
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
307 return NULL_TREE;
311 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
312 replacement rhs for the statement or NULL_TREE if no simplification
313 could be made. It is assumed that the operands have been previously
314 folded. */
316 static tree
317 fold_gimple_assign (gimple_stmt_iterator *si)
319 gimple *stmt = gsi_stmt (*si);
320 enum tree_code subcode = gimple_assign_rhs_code (stmt);
321 location_t loc = gimple_location (stmt);
323 tree result = NULL_TREE;
325 switch (get_gimple_rhs_class (subcode))
327 case GIMPLE_SINGLE_RHS:
329 tree rhs = gimple_assign_rhs1 (stmt);
331 if (TREE_CLOBBER_P (rhs))
332 return NULL_TREE;
334 if (REFERENCE_CLASS_P (rhs))
335 return maybe_fold_reference (rhs, false);
337 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
339 tree val = OBJ_TYPE_REF_EXPR (rhs);
340 if (is_gimple_min_invariant (val))
341 return val;
342 else if (flag_devirtualize && virtual_method_call_p (rhs))
344 bool final;
345 vec <cgraph_node *>targets
346 = possible_polymorphic_call_targets (rhs, stmt, &final);
347 if (final && targets.length () <= 1 && dbg_cnt (devirt))
349 if (dump_enabled_p ())
351 location_t loc = gimple_location_safe (stmt);
352 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
353 "resolving virtual function address "
354 "reference to function %s\n",
355 targets.length () == 1
356 ? targets[0]->name ()
357 : "NULL");
359 if (targets.length () == 1)
361 val = fold_convert (TREE_TYPE (val),
362 build_fold_addr_expr_loc
363 (loc, targets[0]->decl));
364 STRIP_USELESS_TYPE_CONVERSION (val);
366 else
367 /* We can not use __builtin_unreachable here because it
368 can not have address taken. */
369 val = build_int_cst (TREE_TYPE (val), 0);
370 return val;
375 else if (TREE_CODE (rhs) == ADDR_EXPR)
377 tree ref = TREE_OPERAND (rhs, 0);
378 tree tem = maybe_fold_reference (ref, true);
379 if (tem
380 && TREE_CODE (tem) == MEM_REF
381 && integer_zerop (TREE_OPERAND (tem, 1)))
382 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
383 else if (tem)
384 result = fold_convert (TREE_TYPE (rhs),
385 build_fold_addr_expr_loc (loc, tem));
386 else if (TREE_CODE (ref) == MEM_REF
387 && integer_zerop (TREE_OPERAND (ref, 1)))
388 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
390 if (result)
392 /* Strip away useless type conversions. Both the
393 NON_LVALUE_EXPR that may have been added by fold, and
394 "useless" type conversions that might now be apparent
395 due to propagation. */
396 STRIP_USELESS_TYPE_CONVERSION (result);
398 if (result != rhs && valid_gimple_rhs_p (result))
399 return result;
403 else if (TREE_CODE (rhs) == CONSTRUCTOR
404 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
406 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
407 unsigned i;
408 tree val;
410 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
411 if (! CONSTANT_CLASS_P (val))
412 return NULL_TREE;
414 return build_vector_from_ctor (TREE_TYPE (rhs),
415 CONSTRUCTOR_ELTS (rhs));
418 else if (DECL_P (rhs))
419 return get_symbol_constant_value (rhs);
421 break;
423 case GIMPLE_UNARY_RHS:
424 break;
426 case GIMPLE_BINARY_RHS:
427 break;
429 case GIMPLE_TERNARY_RHS:
430 result = fold_ternary_loc (loc, subcode,
431 TREE_TYPE (gimple_assign_lhs (stmt)),
432 gimple_assign_rhs1 (stmt),
433 gimple_assign_rhs2 (stmt),
434 gimple_assign_rhs3 (stmt));
436 if (result)
438 STRIP_USELESS_TYPE_CONVERSION (result);
439 if (valid_gimple_rhs_p (result))
440 return result;
442 break;
444 case GIMPLE_INVALID_RHS:
445 gcc_unreachable ();
448 return NULL_TREE;
452 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
453 adjusting the replacement stmts location and virtual operands.
454 If the statement has a lhs the last stmt in the sequence is expected
455 to assign to that lhs. */
457 static void
458 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
460 gimple *stmt = gsi_stmt (*si_p);
462 if (gimple_has_location (stmt))
463 annotate_all_with_location (stmts, gimple_location (stmt));
465 /* First iterate over the replacement statements backward, assigning
466 virtual operands to their defining statements. */
467 gimple *laststore = NULL;
468 for (gimple_stmt_iterator i = gsi_last (stmts);
469 !gsi_end_p (i); gsi_prev (&i))
471 gimple *new_stmt = gsi_stmt (i);
472 if ((gimple_assign_single_p (new_stmt)
473 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
474 || (is_gimple_call (new_stmt)
475 && (gimple_call_flags (new_stmt)
476 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
478 tree vdef;
479 if (!laststore)
480 vdef = gimple_vdef (stmt);
481 else
482 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
483 gimple_set_vdef (new_stmt, vdef);
484 if (vdef && TREE_CODE (vdef) == SSA_NAME)
485 SSA_NAME_DEF_STMT (vdef) = new_stmt;
486 laststore = new_stmt;
490 /* Second iterate over the statements forward, assigning virtual
491 operands to their uses. */
492 tree reaching_vuse = gimple_vuse (stmt);
493 for (gimple_stmt_iterator i = gsi_start (stmts);
494 !gsi_end_p (i); gsi_next (&i))
496 gimple *new_stmt = gsi_stmt (i);
497 /* If the new statement possibly has a VUSE, update it with exact SSA
498 name we know will reach this one. */
499 if (gimple_has_mem_ops (new_stmt))
500 gimple_set_vuse (new_stmt, reaching_vuse);
501 gimple_set_modified (new_stmt, true);
502 if (gimple_vdef (new_stmt))
503 reaching_vuse = gimple_vdef (new_stmt);
506 /* If the new sequence does not do a store release the virtual
507 definition of the original statement. */
508 if (reaching_vuse
509 && reaching_vuse == gimple_vuse (stmt))
511 tree vdef = gimple_vdef (stmt);
512 if (vdef
513 && TREE_CODE (vdef) == SSA_NAME)
515 unlink_stmt_vdef (stmt);
516 release_ssa_name (vdef);
520 /* Finally replace the original statement with the sequence. */
521 gsi_replace_with_seq (si_p, stmts, false);
524 /* Convert EXPR into a GIMPLE value suitable for substitution on the
525 RHS of an assignment. Insert the necessary statements before
526 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
527 is replaced. If the call is expected to produces a result, then it
528 is replaced by an assignment of the new RHS to the result variable.
529 If the result is to be ignored, then the call is replaced by a
530 GIMPLE_NOP. A proper VDEF chain is retained by making the first
531 VUSE and the last VDEF of the whole sequence be the same as the replaced
532 statement and using new SSA names for stores in between. */
534 void
535 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
537 tree lhs;
538 gimple *stmt, *new_stmt;
539 gimple_stmt_iterator i;
540 gimple_seq stmts = NULL;
542 stmt = gsi_stmt (*si_p);
544 gcc_assert (is_gimple_call (stmt));
546 push_gimplify_context (gimple_in_ssa_p (cfun));
548 lhs = gimple_call_lhs (stmt);
549 if (lhs == NULL_TREE)
551 gimplify_and_add (expr, &stmts);
552 /* We can end up with folding a memcpy of an empty class assignment
553 which gets optimized away by C++ gimplification. */
554 if (gimple_seq_empty_p (stmts))
556 pop_gimplify_context (NULL);
557 if (gimple_in_ssa_p (cfun))
559 unlink_stmt_vdef (stmt);
560 release_defs (stmt);
562 gsi_replace (si_p, gimple_build_nop (), false);
563 return;
566 else
568 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
569 new_stmt = gimple_build_assign (lhs, tmp);
570 i = gsi_last (stmts);
571 gsi_insert_after_without_update (&i, new_stmt,
572 GSI_CONTINUE_LINKING);
575 pop_gimplify_context (NULL);
577 gsi_replace_with_seq_vops (si_p, stmts);
581 /* Replace the call at *GSI with the gimple value VAL. */
583 void
584 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
586 gimple *stmt = gsi_stmt (*gsi);
587 tree lhs = gimple_call_lhs (stmt);
588 gimple *repl;
589 if (lhs)
591 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
592 val = fold_convert (TREE_TYPE (lhs), val);
593 repl = gimple_build_assign (lhs, val);
595 else
596 repl = gimple_build_nop ();
597 tree vdef = gimple_vdef (stmt);
598 if (vdef && TREE_CODE (vdef) == SSA_NAME)
600 unlink_stmt_vdef (stmt);
601 release_ssa_name (vdef);
603 gsi_replace (gsi, repl, false);
606 /* Replace the call at *GSI with the new call REPL and fold that
607 again. */
609 static void
610 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
612 gimple *stmt = gsi_stmt (*gsi);
613 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
614 gimple_set_location (repl, gimple_location (stmt));
615 if (gimple_vdef (stmt)
616 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
618 gimple_set_vdef (repl, gimple_vdef (stmt));
619 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
621 if (gimple_vuse (stmt))
622 gimple_set_vuse (repl, gimple_vuse (stmt));
623 gsi_replace (gsi, repl, false);
624 fold_stmt (gsi);
627 /* Return true if VAR is a VAR_DECL or a component thereof. */
629 static bool
630 var_decl_component_p (tree var)
632 tree inner = var;
633 while (handled_component_p (inner))
634 inner = TREE_OPERAND (inner, 0);
635 return SSA_VAR_P (inner);
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME)
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
702 gsi_replace (gsi, repl, false);
703 return true;
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
721 goto done;
723 else
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
729 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
730 pointers as wide integer) and also may result in huge function
731 size because of inlined bounds copy. Thus don't inline for
732 functions we want to instrument. */
733 if (flag_check_pointer_bounds
734 && chkp_instrumentable_p (cfun->decl)
735 /* Even if data may contain pointers we can inline if copy
736 less than a pointer size. */
737 && (!tree_fits_uhwi_p (len)
738 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
739 return false;
741 /* Build accesses at offset zero with a ref-all character type. */
742 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
743 ptr_mode, true), 0);
745 /* If we can perform the copy efficiently with first doing all loads
746 and then all stores inline it that way. Currently efficiently
747 means that we can load all the memory into a single integer
748 register which is what MOVE_MAX gives us. */
749 src_align = get_pointer_alignment (src);
750 dest_align = get_pointer_alignment (dest);
751 if (tree_fits_uhwi_p (len)
752 && compare_tree_int (len, MOVE_MAX) <= 0
753 /* ??? Don't transform copies from strings with known length this
754 confuses the tree-ssa-strlen.c. This doesn't handle
755 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
756 reason. */
757 && !c_strlen (src, 2))
759 unsigned ilen = tree_to_uhwi (len);
760 if (pow2p_hwi (ilen))
762 /* Detect invalid bounds and overlapping copies and issue
763 either -Warray-bounds or -Wrestrict. */
764 if (!nowarn
765 && check_bounds_or_overlap (as_a <gcall *>(stmt),
766 dest, src, len, len))
767 gimple_set_no_warning (stmt, true);
769 scalar_int_mode mode;
770 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
771 if (type
772 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
773 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
774 /* If the destination pointer is not aligned we must be able
775 to emit an unaligned store. */
776 && (dest_align >= GET_MODE_ALIGNMENT (mode)
777 || !targetm.slow_unaligned_access (mode, dest_align)
778 || (optab_handler (movmisalign_optab, mode)
779 != CODE_FOR_nothing)))
781 tree srctype = type;
782 tree desttype = type;
783 if (src_align < GET_MODE_ALIGNMENT (mode))
784 srctype = build_aligned_type (type, src_align);
785 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
786 tree tem = fold_const_aggregate_ref (srcmem);
787 if (tem)
788 srcmem = tem;
789 else if (src_align < GET_MODE_ALIGNMENT (mode)
790 && targetm.slow_unaligned_access (mode, src_align)
791 && (optab_handler (movmisalign_optab, mode)
792 == CODE_FOR_nothing))
793 srcmem = NULL_TREE;
794 if (srcmem)
796 gimple *new_stmt;
797 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
799 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
800 srcmem
801 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
802 new_stmt);
803 gimple_assign_set_lhs (new_stmt, srcmem);
804 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
805 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
807 if (dest_align < GET_MODE_ALIGNMENT (mode))
808 desttype = build_aligned_type (type, dest_align);
809 new_stmt
810 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
811 dest, off0),
812 srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
815 if (gimple_vdef (new_stmt)
816 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
817 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
818 if (!lhs)
820 gsi_replace (gsi, new_stmt, false);
821 return true;
823 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
824 goto done;
830 if (endp == 3)
832 /* Both DEST and SRC must be pointer types.
833 ??? This is what old code did. Is the testing for pointer types
834 really mandatory?
836 If either SRC is readonly or length is 1, we can use memcpy. */
837 if (!dest_align || !src_align)
838 return false;
839 if (readonly_data_expr (src)
840 || (tree_fits_uhwi_p (len)
841 && (MIN (src_align, dest_align) / BITS_PER_UNIT
842 >= tree_to_uhwi (len))))
844 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
845 if (!fn)
846 return false;
847 gimple_call_set_fndecl (stmt, fn);
848 gimple_call_set_arg (stmt, 0, dest);
849 gimple_call_set_arg (stmt, 1, src);
850 fold_stmt (gsi);
851 return true;
854 /* If *src and *dest can't overlap, optimize into memcpy as well. */
855 if (TREE_CODE (src) == ADDR_EXPR
856 && TREE_CODE (dest) == ADDR_EXPR)
858 tree src_base, dest_base, fn;
859 poly_int64 src_offset = 0, dest_offset = 0;
860 poly_uint64 maxsize;
862 srcvar = TREE_OPERAND (src, 0);
863 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
864 if (src_base == NULL)
865 src_base = srcvar;
866 destvar = TREE_OPERAND (dest, 0);
867 dest_base = get_addr_base_and_unit_offset (destvar,
868 &dest_offset);
869 if (dest_base == NULL)
870 dest_base = destvar;
871 if (!poly_int_tree_p (len, &maxsize))
872 maxsize = -1;
873 if (SSA_VAR_P (src_base)
874 && SSA_VAR_P (dest_base))
876 if (operand_equal_p (src_base, dest_base, 0)
877 && ranges_maybe_overlap_p (src_offset, maxsize,
878 dest_offset, maxsize))
879 return false;
881 else if (TREE_CODE (src_base) == MEM_REF
882 && TREE_CODE (dest_base) == MEM_REF)
884 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
885 TREE_OPERAND (dest_base, 0), 0))
886 return false;
887 poly_offset_int full_src_offset
888 = mem_ref_offset (src_base) + src_offset;
889 poly_offset_int full_dest_offset
890 = mem_ref_offset (dest_base) + dest_offset;
891 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
892 full_dest_offset, maxsize))
893 return false;
895 else
896 return false;
898 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
899 if (!fn)
900 return false;
901 gimple_call_set_fndecl (stmt, fn);
902 gimple_call_set_arg (stmt, 0, dest);
903 gimple_call_set_arg (stmt, 1, src);
904 fold_stmt (gsi);
905 return true;
908 /* If the destination and source do not alias optimize into
909 memcpy as well. */
910 if ((is_gimple_min_invariant (dest)
911 || TREE_CODE (dest) == SSA_NAME)
912 && (is_gimple_min_invariant (src)
913 || TREE_CODE (src) == SSA_NAME))
915 ao_ref destr, srcr;
916 ao_ref_init_from_ptr_and_size (&destr, dest, len);
917 ao_ref_init_from_ptr_and_size (&srcr, src, len);
918 if (!refs_may_alias_p_1 (&destr, &srcr, false))
920 tree fn;
921 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
922 if (!fn)
923 return false;
924 gimple_call_set_fndecl (stmt, fn);
925 gimple_call_set_arg (stmt, 0, dest);
926 gimple_call_set_arg (stmt, 1, src);
927 fold_stmt (gsi);
928 return true;
932 return false;
935 if (!tree_fits_shwi_p (len))
936 return false;
937 if (!POINTER_TYPE_P (TREE_TYPE (src))
938 || !POINTER_TYPE_P (TREE_TYPE (dest)))
939 return false;
940 /* In the following try to find a type that is most natural to be
941 used for the memcpy source and destination and that allows
942 the most optimization when memcpy is turned into a plain assignment
943 using that type. In theory we could always use a char[len] type
944 but that only gains us that the destination and source possibly
945 no longer will have their address taken. */
946 srctype = TREE_TYPE (TREE_TYPE (src));
947 if (TREE_CODE (srctype) == ARRAY_TYPE
948 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
949 srctype = TREE_TYPE (srctype);
950 desttype = TREE_TYPE (TREE_TYPE (dest));
951 if (TREE_CODE (desttype) == ARRAY_TYPE
952 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
953 desttype = TREE_TYPE (desttype);
954 if (TREE_ADDRESSABLE (srctype)
955 || TREE_ADDRESSABLE (desttype))
956 return false;
958 /* Make sure we are not copying using a floating-point mode or
959 a type whose size possibly does not match its precision. */
960 if (FLOAT_MODE_P (TYPE_MODE (desttype))
961 || TREE_CODE (desttype) == BOOLEAN_TYPE
962 || TREE_CODE (desttype) == ENUMERAL_TYPE)
963 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
964 if (FLOAT_MODE_P (TYPE_MODE (srctype))
965 || TREE_CODE (srctype) == BOOLEAN_TYPE
966 || TREE_CODE (srctype) == ENUMERAL_TYPE)
967 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
968 if (!srctype)
969 srctype = desttype;
970 if (!desttype)
971 desttype = srctype;
972 if (!srctype)
973 return false;
975 src_align = get_pointer_alignment (src);
976 dest_align = get_pointer_alignment (dest);
977 if (dest_align < TYPE_ALIGN (desttype)
978 || src_align < TYPE_ALIGN (srctype))
979 return false;
981 destvar = NULL_TREE;
982 if (TREE_CODE (dest) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (dest, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
985 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
987 srcvar = NULL_TREE;
988 if (TREE_CODE (src) == ADDR_EXPR
989 && var_decl_component_p (TREE_OPERAND (src, 0))
990 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
992 if (!destvar
993 || src_align >= TYPE_ALIGN (desttype))
994 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
995 src, off0);
996 else if (!STRICT_ALIGNMENT)
998 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
999 src_align);
1000 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1004 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1005 return false;
1007 if (srcvar == NULL_TREE)
1009 if (src_align >= TYPE_ALIGN (desttype))
1010 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1011 else
1013 if (STRICT_ALIGNMENT)
1014 return false;
1015 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1016 src_align);
1017 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1020 else if (destvar == NULL_TREE)
1022 if (dest_align >= TYPE_ALIGN (srctype))
1023 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1024 else
1026 if (STRICT_ALIGNMENT)
1027 return false;
1028 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1029 dest_align);
1030 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1034 /* Detect invalid bounds and overlapping copies and issue either
1035 -Warray-bounds or -Wrestrict. */
1036 if (!nowarn)
1037 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1039 gimple *new_stmt;
1040 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1042 tree tem = fold_const_aggregate_ref (srcvar);
1043 if (tem)
1044 srcvar = tem;
1045 if (! is_gimple_min_invariant (srcvar))
1047 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1048 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1049 new_stmt);
1050 gimple_assign_set_lhs (new_stmt, srcvar);
1051 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1052 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1054 new_stmt = gimple_build_assign (destvar, srcvar);
1055 goto set_vop_and_replace;
1058 /* We get an aggregate copy. Use an unsigned char[] type to
1059 perform the copying to preserve padding and to avoid any issues
1060 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1061 desttype = build_array_type_nelts (unsigned_char_type_node,
1062 tree_to_uhwi (len));
1063 srctype = desttype;
1064 if (src_align > TYPE_ALIGN (srctype))
1065 srctype = build_aligned_type (srctype, src_align);
1066 if (dest_align > TYPE_ALIGN (desttype))
1067 desttype = build_aligned_type (desttype, dest_align);
1068 new_stmt
1069 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1070 fold_build2 (MEM_REF, srctype, src, off0));
1071 set_vop_and_replace:
1072 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1073 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1074 if (gimple_vdef (new_stmt)
1075 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1076 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1077 if (!lhs)
1079 gsi_replace (gsi, new_stmt, false);
1080 return true;
1082 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1085 done:
1086 gimple_seq stmts = NULL;
1087 if (endp == 0 || endp == 3)
1088 len = NULL_TREE;
1089 else if (endp == 2)
1090 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1091 ssize_int (1));
1092 if (endp == 2 || endp == 1)
1094 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1095 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1096 TREE_TYPE (dest), dest, len);
1099 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1100 gimple *repl = gimple_build_assign (lhs, dest);
1101 gsi_replace (gsi, repl, false);
1102 return true;
1105 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1106 to built-in memcmp (a, b, len). */
1108 static bool
1109 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1111 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1113 if (!fn)
1114 return false;
1116 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1118 gimple *stmt = gsi_stmt (*gsi);
1119 tree a = gimple_call_arg (stmt, 0);
1120 tree b = gimple_call_arg (stmt, 1);
1121 tree len = gimple_call_arg (stmt, 2);
1123 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1124 replace_call_with_call_and_fold (gsi, repl);
1126 return true;
1129 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1130 to built-in memmove (dest, src, len). */
1132 static bool
1133 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1135 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1137 if (!fn)
1138 return false;
1140 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1141 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1142 len) into memmove (dest, src, len). */
1144 gimple *stmt = gsi_stmt (*gsi);
1145 tree src = gimple_call_arg (stmt, 0);
1146 tree dest = gimple_call_arg (stmt, 1);
1147 tree len = gimple_call_arg (stmt, 2);
1149 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1150 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1151 replace_call_with_call_and_fold (gsi, repl);
1153 return true;
1156 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1157 to built-in memset (dest, 0, len). */
1159 static bool
1160 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1162 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1164 if (!fn)
1165 return false;
1167 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1169 gimple *stmt = gsi_stmt (*gsi);
1170 tree dest = gimple_call_arg (stmt, 0);
1171 tree len = gimple_call_arg (stmt, 1);
1173 gimple_seq seq = NULL;
1174 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1175 gimple_seq_add_stmt_without_update (&seq, repl);
1176 gsi_replace_with_seq_vops (gsi, seq);
1177 fold_stmt (gsi);
1179 return true;
1182 /* Fold function call to builtin memset or bzero at *GSI setting the
1183 memory of size LEN to VAL. Return whether a simplification was made. */
1185 static bool
1186 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1188 gimple *stmt = gsi_stmt (*gsi);
1189 tree etype;
1190 unsigned HOST_WIDE_INT length, cval;
1192 /* If the LEN parameter is zero, return DEST. */
1193 if (integer_zerop (len))
1195 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1196 return true;
1199 if (! tree_fits_uhwi_p (len))
1200 return false;
1202 if (TREE_CODE (c) != INTEGER_CST)
1203 return false;
1205 tree dest = gimple_call_arg (stmt, 0);
1206 tree var = dest;
1207 if (TREE_CODE (var) != ADDR_EXPR)
1208 return false;
1210 var = TREE_OPERAND (var, 0);
1211 if (TREE_THIS_VOLATILE (var))
1212 return false;
1214 etype = TREE_TYPE (var);
1215 if (TREE_CODE (etype) == ARRAY_TYPE)
1216 etype = TREE_TYPE (etype);
1218 if (!INTEGRAL_TYPE_P (etype)
1219 && !POINTER_TYPE_P (etype))
1220 return NULL_TREE;
1222 if (! var_decl_component_p (var))
1223 return NULL_TREE;
1225 length = tree_to_uhwi (len);
1226 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1227 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1228 return NULL_TREE;
1230 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1231 return NULL_TREE;
1233 if (integer_zerop (c))
1234 cval = 0;
1235 else
1237 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1238 return NULL_TREE;
1240 cval = TREE_INT_CST_LOW (c);
1241 cval &= 0xff;
1242 cval |= cval << 8;
1243 cval |= cval << 16;
1244 cval |= (cval << 31) << 1;
1247 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1248 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1249 gimple_set_vuse (store, gimple_vuse (stmt));
1250 tree vdef = gimple_vdef (stmt);
1251 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1253 gimple_set_vdef (store, gimple_vdef (stmt));
1254 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1256 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1257 if (gimple_call_lhs (stmt))
1259 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1260 gsi_replace (gsi, asgn, false);
1262 else
1264 gimple_stmt_iterator gsi2 = *gsi;
1265 gsi_prev (gsi);
1266 gsi_remove (&gsi2, true);
1269 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will
   handle PHIs and COND_EXPRs optimistically: where a branch fails, the
   maximum is pinned at "all ones" rather than giving up.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */
1292 static bool
1293 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1294 int fuzzy, bool *flexp)
1296 tree var, val = NULL_TREE;
1297 gimple *def_stmt;
1299 /* The minimum and maximum length. */
1300 tree *const minlen = length;
1301 tree *const maxlen = length + 1;
1303 if (TREE_CODE (arg) != SSA_NAME)
1305 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1306 if (TREE_CODE (arg) == ADDR_EXPR
1307 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1309 tree op = TREE_OPERAND (arg, 0);
1310 if (integer_zerop (TREE_OPERAND (op, 1)))
1312 tree aop0 = TREE_OPERAND (op, 0);
1313 if (TREE_CODE (aop0) == INDIRECT_REF
1314 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1315 return get_range_strlen (TREE_OPERAND (aop0, 0),
1316 length, visited, type, fuzzy, flexp)
1318 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1320 /* Fail if an array is the last member of a struct object
1321 since it could be treated as a (fake) flexible array
1322 member. */
1323 tree idx = TREE_OPERAND (op, 1);
1325 arg = TREE_OPERAND (op, 0);
1326 tree optype = TREE_TYPE (arg);
1327 if (tree dom = TYPE_DOMAIN (optype))
1328 if (tree bound = TYPE_MAX_VALUE (dom))
1329 if (TREE_CODE (bound) == INTEGER_CST
1330 && TREE_CODE (idx) == INTEGER_CST
1331 && tree_int_cst_lt (bound, idx))
1332 return false;
      /* For TYPE == 2, ARG itself is the value: it must be a
	 non-negative integer constant.  Otherwise compute the
	 constant string length if there is one.  */
1336 if (type == 2)
1338 val = arg;
1339 if (TREE_CODE (val) != INTEGER_CST
1340 || tree_int_cst_sgn (val) < 0)
1341 return false;
1343 else
1344 val = c_strlen (arg, 1);
      /* In fuzzy mode, when no constant length is known, fall back on
	 the size of a character array ARG may refer to, minus one for
	 the terminating nul.  */
1346 if (!val && fuzzy)
1348 if (TREE_CODE (arg) == ADDR_EXPR)
1349 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1350 visited, type, fuzzy, flexp);
1352 if (TREE_CODE (arg) == ARRAY_REF)
1354 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1356 /* Determine the "innermost" array type. */
1357 while (TREE_CODE (type) == ARRAY_TYPE
1358 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1359 type = TREE_TYPE (type);
1361 /* Avoid arrays of pointers. */
1362 tree eltype = TREE_TYPE (type);
1363 if (TREE_CODE (type) != ARRAY_TYPE
1364 || !INTEGRAL_TYPE_P (eltype))
1365 return false;
1367 val = TYPE_SIZE_UNIT (type);
1368 if (!val || integer_zerop (val))
1369 return false;
1371 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1372 integer_one_node);
1373 /* Set the minimum size to zero since the string in
1374 the array could have zero length. */
1375 *minlen = ssize_int (0);
1377 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1378 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1379 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1380 *flexp = true;
1382 else if (TREE_CODE (arg) == COMPONENT_REF
1383 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1384 == ARRAY_TYPE))
1386 /* Use the type of the member array to determine the upper
1387 bound on the length of the array. This may be overly
1388 optimistic if the array itself isn't NUL-terminated and
1389 the caller relies on the subsequent member to contain
1390 the NUL but that would only be considered valid if
1391 the array were the last member of a struct.
1392 Set *FLEXP to true if the array whose bound is being
1393 used is at the end of a struct. */
1394 if (array_at_struct_end_p (arg))
1395 *flexp = true;
1397 arg = TREE_OPERAND (arg, 1);
1399 tree type = TREE_TYPE (arg);
1401 while (TREE_CODE (type) == ARRAY_TYPE
1402 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1403 type = TREE_TYPE (type);
1405 /* Fail when the array bound is unknown or zero. */
1406 val = TYPE_SIZE_UNIT (type);
1407 if (!val || integer_zerop (val))
1408 return false;
1409 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1410 integer_one_node);
1411 /* Set the minimum size to zero since the string in
1412 the array could have zero length. */
1413 *minlen = ssize_int (0);
1416 if (VAR_P (arg))
1418 tree type = TREE_TYPE (arg);
1419 if (POINTER_TYPE_P (type))
1420 type = TREE_TYPE (type);
1422 if (TREE_CODE (type) == ARRAY_TYPE)
1424 val = TYPE_SIZE_UNIT (type);
1425 if (!val
1426 || TREE_CODE (val) != INTEGER_CST
1427 || integer_zerop (val))
1428 return false;
1429 val = wide_int_to_tree (TREE_TYPE (val),
1430 wi::sub (wi::to_wide (val), 1));
1431 /* Set the minimum size to zero since the string in
1432 the array could have zero length. */
1433 *minlen = ssize_int (0);
1438 if (!val)
1439 return false;
      /* Fold VAL into the running minimum: for TYPE > 0 keep the
	 smaller of VAL and the current minimum.  */
1441 if (!*minlen
1442 || (type > 0
1443 && TREE_CODE (*minlen) == INTEGER_CST
1444 && TREE_CODE (val) == INTEGER_CST
1445 && tree_int_cst_lt (val, *minlen)))
1446 *minlen = val;
      /* Fold VAL into the running maximum: for TYPE > 0 keep the
	 larger constant; for TYPE == 0 require an exact match with
	 the previously determined length.  */
1448 if (*maxlen)
1450 if (type > 0)
1452 if (TREE_CODE (*maxlen) != INTEGER_CST
1453 || TREE_CODE (val) != INTEGER_CST)
1454 return false;
1456 if (tree_int_cst_lt (*maxlen, val))
1457 *maxlen = val;
1458 return true;
1460 else if (simple_cst_equal (val, *maxlen) != 1)
1461 return false;
1464 *maxlen = val;
1465 return true;
1468 /* If ARG is registered for SSA update we cannot look at its defining
1469 statement. */
1470 if (name_registered_for_update_p (arg))
1471 return false;
1473 /* If we were already here, break the infinite cycle. */
1474 if (!*visited)
1475 *visited = BITMAP_ALLOC (NULL);
1476 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1477 return true;
1479 var = arg;
1480 def_stmt = SSA_NAME_DEF_STMT (var);
1482 switch (gimple_code (def_stmt))
1484 case GIMPLE_ASSIGN:
1485 /* The RHS of the statement defining VAR must either have a
1486 constant length or come from another SSA_NAME with a constant
1487 length. */
1488 if (gimple_assign_single_p (def_stmt)
1489 || gimple_assign_unary_nop_p (def_stmt))
1491 tree rhs = gimple_assign_rhs1 (def_stmt);
1492 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1494 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1496 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1497 gimple_assign_rhs3 (def_stmt) };
      /* Recurse into both arms; with FUZZY == 2 a failed arm only
	 pins the maximum at "all ones" instead of failing.  */
1499 for (unsigned int i = 0; i < 2; i++)
1500 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1501 flexp))
1503 if (fuzzy == 2)
1504 *maxlen = build_all_ones_cst (size_type_node);
1505 else
1506 return false;
1508 return true;
1510 return false;
1512 case GIMPLE_PHI:
1513 /* All the arguments of the PHI node must have the same constant
1514 length. */
1515 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1517 tree arg = gimple_phi_arg (def_stmt, i)->def;
1519 /* If this PHI has itself as an argument, we cannot
1520 determine the string length of this argument. However,
1521 if we can find a constant string length for the other
1522 PHI args then we can still be sure that this is a
1523 constant string length. So be optimistic and just
1524 continue with the next argument. */
1525 if (arg == gimple_phi_result (def_stmt))
1526 continue;
1528 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1530 if (fuzzy == 2)
1531 *maxlen = build_all_ones_cst (size_type_node);
1532 else
1533 return false;
1536 return true;
1538 default:
1539 return false;
1543 /* Determine the minimum and maximum value or string length that ARG
1544 refers to and store each in the first two elements of MINMAXLEN.
1545 For expressions that point to strings of unknown lengths that are
1546 character arrays, use the upper bound of the array as the maximum
1547 length. For example, given an expression like 'x ? array : "xyz"'
1548 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1549 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1550 stored in array.
1551 Return true if the range of the string lengths has been obtained
1552 from the upper bound of an array at the end of a struct. Such
1553 an array may hold a string that's longer than its upper bound
1554 due to it being used as a poor-man's flexible array member.
1556 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1557 and false if PHIs and COND_EXPRs are to be handled optimistically,
1558 if we can determine string length minimum and maximum; it will use
1559 the minimum from the ones where it can be determined.
1560 STRICT false should be only used for warning code. */
1562 bool
1563 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1565 bitmap visited = NULL;
1567 minmaxlen[0] = NULL_TREE;
1568 minmaxlen[1] = NULL_TREE;
1570 bool flexarray = false;
1571 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1572 &flexarray))
1574 minmaxlen[0] = NULL_TREE;
1575 minmaxlen[1] = NULL_TREE;
1578 if (visited)
1579 BITMAP_FREE (visited);
1581 return flexarray;
1584 tree
1585 get_maxval_strlen (tree arg, int type)
1587 bitmap visited = NULL;
1588 tree len[2] = { NULL_TREE, NULL_TREE };
1590 bool dummy;
1591 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1592 len[1] = NULL_TREE;
1593 if (visited)
1594 BITMAP_FREE (visited);
1596 return len[1];
1600 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1601 If LEN is not NULL, it represents the length of the string to be
1602 copied. Return NULL_TREE if no simplification can be made. */
1604 static bool
1605 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1606 tree dest, tree src)
1608 gimple *stmt = gsi_stmt (*gsi);
1609 location_t loc = gimple_location (stmt);
1610 tree fn;
1612 /* If SRC and DEST are the same (and not volatile), return DEST. */
1613 if (operand_equal_p (src, dest, 0))
1615 if (!gimple_no_warning_p (stmt))
1617 tree func = gimple_call_fndecl (stmt);
1619 warning_at (loc, OPT_Wrestrict,
1620 "%qD source argument is the same as destination",
1621 func);
1624 replace_call_with_value (gsi, dest);
1625 return true;
1628 if (optimize_function_for_size_p (cfun))
1629 return false;
1631 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1632 if (!fn)
1633 return false;
1635 tree len = get_maxval_strlen (src, 0);
1636 if (!len)
1637 return false;
1639 len = fold_convert_loc (loc, size_type_node, len);
1640 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1641 len = force_gimple_operand_gsi (gsi, len, true,
1642 NULL_TREE, true, GSI_SAME_STMT);
1643 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1644 replace_call_with_call_and_fold (gsi, repl);
1645 return true;
/* Fold a call to the strncpy builtin at *GSI with arguments DEST, SRC,
   and the bound LEN.  Diagnoses truncation and, when the source length
   is a known constant not exceeding LEN, transforms the call into
   memcpy.  Return true if a simplification was made.  */
1652 static bool
1653 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1654 tree dest, tree src, tree len)
1656 gimple *stmt = gsi_stmt (*gsi);
1657 location_t loc = gimple_location (stmt);
1658 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1660 /* If the LEN parameter is zero, return DEST. */
1661 if (integer_zerop (len))
      /* Avoid warning if the destination refers to an array/pointer
	 declared with attribute nonstring.  */
1665 if (!nonstring)
1667 tree fndecl = gimple_call_fndecl (stmt);
1668 gcall *call = as_a <gcall *> (stmt);
1670 /* Warn about the lack of nul termination: the result is not
1671 a (nul-terminated) string. */
1672 tree slen = get_maxval_strlen (src, 0);
1673 if (slen && !integer_zerop (slen))
1674 warning_at (loc, OPT_Wstringop_truncation,
1675 "%G%qD destination unchanged after copying no bytes "
1676 "from a string of length %E",
1677 call, fndecl, slen);
1678 else
1679 warning_at (loc, OPT_Wstringop_truncation,
1680 "%G%qD destination unchanged after copying no bytes",
1681 call, fndecl);
1684 replace_call_with_value (gsi, dest);
1685 return true;
1688 /* We can't compare slen with len as constants below if len is not a
1689 constant. */
1690 if (TREE_CODE (len) != INTEGER_CST)
1691 return false;
1693 /* Now, we must be passed a constant src ptr parameter. */
1694 tree slen = get_maxval_strlen (src, 0);
1695 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1696 return false;
1698 /* The size of the source string including the terminating nul. */
1699 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1701 /* We do not support simplification of this case, though we do
1702 support it when expanding trees into RTL. */
1703 /* FIXME: generate a call to __builtin_memset. */
1704 if (tree_int_cst_lt (ssize, len))
1705 return false;
1707 /* Diagnose truncation that leaves the copy unterminated. */
1708 maybe_diag_stxncpy_trunc (*gsi, src, len);
1710 /* OK transform into builtin memcpy. */
1711 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1712 if (!fn)
1713 return false;
1715 len = fold_convert_loc (loc, size_type_node, len);
1716 len = force_gimple_operand_gsi (gsi, len, true,
1717 NULL_TREE, true, GSI_SAME_STMT);
1718 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1719 replace_call_with_call_and_fold (gsi, repl);
1721 return true;
/* Fold a call to the strchr or strrchr builtin at *GSI (IS_STRRCHR
   selects which).  If both arguments are constant, evaluate and fold
   the result; otherwise simplify str(r)chr (str, 0) into
   str + strlen (str).  In general strlen is significantly faster than
   strchr due to being a simpler operation.  Return true if a
   simplification was made.  */
1729 static bool
1730 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1732 gimple *stmt = gsi_stmt (*gsi);
1733 tree str = gimple_call_arg (stmt, 0);
1734 tree c = gimple_call_arg (stmt, 1);
1735 location_t loc = gimple_location (stmt);
1736 const char *p;
1737 char ch;
  /* A call whose result is unused will be removed elsewhere;
     the folds below all need the LHS.  */
1739 if (!gimple_call_lhs (stmt))
1740 return false;
  /* Both arguments constant: do the search at compile time.  */
1742 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1744 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1746 if (p1 == NULL)
1748 replace_call_with_value (gsi, integer_zero_node);
1749 return true;
      /* Fold to STR plus the constant offset of the match.  */
1752 tree len = build_int_cst (size_type_node, p1 - p);
1753 gimple_seq stmts = NULL;
1754 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1755 POINTER_PLUS_EXPR, str, len);
1756 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1757 gsi_replace_with_seq_vops (gsi, stmts);
1758 return true;
  /* The remaining folds only apply when searching for the nul.  */
1761 if (!integer_zerop (c))
1762 return false;
1764 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1765 if (is_strrchr && optimize_function_for_size_p (cfun))
1767 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1769 if (strchr_fn)
1771 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1772 replace_call_with_call_and_fold (gsi, repl);
1773 return true;
1776 return false;
1779 tree len;
1780 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1782 if (!strlen_fn)
1783 return false;
1785 /* Create newstr = strlen (str). */
1786 gimple_seq stmts = NULL;
1787 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1788 gimple_set_location (new_stmt, loc);
1789 len = create_tmp_reg_or_ssa_name (size_type_node);
1790 gimple_call_set_lhs (new_stmt, len);
1791 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1793 /* Create (str p+ strlen (str)). */
1794 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1795 POINTER_PLUS_EXPR, str, len);
1796 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1797 gsi_replace_with_seq_vops (gsi, stmts);
1798 /* gsi now points at the assignment to the lhs, get a
1799 stmt iterator to the strlen.
1800 ??? We can't use gsi_for_stmt as that doesn't work when the
1801 CFG isn't built yet. */
1802 gimple_stmt_iterator gsi2 = *gsi;
1803 gsi_prev (&gsi2);
1804 fold_stmt (&gsi2);
1805 return true;
1808 /* Fold function call to builtin strstr.
1809 If both arguments are constant, evaluate and fold the result,
1810 additionally fold strstr (x, "") into x and strstr (x, "c")
1811 into strchr (x, 'c'). */
1812 static bool
1813 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1815 gimple *stmt = gsi_stmt (*gsi);
1816 tree haystack = gimple_call_arg (stmt, 0);
1817 tree needle = gimple_call_arg (stmt, 1);
1818 const char *p, *q;
1820 if (!gimple_call_lhs (stmt))
1821 return false;
1823 q = c_getstr (needle);
1824 if (q == NULL)
1825 return false;
1827 if ((p = c_getstr (haystack)))
1829 const char *r = strstr (p, q);
1831 if (r == NULL)
1833 replace_call_with_value (gsi, integer_zero_node);
1834 return true;
1837 tree len = build_int_cst (size_type_node, r - p);
1838 gimple_seq stmts = NULL;
1839 gimple *new_stmt
1840 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1841 haystack, len);
1842 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1843 gsi_replace_with_seq_vops (gsi, stmts);
1844 return true;
1847 /* For strstr (x, "") return x. */
1848 if (q[0] == '\0')
1850 replace_call_with_value (gsi, haystack);
1851 return true;
1854 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1855 if (q[1] == '\0')
1857 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1858 if (strchr_fn)
1860 tree c = build_int_cst (integer_type_node, q[0]);
1861 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1862 replace_call_with_call_and_fold (gsi, repl);
1863 return true;
1867 return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.  If SRC is the empty string the call folds to
   DST; otherwise, when the source length is a known constant and speed
   is preferred, the call is split into strlen (DST) followed by a
   memcpy into DST + strlen (DST).  Return true if a simplification was
   made.  (Note: despite older comments elsewhere, this returns bool,
   not a tree.)  */
1888 static bool
1889 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1891 gimple *stmt = gsi_stmt (*gsi);
1892 location_t loc = gimple_location (stmt);
1894 const char *p = c_getstr (src);
1896 /* If the string length is zero, return the dst parameter. */
1897 if (p && *p == '\0')
1899 replace_call_with_value (gsi, dst);
1900 return true;
  /* Splitting into strlen + memcpy grows code; only do it when this
     block is optimized for speed.  */
1903 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1904 return false;
1906 /* See if we can store by pieces into (dst + strlen(dst)). */
1907 tree newdst;
1908 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1909 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1911 if (!strlen_fn || !memcpy_fn)
1912 return false;
1914 /* If the length of the source string isn't computable don't
1915 split strcat into strlen and memcpy. */
1916 tree len = get_maxval_strlen (src, 0);
1917 if (! len)
1918 return false;
1920 /* Create strlen (dst). */
1921 gimple_seq stmts = NULL, stmts2;
1922 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1923 gimple_set_location (repl, loc);
1924 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1925 gimple_call_set_lhs (repl, newdst);
1926 gimple_seq_add_stmt_without_update (&stmts, repl);
1928 /* Create (dst p+ strlen (dst)). */
1929 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1930 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1931 gimple_seq_add_seq_without_update (&stmts, stmts2);
  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
1933 len = fold_convert_loc (loc, size_type_node, len);
1934 len = size_binop_loc (loc, PLUS_EXPR, len,
1935 build_int_cst (size_type_node, 1));
1936 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1937 gimple_seq_add_seq_without_update (&stmts, stmts2);
1939 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1940 gimple_seq_add_stmt_without_update (&stmts, repl);
1941 if (gimple_call_lhs (stmt))
1943 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1944 gimple_seq_add_stmt_without_update (&stmts, repl);
1945 gsi_replace_with_seq_vops (gsi, stmts);
1946 /* gsi now points at the assignment to the lhs, get a
1947 stmt iterator to the memcpy call.
1948 ??? We can't use gsi_for_stmt as that doesn't work when the
1949 CFG isn't built yet. */
1950 gimple_stmt_iterator gsi2 = *gsi;
1951 gsi_prev (&gsi2);
1952 fold_stmt (&gsi2);
1954 else
1956 gsi_replace_with_seq_vops (gsi, stmts);
1957 fold_stmt (gsi);
1959 return true;
1962 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1963 are the arguments to the call. */
1965 static bool
1966 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1968 gimple *stmt = gsi_stmt (*gsi);
1969 tree dest = gimple_call_arg (stmt, 0);
1970 tree src = gimple_call_arg (stmt, 1);
1971 tree size = gimple_call_arg (stmt, 2);
1972 tree fn;
1973 const char *p;
1976 p = c_getstr (src);
1977 /* If the SRC parameter is "", return DEST. */
1978 if (p && *p == '\0')
1980 replace_call_with_value (gsi, dest);
1981 return true;
1984 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1985 return false;
1987 /* If __builtin_strcat_chk is used, assume strcat is available. */
1988 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1989 if (!fn)
1990 return false;
1992 gimple *repl = gimple_build_call (fn, 2, dest, src);
1993 replace_call_with_call_and_fold (gsi, repl);
1994 return true;
/* Simplify a call to the strncat builtin at *GSI with arguments DST,
   SRC, and the bound LEN.  Folds to DST when nothing would be copied,
   diagnoses suspicious bounds, and otherwise transforms the call into
   plain strcat when the bound is known to be large enough.  Return
   true if a simplification was made.  */
1999 static bool
2000 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2002 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2003 tree dst = gimple_call_arg (stmt, 0);
2004 tree src = gimple_call_arg (stmt, 1);
2005 tree len = gimple_call_arg (stmt, 2);
2007 const char *p = c_getstr (src);
2009 /* If the requested length is zero, or the src parameter string
2010 length is zero, return the dst parameter. */
2011 if (integer_zerop (len) || (p && *p == '\0'))
2013 replace_call_with_value (gsi, dst);
2014 return true;
  /* The remaining folds need a constant bound and a known source
     string.  */
2017 if (TREE_CODE (len) != INTEGER_CST || !p)
2018 return false;
2020 unsigned srclen = strlen (p);
2022 int cmpsrc = compare_tree_int (len, srclen);
2024 /* Return early if the requested len is less than the string length.
2025 Warnings will be issued elsewhere later. */
2026 if (cmpsrc < 0)
2027 return false;
2029 unsigned HOST_WIDE_INT dstsize;
2031 bool nowarn = gimple_no_warning_p (stmt);
  /* Diagnose a bound as large as or larger than the destination:
     strncat always appends the terminating nul beyond the bound.  */
2033 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2035 int cmpdst = compare_tree_int (len, dstsize);
2037 if (cmpdst >= 0)
2039 tree fndecl = gimple_call_fndecl (stmt);
2041 /* Strncat copies (at most) LEN bytes and always appends
2042 the terminating NUL so the specified bound should never
2043 be equal to (or greater than) the size of the destination.
2044 If it is, the copy could overflow. */
2045 location_t loc = gimple_location (stmt);
2046 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2047 cmpdst == 0
2048 ? G_("%G%qD specified bound %E equals "
2049 "destination size")
2050 : G_("%G%qD specified bound %E exceeds "
2051 "destination size %wu"),
2052 stmt, fndecl, len, dstsize);
2053 if (nowarn)
2054 gimple_set_no_warning (stmt, true);
2058 if (!nowarn && cmpsrc == 0)
2060 tree fndecl = gimple_call_fndecl (stmt);
2062 /* To avoid certain truncation the specified bound should also
2063 not be equal to (or less than) the length of the source. */
2064 location_t loc = gimple_location (stmt);
2065 if (warning_at (loc, OPT_Wstringop_overflow_,
2066 "%G%qD specified bound %E equals source length",
2067 stmt, fndecl, len))
2068 gimple_set_no_warning (stmt, true);
  /* LEN >= strlen (SRC): the bound never truncates, so a plain strcat
     is equivalent.  */
2071 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2073 /* If the replacement _DECL isn't initialized, don't do the
2074 transformation. */
2075 if (!fn)
2076 return false;
2078 /* Otherwise, emit a call to strcat. */
2079 gcall *repl = gimple_build_call (fn, 2, dst, src);
2080 replace_call_with_call_and_fold (gsi, repl);
2081 return true;
/* Fold a call to the __strncat_chk builtin at *GSI with arguments
   DEST, SRC, LEN, and the object-size check argument SIZE.  Return
   true if a simplification was made.  */
2087 static bool
2088 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2090 gimple *stmt = gsi_stmt (*gsi);
2091 tree dest = gimple_call_arg (stmt, 0);
2092 tree src = gimple_call_arg (stmt, 1);
2093 tree len = gimple_call_arg (stmt, 2);
2094 tree size = gimple_call_arg (stmt, 3);
2095 tree fn;
2096 const char *p;
2098 p = c_getstr (src);
2099 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2100 if ((p && *p == '\0')
2101 || integer_zerop (len))
2103 replace_call_with_value (gsi, dest);
2104 return true;
2107 if (! tree_fits_uhwi_p (size))
2108 return false;
  /* With a known object size, the check can only be simplified when
     the bound provably does not truncate the source.  */
2110 if (! integer_all_onesp (size))
2112 tree src_len = c_strlen (src, 1);
2113 if (src_len
2114 && tree_fits_uhwi_p (src_len)
2115 && tree_fits_uhwi_p (len)
2116 && ! tree_int_cst_lt (len, src_len)
2118 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2119 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2120 if (!fn)
2121 return false;
2123 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2124 replace_call_with_call_and_fold (gsi, repl);
2125 return true;
2127 return false;
2130 /* If __builtin_strncat_chk is used, assume strncat is available. */
2131 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2132 if (!fn)
2133 return false;
2135 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2136 replace_call_with_call_and_fold (gsi, repl);
2137 return true;
2140 /* Build and append gimple statements to STMTS that would load a first
2141 character of a memory location identified by STR. LOC is location
2142 of the statement. */
/* Returns the temporary (register or SSA name) of type const unsigned char
   that will hold the loaded character.  The statements are appended with
   gimple_seq_add_stmt_without_update, so the caller is responsible for
   inserting STMTS and updating virtual operands.  */
2144 static tree
2145 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2147 tree var;
/* Loads are done through a "const unsigned char *" so that comparisons
   use the unsigned character values, matching the C library contract.  */
2149 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2150 tree cst_uchar_ptr_node
2151 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2152 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
/* Build *(const unsigned char *) STR, i.e. a MEM_REF at offset 0.  */
2154 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2155 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2156 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2158 gimple_assign_set_lhs (stmt, var);
2159 gimple_seq_add_stmt_without_update (stmts, stmt);
2161 return var;
2164 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2165 FCODE is the name of the builtin. */
/* Returns true if the call was simplified (to a constant, to character
   loads, or to strcmp), false otherwise.  */
2167 static bool
2168 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2170 gimple *stmt = gsi_stmt (*gsi);
2171 tree callee = gimple_call_fndecl (stmt);
2172 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2174 tree type = integer_type_node;
2175 tree str1 = gimple_call_arg (stmt, 0);
2176 tree str2 = gimple_call_arg (stmt, 1);
2177 tree lhs = gimple_call_lhs (stmt);
/* -1 means the bound is unknown or the builtin takes no bound.  */
2178 HOST_WIDE_INT length = -1;
2180 /* Handle strncmp and strncasecmp functions. */
2181 if (gimple_call_num_args (stmt) == 3)
2183 tree len = gimple_call_arg (stmt, 2);
2184 if (tree_fits_uhwi_p (len))
2185 length = tree_to_uhwi (len);
2188 /* If the LEN parameter is zero, return zero. */
2189 if (length == 0)
2191 replace_call_with_value (gsi, integer_zero_node);
2192 return true;
2195 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2196 if (operand_equal_p (str1, str2, 0))
2198 replace_call_with_value (gsi, integer_zero_node);
2199 return true;
/* P1/P2 are non-NULL only when the argument is a known string constant.  */
2202 const char *p1 = c_getstr (str1);
2203 const char *p2 = c_getstr (str2);
2205 /* For known strings, return an immediate value. */
2206 if (p1 && p2)
2208 int r = 0;
2209 bool known_result = false;
2211 switch (fcode)
2213 case BUILT_IN_STRCMP:
2215 r = strcmp (p1, p2);
2216 known_result = true;
2217 break;
2219 case BUILT_IN_STRNCMP:
2221 if (length == -1)
2222 break;
2223 r = strncmp (p1, p2, length);
2224 known_result = true;
2225 break;
2227 /* Only handleable situation is where the string are equal (result 0),
2228 which is already handled by operand_equal_p case. */
2229 case BUILT_IN_STRCASECMP:
2230 break;
2231 case BUILT_IN_STRNCASECMP:
2233 if (length == -1)
2234 break;
/* Case-sensitive comparison is a conservative stand-in: if the bytes
   are identical the case-insensitive result is certainly 0 too.  */
2235 r = strncmp (p1, p2, length);
2236 if (r == 0)
2237 known_result = true;
2238 break;
2240 default:
2241 gcc_unreachable ();
2244 if (known_result)
2246 replace_call_with_value (gsi, build_cmp_result (type, r));
2247 return true;
/* True when at least one character is known to be compared, which makes
   the ""-argument shortcuts below valid for the strn* variants.  */
2251 bool nonzero_length = length >= 1
2252 || fcode == BUILT_IN_STRCMP
2253 || fcode == BUILT_IN_STRCASECMP;
2255 location_t loc = gimple_location (stmt);
2257 /* If the second arg is "", return *(const unsigned char*)arg1. */
2258 if (p2 && *p2 == '\0' && nonzero_length)
2260 gimple_seq stmts = NULL;
2261 tree var = gimple_load_first_char (loc, str1, &stmts);
2262 if (lhs)
2264 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2265 gimple_seq_add_stmt_without_update (&stmts, stmt);
2268 gsi_replace_with_seq_vops (gsi, stmts);
2269 return true;
2272 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2273 if (p1 && *p1 == '\0' && nonzero_length)
2275 gimple_seq stmts = NULL;
2276 tree var = gimple_load_first_char (loc, str2, &stmts);
2278 if (lhs)
/* Widen the char to int before negating so the result has the
   builtin's int return type.  */
2280 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2281 stmt = gimple_build_assign (c, NOP_EXPR, var);
2282 gimple_seq_add_stmt_without_update (&stmts, stmt);
2284 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2285 gimple_seq_add_stmt_without_update (&stmts, stmt);
2288 gsi_replace_with_seq_vops (gsi, stmts);
2289 return true;
2292 /* If len parameter is one, return an expression corresponding to
2293 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2294 if (fcode == BUILT_IN_STRNCMP && length == 1)
2296 gimple_seq stmts = NULL;
2297 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2298 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2300 if (lhs)
2302 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2303 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2304 gimple_seq_add_stmt_without_update (&stmts, convert1);
2306 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2307 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2308 gimple_seq_add_stmt_without_update (&stmts, convert2);
2310 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2311 gimple_seq_add_stmt_without_update (&stmts, stmt);
2314 gsi_replace_with_seq_vops (gsi, stmts);
2315 return true;
2318 /* If length is larger than the length of one constant string,
2319 replace strncmp with corresponding strcmp */
2320 if (fcode == BUILT_IN_STRNCMP
2321 && length > 0
2322 && ((p2 && (size_t) length > strlen (p2))
2323 || (p1 && (size_t) length > strlen (p1))))
2325 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2326 if (!fn)
2327 return false;
2328 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2329 replace_call_with_call_and_fold (gsi, repl);
2330 return true;
2333 return false;
2336 /* Fold a call to the memchr pointed by GSI iterator. */
/* Returns true if the call was replaced by a constant null pointer or by
   ARG1 + constant offset, false if no simplification was possible.  */
2338 static bool
2339 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2341 gimple *stmt = gsi_stmt (*gsi);
2342 tree lhs = gimple_call_lhs (stmt);
2343 tree arg1 = gimple_call_arg (stmt, 0);
2344 tree arg2 = gimple_call_arg (stmt, 1);
2345 tree len = gimple_call_arg (stmt, 2);
2347 /* If the LEN parameter is zero, return zero. */
2348 if (integer_zerop (len))
2350 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2351 return true;
/* Both the searched character and the length must be compile-time
   constants for a host-side memchr to be meaningful.  */
2354 char c;
2355 if (TREE_CODE (arg2) != INTEGER_CST
2356 || !tree_fits_uhwi_p (len)
2357 || !target_char_cst_p (arg2, &c))
2358 return false;
2360 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2361 unsigned HOST_WIDE_INT string_length;
2362 const char *p1 = c_getstr (arg1, &string_length);
2364 if (p1)
/* Search only the part of the buffer whose contents are known.  */
2366 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2367 if (r == NULL)
/* Not found, and LEN does not reach past the known contents, so the
   runtime call would not find it either: fold to NULL.  If LEN is
   larger than the known string, the tail is unknown — give up.  */
2369 if (length <= string_length)
2371 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2372 return true;
2375 else
2377 unsigned HOST_WIDE_INT offset = r - p1;
2378 gimple_seq stmts = NULL;
2379 if (lhs != NULL_TREE)
2381 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2382 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2383 arg1, offset_cst);
2384 gimple_seq_add_stmt_without_update (&stmts, stmt);
2386 else
/* Result unused: replace the call with a no-op statement.  */
2387 gimple_seq_add_stmt_without_update (&stmts,
2388 gimple_build_nop ());
2390 gsi_replace_with_seq_vops (gsi, stmts);
2391 return true;
2395 return false;
2398 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2399 to the call. IGNORE is true if the value returned
2400 by the builtin will be ignored. UNLOCKED is true is true if this
2401 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2402 the known length of the string. Return NULL_TREE if no simplification
2403 was possible. */
/* NOTE(review): despite the comment above, this function returns bool
   (true when the call was simplified) and takes no IGNORE/LEN parameters;
   the comment looks stale relative to the signature.  */
2405 static bool
2406 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2407 tree arg0, tree arg1,
2408 bool unlocked)
2410 gimple *stmt = gsi_stmt (*gsi);
2412 /* If we're using an unlocked function, assume the other unlocked
2413 functions exist explicitly. */
2414 tree const fn_fputc = (unlocked
2415 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2416 : builtin_decl_implicit (BUILT_IN_FPUTC))
2417 tree const fn_fwrite = (unlocked
2418 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2419 : builtin_decl_implicit (BUILT_IN_FWRITE));
2421 /* If the return value is used, don't do the transformation. */
2422 if (gimple_call_lhs (stmt))
2423 return false;
2425 /* Get the length of the string passed to fputs. If the length
2426 can't be determined, punt. */
2427 tree len = get_maxval_strlen (arg0, 0);
2428 if (!len
2429 || TREE_CODE (len) != INTEGER_CST)
2430 return false;
2432 switch (compare_tree_int (len, 1))
2434 case -1: /* length is 0, delete the call entirely . */
2435 replace_call_with_value (gsi, integer_zero_node);
2436 return true;
2438 case 0: /* length is 1, call fputc. */
2440 const char *p = c_getstr (arg0);
2441 if (p != NULL)
2443 if (!fn_fputc)
2444 return false;
2446 gimple *repl = gimple_build_call (fn_fputc, 2,
2447 build_int_cst
2448 (integer_type_node, p[0]), arg1);
2449 replace_call_with_call_and_fold (gsi, repl);
2450 return true;
/* String contents unknown (only the length is): fall through and
   treat it like the general fwrite case.  */
2453 /* FALLTHROUGH */
2454 case 1: /* length is greater than 1, call fwrite. */
2456 /* If optimizing for size keep fputs. */
2457 if (optimize_function_for_size_p (cfun))
2458 return false;
2459 /* New argument list transforming fputs(string, stream) to
2460 fwrite(string, 1, len, stream). */
2461 if (!fn_fwrite)
2462 return false;
2464 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2465 size_one_node, len, arg1);
2466 replace_call_with_call_and_fold (gsi, repl);
2467 return true;
2469 default:
2470 gcc_unreachable ();
2472 return false;
2475 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2476 DEST, SRC, LEN, and SIZE are the arguments to the call.
2477 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2478 code of the builtin. If MAXLEN is not NULL, it is maximum length
2479 passed as third argument. */
/* Returns true if the call was simplified (to DEST, DEST+LEN, a sibling
   _chk builtin, or the unchecked mem* function), false otherwise.  */
2481 static bool
2482 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2483 tree dest, tree src, tree len, tree size,
2484 enum built_in_function fcode)
2486 gimple *stmt = gsi_stmt (*gsi);
2487 location_t loc = gimple_location (stmt);
2488 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2489 tree fn;
2491 /* If SRC and DEST are the same (and not volatile), return DEST
2492 (resp. DEST+LEN for __mempcpy_chk). */
2493 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2495 if (fcode != BUILT_IN_MEMPCPY_CHK)
2497 replace_call_with_value (gsi, dest);
2498 return true;
2500 else
2502 gimple_seq stmts = NULL;
/* __mempcpy returns DEST + LEN, so materialize that expression.  */
2503 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2504 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2505 TREE_TYPE (dest), dest, len);
2506 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2507 replace_call_with_value (gsi, temp);
2508 return true;
2512 if (! tree_fits_uhwi_p (size))
2513 return false;
2515 tree maxlen = get_maxval_strlen (len, 2);
/* All-ones SIZE means the object size is unknown: the check can never
   fail, so the SIZE/LEN comparison below is skipped entirely.  */
2516 if (! integer_all_onesp (size))
2518 if (! tree_fits_uhwi_p (len))
2520 /* If LEN is not constant, try MAXLEN too.
2521 For MAXLEN only allow optimizing into non-_ocs function
2522 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2523 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2525 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2527 /* (void) __mempcpy_chk () can be optimized into
2528 (void) __memcpy_chk (). */
2529 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2530 if (!fn)
2531 return false;
2533 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2534 replace_call_with_call_and_fold (gsi, repl);
2535 return true;
2537 return false;
2540 else
2541 maxlen = len;
/* A possible overflow (SIZE < bound) must keep the checking call.  */
2543 if (tree_int_cst_lt (size, maxlen))
2544 return false;
2547 fn = NULL_TREE;
2548 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2549 mem{cpy,pcpy,move,set} is available. */
2550 switch (fcode)
2552 case BUILT_IN_MEMCPY_CHK:
2553 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2554 break;
2555 case BUILT_IN_MEMPCPY_CHK:
2556 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2557 break;
2558 case BUILT_IN_MEMMOVE_CHK:
2559 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2560 break;
2561 case BUILT_IN_MEMSET_CHK:
2562 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2563 break;
2564 default:
2565 break;
2568 if (!fn)
2569 return false;
2571 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2572 replace_call_with_call_and_fold (gsi, repl);
2573 return true;
2576 /* Fold a call to the __st[rp]cpy_chk builtin.
2577 DEST, SRC, and SIZE are the arguments to the call.
2578 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2579 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2580 strings passed as second argument. */
/* Returns true if the call was simplified (to DEST, __strcpy_chk,
   __memcpy_chk, or the unchecked st[rp]cpy), false otherwise.  */
2582 static bool
2583 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2584 tree dest,
2585 tree src, tree size,
2586 enum built_in_function fcode)
2588 gimple *stmt = gsi_stmt (*gsi);
2589 location_t loc = gimple_location (stmt);
2590 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2591 tree len, fn;
2593 /* If SRC and DEST are the same (and not volatile), return DEST. */
2594 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
/* strcpy with overlapping arguments is undefined; warn unless a
   warning was already suppressed on this statement.  */
2596 if (!gimple_no_warning_p (stmt))
2598 tree func = gimple_call_fndecl (stmt);
2600 warning_at (loc, OPT_Wrestrict,
2601 "%qD source argument is the same as destination",
2602 func);
2605 replace_call_with_value (gsi, dest);
2606 return true;
2609 if (! tree_fits_uhwi_p (size))
2610 return false;
2612 tree maxlen = get_maxval_strlen (src, 1);
/* All-ones SIZE means unknown object size; the check is vacuous.  */
2613 if (! integer_all_onesp (size))
2615 len = c_strlen (src, 1);
2616 if (! len || ! tree_fits_uhwi_p (len))
2618 /* If LEN is not constant, try MAXLEN too.
2619 For MAXLEN only allow optimizing into non-_ocs function
2620 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2621 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2623 if (fcode == BUILT_IN_STPCPY_CHK)
2625 if (! ignore)
2626 return false;
2628 /* If return value of __stpcpy_chk is ignored,
2629 optimize into __strcpy_chk. */
2630 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2631 if (!fn)
2632 return false;
2634 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2635 replace_call_with_call_and_fold (gsi, repl);
2636 return true;
2639 if (! len || TREE_SIDE_EFFECTS (len))
2640 return false;
2642 /* If c_strlen returned something, but not a constant,
2643 transform __strcpy_chk into __memcpy_chk. */
2644 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2645 if (!fn)
2646 return false;
/* Copy LEN + 1 bytes to include the terminating NUL.  */
2648 gimple_seq stmts = NULL;
2649 len = gimple_convert (&stmts, loc, size_type_node, len);
2650 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2651 build_int_cst (size_type_node, 1));
2652 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2653 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2654 replace_call_with_call_and_fold (gsi, repl);
2655 return true;
2658 else
2659 maxlen = len;
/* Require strlen (SRC) < SIZE so the copy provably fits.  */
2661 if (! tree_int_cst_lt (maxlen, size))
2662 return false;
2665 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2666 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2667 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2668 if (!fn)
2669 return false;
2671 gimple *repl = gimple_build_call (fn, 2, dest, src);
2672 replace_call_with_call_and_fold (gsi, repl);
2673 return true;
2676 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2677 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2678 length passed as third argument. IGNORE is true if return value can be
2679 ignored. FCODE is the BUILT_IN_* code of the builtin. */
/* Returns true if the call was simplified (to __strncpy_chk or the
   unchecked st{r,p}ncpy), false otherwise.  */
2681 static bool
2682 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2683 tree dest, tree src,
2684 tree len, tree size,
2685 enum built_in_function fcode)
2687 gimple *stmt = gsi_stmt (*gsi);
2688 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2689 tree fn;
2691 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2693 /* If return value of __stpncpy_chk is ignored,
2694 optimize into __strncpy_chk. */
2695 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2696 if (fn)
2698 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2699 replace_call_with_call_and_fold (gsi, repl);
2700 return true;
2704 if (! tree_fits_uhwi_p (size))
2705 return false;
2707 tree maxlen = get_maxval_strlen (len, 2);
/* All-ones SIZE means unknown object size; skip the bound check.  */
2708 if (! integer_all_onesp (size))
2710 if (! tree_fits_uhwi_p (len))
2712 /* If LEN is not constant, try MAXLEN too.
2713 For MAXLEN only allow optimizing into non-_ocs function
2714 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2715 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2716 return false;
2718 else
2719 maxlen = len;
/* A possible overflow (SIZE < bound) must keep the checking call.  */
2721 if (tree_int_cst_lt (size, maxlen))
2722 return false;
2725 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2726 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2727 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2728 if (!fn)
2729 return false;
2731 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2732 replace_call_with_call_and_fold (gsi, repl);
2733 return true;
2736 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2737 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): despite the comment above, this function returns bool —
   true when the call was replaced by strcpy or by memcpy plus DEST+LEN.  */
2739 static bool
2740 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2742 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2743 location_t loc = gimple_location (stmt);
2744 tree dest = gimple_call_arg (stmt, 0);
2745 tree src = gimple_call_arg (stmt, 1);
2746 tree fn, len, lenp1;
2748 /* If the result is unused, replace stpcpy with strcpy. */
2749 if (gimple_call_lhs (stmt) == NULL_TREE)
2751 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2752 if (!fn)
2753 return false;
2754 gimple_call_set_fndecl (stmt, fn);
2755 fold_stmt (gsi);
2756 return true;
2759 len = c_strlen (src, 1);
2760 if (!len
2761 || TREE_CODE (len) != INTEGER_CST)
2762 return false;
2764 if (optimize_function_for_size_p (cfun)
2765 /* If length is zero it's small enough. */
2766 && !integer_zerop (len))
2767 return false;
2769 /* If the source has a known length replace stpcpy with memcpy. */
2770 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2771 if (!fn)
2772 return false;
/* Copy LEN + 1 bytes to include the terminating NUL.  */
2774 gimple_seq stmts = NULL;
2775 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2776 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2777 tem, build_int_cst (size_type_node, 1));
2778 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2779 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* Transfer the virtual operands by hand: the memcpy is inserted as a
   new statement, so it must take over the original call's vuse/vdef.  */
2780 gimple_set_vuse (repl, gimple_vuse (stmt));
2781 gimple_set_vdef (repl, gimple_vdef (stmt));
2782 if (gimple_vdef (repl)
2783 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2784 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2785 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2786 /* Replace the result with dest + len. */
2787 stmts = NULL;
2788 tem = gimple_convert (&stmts, loc, sizetype, len);
2789 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2790 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2791 POINTER_PLUS_EXPR, dest, tem);
2792 gsi_replace (gsi, ret, false);
2793 /* Finally fold the memcpy call. */
2794 gimple_stmt_iterator gsi2 = *gsi;
2795 gsi_prev (&gsi2);
2796 fold_stmt (&gsi2);
2797 return true;
2800 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2801 NULL_TREE if a normal call should be emitted rather than expanding
2802 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2803 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2804 passed as second argument. */
/* NOTE(review): despite the comment above, this function returns bool —
   true when the _chk call was rewritten in place as {,v}snprintf.  */
2806 static bool
2807 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2808 enum built_in_function fcode)
2810 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2811 tree dest, size, len, fn, fmt, flag;
2812 const char *fmt_str;
2814 /* Verify the required arguments in the original call. */
2815 if (gimple_call_num_args (stmt) < 5)
2816 return false;
2818 dest = gimple_call_arg (stmt, 0);
2819 len = gimple_call_arg (stmt, 1);
2820 flag = gimple_call_arg (stmt, 2);
2821 size = gimple_call_arg (stmt, 3);
2822 fmt = gimple_call_arg (stmt, 4);
2824 if (! tree_fits_uhwi_p (size))
2825 return false;
/* All-ones SIZE means unknown object size; skip the bound check.  */
2827 if (! integer_all_onesp (size))
2829 tree maxlen = get_maxval_strlen (len, 2);
2830 if (! tree_fits_uhwi_p (len))
2832 /* If LEN is not constant, try MAXLEN too.
2833 For MAXLEN only allow optimizing into non-_ocs function
2834 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2835 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2836 return false;
2838 else
2839 maxlen = len;
2841 if (tree_int_cst_lt (size, maxlen))
2842 return false;
2845 if (!init_target_chars ())
2846 return false;
2848 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2849 or if format doesn't contain % chars or is "%s". */
2850 if (! integer_zerop (flag))
2852 fmt_str = c_getstr (fmt);
2853 if (fmt_str == NULL)
2854 return false;
2855 if (strchr (fmt_str, target_percent) != NULL
2856 && strcmp (fmt_str, target_percent_s))
2857 return false;
2860 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2861 available. */
2862 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2863 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2864 if (!fn)
2865 return false;
2867 /* Replace the called function and the first 5 argument by 3 retaining
2868 trailing varargs. */
/* Drop the FLAG and SIZE arguments and shift the varargs left, then
   shrink the statement's operand count accordingly.  */
2869 gimple_call_set_fndecl (stmt, fn);
2870 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2871 gimple_call_set_arg (stmt, 0, dest);
2872 gimple_call_set_arg (stmt, 1, len);
2873 gimple_call_set_arg (stmt, 2, fmt);
2874 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2875 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2876 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2877 fold_stmt (gsi);
2878 return true;
2881 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2882 Return NULL_TREE if a normal call should be emitted rather than
2883 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2884 or BUILT_IN_VSPRINTF_CHK. */
/* NOTE(review): despite the comment above, this function returns bool —
   true when the _chk call was rewritten in place as {,v}sprintf.  */
2886 static bool
2887 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2888 enum built_in_function fcode)
2890 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2891 tree dest, size, len, fn, fmt, flag;
2892 const char *fmt_str;
2893 unsigned nargs = gimple_call_num_args (stmt);
2895 /* Verify the required arguments in the original call. */
2896 if (nargs < 4)
2897 return false;
2898 dest = gimple_call_arg (stmt, 0);
2899 flag = gimple_call_arg (stmt, 1);
2900 size = gimple_call_arg (stmt, 2);
2901 fmt = gimple_call_arg (stmt, 3);
2903 if (! tree_fits_uhwi_p (size))
2904 return false;
2906 len = NULL_TREE;
2908 if (!init_target_chars ())
2909 return false;
2911 /* Check whether the format is a literal string constant. */
2912 fmt_str = c_getstr (fmt);
2913 if (fmt_str != NULL)
2915 /* If the format doesn't contain % args or %%, we know the size. */
2916 if (strchr (fmt_str, target_percent) == 0)
/* For sprintf_chk, extra arguments beyond the format would be
   unused but harmless only when there are none (nargs == 4).  */
2918 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2919 len = build_int_cstu (size_type_node, strlen (fmt_str));
2921 /* If the format is "%s" and first ... argument is a string literal,
2922 we know the size too. */
2923 else if (fcode == BUILT_IN_SPRINTF_CHK
2924 && strcmp (fmt_str, target_percent_s) == 0)
2926 tree arg;
2928 if (nargs == 5)
2930 arg = gimple_call_arg (stmt, 4);
2931 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2933 len = c_strlen (arg, 1);
2934 if (! len || ! tree_fits_uhwi_p (len))
2935 len = NULL_TREE;
/* With unknown object size (all-ones SIZE) the check is vacuous;
   otherwise require the computed output length to fit in SIZE.  */
2941 if (! integer_all_onesp (size))
2943 if (! len || ! tree_int_cst_lt (len, size))
2944 return false;
2947 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2948 or if format doesn't contain % chars or is "%s". */
2949 if (! integer_zerop (flag))
2951 if (fmt_str == NULL)
2952 return false;
2953 if (strchr (fmt_str, target_percent) != NULL
2954 && strcmp (fmt_str, target_percent_s))
2955 return false;
2958 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2959 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2960 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2961 if (!fn)
2962 return false;
2964 /* Replace the called function and the first 4 argument by 2 retaining
2965 trailing varargs. */
/* Drop the FLAG and SIZE arguments and shift the varargs left, then
   shrink the statement's operand count accordingly.  */
2966 gimple_call_set_fndecl (stmt, fn);
2967 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2968 gimple_call_set_arg (stmt, 0, dest);
2969 gimple_call_set_arg (stmt, 1, fmt);
2970 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2971 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2972 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2973 fold_stmt (gsi);
2974 return true;
2977 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2978 ORIG may be null if this is a 2-argument call. We don't attempt to
2979 simplify calls with more than 3 arguments.
2981 Return true if simplification was possible, otherwise false. */
2983 bool
2984 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2986 gimple *stmt = gsi_stmt (*gsi);
2987 tree dest = gimple_call_arg (stmt, 0);
2988 tree fmt = gimple_call_arg (stmt, 1);
2989 tree orig = NULL_TREE;
2990 const char *fmt_str = NULL;
2992 /* Verify the required arguments in the original call. We deal with two
2993 types of sprintf() calls: 'sprintf (str, fmt)' and
2994 'sprintf (dest, "%s", orig)'. */
2995 if (gimple_call_num_args (stmt) > 3)
2996 return false;
2998 if (gimple_call_num_args (stmt) == 3)
2999 orig = gimple_call_arg (stmt, 2);
3001 /* Check whether the format is a literal string constant. */
3002 fmt_str = c_getstr (fmt);
3003 if (fmt_str == NULL)
3004 return false;
3006 if (!init_target_chars ())
3007 return false;
3009 /* If the format doesn't contain % args or %%, use strcpy. */
3010 if (strchr (fmt_str, target_percent) == NULL)
3012 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3014 if (!fn)
3015 return false;
3017 /* Don't optimize sprintf (buf, "abc", ptr++). */
3018 if (orig)
3019 return false;
3021 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3022 'format' is known to contain no % formats. */
3023 gimple_seq stmts = NULL;
3024 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3025 gimple_seq_add_stmt_without_update (&stmts, repl);
3026 if (gimple_call_lhs (stmt))
/* sprintf returns the number of characters written, which here is
   simply the format string's length.  */
3028 repl = gimple_build_assign (gimple_call_lhs (stmt),
3029 build_int_cst (integer_type_node,
3030 strlen (fmt_str)));
3031 gimple_seq_add_stmt_without_update (&stmts, repl);
3032 gsi_replace_with_seq_vops (gsi, stmts);
3033 /* gsi now points at the assignment to the lhs, get a
3034 stmt iterator to the memcpy call.
3035 ??? We can't use gsi_for_stmt as that doesn't work when the
3036 CFG isn't built yet. */
3037 gimple_stmt_iterator gsi2 = *gsi;
3038 gsi_prev (&gsi2);
3039 fold_stmt (&gsi2);
3041 else
3043 gsi_replace_with_seq_vops (gsi, stmts);
3044 fold_stmt (gsi);
3046 return true;
3049 /* If the format is "%s", use strcpy if the result isn't used. */
3050 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3052 tree fn;
3053 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3055 if (!fn)
3056 return false;
3058 /* Don't crash on sprintf (str1, "%s"). */
3059 if (!orig)
3060 return false;
/* The return value (length of ORIG) is only needed if the call's
   result is used; punt when it cannot be determined.  */
3062 tree orig_len = NULL_TREE;
3063 if (gimple_call_lhs (stmt))
3065 orig_len = get_maxval_strlen (orig, 0);
3066 if (!orig_len)
3067 return false;
3070 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3071 gimple_seq stmts = NULL;
3072 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3073 gimple_seq_add_stmt_without_update (&stmts, repl);
3074 if (gimple_call_lhs (stmt))
3076 if (!useless_type_conversion_p (integer_type_node,
3077 TREE_TYPE (orig_len)))
3078 orig_len = fold_convert (integer_type_node, orig_len);
3079 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3080 gimple_seq_add_stmt_without_update (&stmts, repl);
3081 gsi_replace_with_seq_vops (gsi, stmts);
3082 /* gsi now points at the assignment to the lhs, get a
3083 stmt iterator to the memcpy call.
3084 ??? We can't use gsi_for_stmt as that doesn't work when the
3085 CFG isn't built yet. */
3086 gimple_stmt_iterator gsi2 = *gsi;
3087 gsi_prev (&gsi2);
3088 fold_stmt (&gsi2);
3090 else
3092 gsi_replace_with_seq_vops (gsi, stmts);
3093 fold_stmt (gsi);
3095 return true;
3097 return false;
3100 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3101 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3102 attempt to simplify calls with more than 4 arguments.
3104 Return true if simplification was possible, otherwise false. */
3106 bool
3107 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3109 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3110 tree dest = gimple_call_arg (stmt, 0);
3111 tree destsize = gimple_call_arg (stmt, 1);
3112 tree fmt = gimple_call_arg (stmt, 2);
3113 tree orig = NULL_TREE;
3114 const char *fmt_str = NULL;
3116 if (gimple_call_num_args (stmt) > 4)
3117 return false;
3119 if (gimple_call_num_args (stmt) == 4)
3120 orig = gimple_call_arg (stmt, 3);
/* The destination size must be a compile-time constant so we can prove
   the output (including the NUL) fits and is not truncated.  */
3122 if (!tree_fits_uhwi_p (destsize))
3123 return false;
3124 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3126 /* Check whether the format is a literal string constant. */
3127 fmt_str = c_getstr (fmt);
3128 if (fmt_str == NULL)
3129 return false;
3131 if (!init_target_chars ())
3132 return false;
3134 /* If the format doesn't contain % args or %%, use strcpy. */
3135 if (strchr (fmt_str, target_percent) == NULL)
3137 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3138 if (!fn)
3139 return false;
3141 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3142 if (orig)
3143 return false;
3145 /* We could expand this as
3146 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3147 or to
3148 memcpy (str, fmt_with_nul_at_cstm1, cst);
3149 but in the former case that might increase code size
3150 and in the latter case grow .rodata section too much.
3151 So punt for now. */
3152 size_t len = strlen (fmt_str);
3153 if (len >= destlen)
3154 return false;
3156 gimple_seq stmts = NULL;
3157 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3158 gimple_seq_add_stmt_without_update (&stmts, repl);
3159 if (gimple_call_lhs (stmt))
/* snprintf returns the number of characters that would have been
   written; with no truncation that is just strlen (FMT).  */
3161 repl = gimple_build_assign (gimple_call_lhs (stmt),
3162 build_int_cst (integer_type_node, len));
3163 gimple_seq_add_stmt_without_update (&stmts, repl);
3164 gsi_replace_with_seq_vops (gsi, stmts);
3165 /* gsi now points at the assignment to the lhs, get a
3166 stmt iterator to the memcpy call.
3167 ??? We can't use gsi_for_stmt as that doesn't work when the
3168 CFG isn't built yet. */
3169 gimple_stmt_iterator gsi2 = *gsi;
3170 gsi_prev (&gsi2);
3171 fold_stmt (&gsi2);
3173 else
3175 gsi_replace_with_seq_vops (gsi, stmts);
3176 fold_stmt (gsi);
3178 return true;
3181 /* If the format is "%s", use strcpy if the result isn't used. */
3182 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3184 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3185 if (!fn)
3186 return false;
3188 /* Don't crash on snprintf (str1, cst, "%s"). */
3189 if (!orig)
3190 return false;
/* Unlike the sprintf case, the source length is needed even when the
   result is unused, to prove the copy fits in DESTLEN.  */
3192 tree orig_len = get_maxval_strlen (orig, 0);
3193 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3194 return false;
3196 /* We could expand this as
3197 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3198 or to
3199 memcpy (str1, str2_with_nul_at_cstm1, cst);
3200 but in the former case that might increase code size
3201 and in the latter case grow .rodata section too much.
3202 So punt for now. */
3203 if (compare_tree_int (orig_len, destlen) >= 0)
3204 return false;
3206 /* Convert snprintf (str1, cst, "%s", str2) into
3207 strcpy (str1, str2) if strlen (str2) < cst. */
3208 gimple_seq stmts = NULL;
3209 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3210 gimple_seq_add_stmt_without_update (&stmts, repl);
3211 if (gimple_call_lhs (stmt))
3213 if (!useless_type_conversion_p (integer_type_node,
3214 TREE_TYPE (orig_len)))
3215 orig_len = fold_convert (integer_type_node, orig_len);
3216 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3217 gimple_seq_add_stmt_without_update (&stmts, repl);
3218 gsi_replace_with_seq_vops (gsi, stmts);
3219 /* gsi now points at the assignment to the lhs, get a
3220 stmt iterator to the memcpy call.
3221 ??? We can't use gsi_for_stmt as that doesn't work when the
3222 CFG isn't built yet. */
3223 gimple_stmt_iterator gsi2 = *gsi;
3224 gsi_prev (&gsi2);
3225 fold_stmt (&gsi2);
3227 else
3229 gsi_replace_with_seq_vops (gsi, stmts);
3230 fold_stmt (gsi);
3232 return true;
3234 return false;
3237 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3238 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3239 more than 3 arguments, and ARG may be null in the 2-argument case.
3241 Return true if the call was simplified (and replaced in the IL),
3242 false if no simplification was possible. FCODE is the BUILT_IN_*
3243 code of the function to be simplified. */
3245 static bool
3246 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3247 tree fp, tree fmt, tree arg,
3248 enum built_in_function fcode)
3250 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3251 tree fn_fputc, fn_fputs;
3252 const char *fmt_str = NULL;
3254 /* If the return value is used, don't do the transformation. */
3255 if (gimple_call_lhs (stmt) != NULL_TREE)
3256 return false;
3258 /* Check whether the format is a literal string constant. */
3259 fmt_str = c_getstr (fmt);
3260 if (fmt_str == NULL)
3261 return false;
3263 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3265 /* If we're using an unlocked function, assume the other
3266 unlocked functions exist explicitly. */
3267 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3268 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3270 else
3272 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3273 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
/* Cache the target execution-charset values of '%', 's', 'c', ... used
   by the target_percent* comparisons below.  */
3276 if (!init_target_chars ())
3277 return false;
3279 /* If the format doesn't contain % args or %%, use fputs. */
3280 if (strchr (fmt_str, target_percent) == NULL)
3282 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3283 && arg)
3284 return false;
3286 /* If the format specifier was "", fprintf does nothing. */
3287 if (fmt_str[0] == '\0')
3289 replace_call_with_value (gsi, NULL_TREE);
3290 return true;
3293 /* When "string" doesn't contain %, replace all cases of
3294 fprintf (fp, string) with fputs (string, fp). The fputs
3295 builtin will take care of special cases like length == 1. */
3296 if (fn_fputs)
3298 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3299 replace_call_with_call_and_fold (gsi, repl);
3300 return true;
3304 /* The other optimizations can be done only on the non-va_list variants. */
3305 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3306 return false;
3308 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3309 else if (strcmp (fmt_str, target_percent_s) == 0)
3311 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3312 return false;
3313 if (fn_fputs)
3315 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3316 replace_call_with_call_and_fold (gsi, repl);
3317 return true;
3321 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3322 else if (strcmp (fmt_str, target_percent_c) == 0)
3324 if (!arg
3325 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3326 return false;
3327 if (fn_fputc)
3329 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3330 replace_call_with_call_and_fold (gsi, repl);
3331 return true;
3335 return false;
3338 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3339 FMT and ARG are the arguments to the call; we don't fold cases with
3340 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3342 Return true if the call was simplified (and replaced in the IL),
3343 false if no simplification was possible. FCODE is the BUILT_IN_*
3344 code of the function to be simplified. */
3346 static bool
3347 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3348 tree arg, enum built_in_function fcode)
3350 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3351 tree fn_putchar, fn_puts, newarg;
3352 const char *fmt_str = NULL;
3354 /* If the return value is used, don't do the transformation. */
3355 if (gimple_call_lhs (stmt) != NULL_TREE)
3356 return false;
3358 /* Check whether the format is a literal string constant. */
3359 fmt_str = c_getstr (fmt);
3360 if (fmt_str == NULL)
3361 return false;
3363 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3365 /* If we're using an unlocked function, assume the other
3366 unlocked functions exist explicitly. */
3367 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3368 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3370 else
3372 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3373 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
/* Cache the target execution-charset values of '%', 's', '\n', ...  */
3376 if (!init_target_chars ())
3377 return false;
/* Handle both printf ("literal") and printf ("%s", "literal"): in either
   case STR below is a compile-time-known string to be printed.  */
3379 if (strcmp (fmt_str, target_percent_s) == 0
3380 || strchr (fmt_str, target_percent) == NULL)
3382 const char *str;
3384 if (strcmp (fmt_str, target_percent_s) == 0)
3386 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3387 return false;
3389 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3390 return false;
3392 str = c_getstr (arg);
3393 if (str == NULL)
3394 return false;
3396 else
3398 /* The format specifier doesn't contain any '%' characters. */
3399 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3400 && arg)
3401 return false;
3402 str = fmt_str;
3405 /* If the string was "", printf does nothing. */
3406 if (str[0] == '\0')
3408 replace_call_with_value (gsi, NULL_TREE);
3409 return true;
3412 /* If the string has length of 1, call putchar. */
3413 if (str[1] == '\0')
3415 /* Given printf("c"), (where c is any one character,)
3416 convert "c"[0] to an int and pass that to the replacement
3417 function. */
3418 newarg = build_int_cst (integer_type_node, str[0]);
3419 if (fn_putchar)
3421 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3422 replace_call_with_call_and_fold (gsi, repl);
3423 return true;
3426 else
3428 /* If the string was "string\n", call puts("string"). */
3429 size_t len = strlen (str);
/* The (int) round-trip checks reject lengths that do not fit in a
   positive int, since TREE_STRING_LENGTH below is an int.  */
3430 if ((unsigned char)str[len - 1] == target_newline
3431 && (size_t) (int) len == len
3432 && (int) len > 0)
3434 char *newstr;
3435 tree offset_node, string_cst;
3437 /* Create a NUL-terminated string that's one char shorter
3438 than the original, stripping off the trailing '\n'. */
3439 newarg = build_string_literal (len, str);
3440 string_cst = string_constant (newarg, &offset_node);
3441 gcc_checking_assert (string_cst
3442 && (TREE_STRING_LENGTH (string_cst)
3443 == (int) len)
3444 && integer_zerop (offset_node)
3445 && (unsigned char)
3446 TREE_STRING_POINTER (string_cst)[len - 1]
3447 == target_newline);
3448 /* build_string_literal creates a new STRING_CST,
3449 modify it in place to avoid double copying. */
3450 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3451 newstr[len - 1] = '\0';
3452 if (fn_puts)
3454 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3455 replace_call_with_call_and_fold (gsi, repl);
3456 return true;
3459 else
3460 /* We'd like to arrange to call fputs(string,stdout) here,
3461 but we need stdout and don't have a way to get it yet. */
3462 return false;
3466 /* The other optimizations can be done only on the non-va_list variants. */
3467 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3468 return false;
3470 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3471 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3473 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3474 return false;
3475 if (fn_puts)
3477 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3478 replace_call_with_call_and_fold (gsi, repl);
3479 return true;
3483 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3484 else if (strcmp (fmt_str, target_percent_c) == 0)
3486 if (!arg || ! useless_type_conversion_p (integer_type_node,
3487 TREE_TYPE (arg)))
3488 return false;
3489 if (fn_putchar)
3491 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3492 replace_call_with_call_and_fold (gsi, repl);
3493 return true;
3497 return false;
3502 /* Fold a call to __builtin_strlen.  If the argument's length is known
   exactly, replace the call with that constant and return true.
   Otherwise record the computed [MINLEN, MAXLEN] range on the call's
   SSA lhs (if any) for later passes and return false.  */
3504 static bool
3505 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3507 gimple *stmt = gsi_stmt (*gsi);
3509 wide_int minlen;
3510 wide_int maxlen;
/* NOTE(review): get_range_strlen's result is negated here -- a false
   return together with INTEGER_CST bounds is treated as a usable
   range; confirm against get_range_strlen's contract.  */
3512 tree lenrange[2];
3513 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
3514 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3515 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3517 /* The range of lengths refers to either a single constant
3518 string or to the longest and shortest constant string
3519 referenced by the argument of the strlen() call, or to
3520 the strings that can possibly be stored in the arrays
3521 the argument refers to. */
3522 minlen = wi::to_wide (lenrange[0]);
3523 maxlen = wi::to_wide (lenrange[1]);
3525 else
3527 unsigned prec = TYPE_PRECISION (sizetype);
/* Fall back to the conservative range [0, max_object_size () - 2].  */
3529 minlen = wi::shwi (0, prec);
3530 maxlen = wi::to_wide (max_object_size (), prec) - 2;
/* An exact length: replace the call with the constant itself.  */
3533 if (minlen == maxlen)
3535 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3536 true, GSI_SAME_STMT);
3537 replace_call_with_value (gsi, lenrange[0]);
3538 return true;
/* Otherwise just annotate the lhs with the range; the call remains.  */
3541 tree lhs = gimple_call_lhs (stmt);
3542 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3543 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3545 return false;
3548 /* Fold a call to __builtin_acc_on_device. */
3550 static bool
3551 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3553 /* Defer folding until we know which compiler we're in. */
3554 if (symtab->state != EXPANSION)
3555 return false;
3557 unsigned val_host = GOMP_DEVICE_HOST;
3558 unsigned val_dev = GOMP_DEVICE_NONE;
3560 #ifdef ACCEL_COMPILER
3561 val_host = GOMP_DEVICE_NOT_HOST;
3562 val_dev = ACCEL_COMPILER_acc_device;
3563 #endif
3565 location_t loc = gimple_location (gsi_stmt (*gsi));
3567 tree host_eq = make_ssa_name (boolean_type_node);
3568 gimple *host_ass = gimple_build_assign
3569 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3570 gimple_set_location (host_ass, loc);
3571 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3573 tree dev_eq = make_ssa_name (boolean_type_node);
3574 gimple *dev_ass = gimple_build_assign
3575 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3576 gimple_set_location (dev_ass, loc);
3577 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3579 tree result = make_ssa_name (boolean_type_node);
3580 gimple *result_ass = gimple_build_assign
3581 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3582 gimple_set_location (result_ass, loc);
3583 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3585 replace_call_with_value (gsi, result);
3587 return true;
3590 /* Fold realloc (0, n) -> malloc (n). */
3592 static bool
3593 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3595 gimple *stmt = gsi_stmt (*gsi);
3596 tree arg = gimple_call_arg (stmt, 0);
3597 tree size = gimple_call_arg (stmt, 1);
3599 if (operand_equal_p (arg, null_pointer_node, 0))
3601 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3602 if (fn_malloc)
3604 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3605 replace_call_with_call_and_fold (gsi, repl);
3606 return true;
3609 return false;
3612 /* Fold the non-target builtin at *GSI and return whether any simplification
3613 was made.  Dispatches on the builtin's function code to the specialized
   folders above; each of those returns true iff it replaced the call in
   the IL.  Falls back to the generic tree-level folder at the end.  */
3615 static bool
3616 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3618 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3619 tree callee = gimple_call_fndecl (stmt);
3621 /* Give up for always_inline inline builtins until they are
3622 inlined. */
3623 if (avoid_folding_inline_builtin (callee))
3624 return false;
3626 unsigned n = gimple_call_num_args (stmt);
3627 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3628 switch (fcode)
3630 case BUILT_IN_BCMP:
3631 return gimple_fold_builtin_bcmp (gsi);
3632 case BUILT_IN_BCOPY:
3633 return gimple_fold_builtin_bcopy (gsi);
3634 case BUILT_IN_BZERO:
3635 return gimple_fold_builtin_bzero (gsi);
3637 case BUILT_IN_MEMSET:
3638 return gimple_fold_builtin_memset (gsi,
3639 gimple_call_arg (stmt, 1),
3640 gimple_call_arg (stmt, 2));
/* The trailing argument to gimple_fold_builtin_memory_op selects the
   variant: 0 memcpy, 1 mempcpy, 3 memmove.  */
3641 case BUILT_IN_MEMCPY:
3642 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3643 gimple_call_arg (stmt, 1), 0);
3644 case BUILT_IN_MEMPCPY:
3645 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3646 gimple_call_arg (stmt, 1), 1);
3647 case BUILT_IN_MEMMOVE:
3648 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3649 gimple_call_arg (stmt, 1), 3);
3650 case BUILT_IN_SPRINTF_CHK:
3651 case BUILT_IN_VSPRINTF_CHK:
3652 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3653 case BUILT_IN_STRCAT_CHK:
3654 return gimple_fold_builtin_strcat_chk (gsi);
3655 case BUILT_IN_STRNCAT_CHK:
3656 return gimple_fold_builtin_strncat_chk (gsi);
3657 case BUILT_IN_STRLEN:
3658 return gimple_fold_builtin_strlen (gsi);
3659 case BUILT_IN_STRCPY:
3660 return gimple_fold_builtin_strcpy (gsi,
3661 gimple_call_arg (stmt, 0),
3662 gimple_call_arg (stmt, 1));
3663 case BUILT_IN_STRNCPY:
3664 return gimple_fold_builtin_strncpy (gsi,
3665 gimple_call_arg (stmt, 0),
3666 gimple_call_arg (stmt, 1),
3667 gimple_call_arg (stmt, 2));
3668 case BUILT_IN_STRCAT:
3669 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3670 gimple_call_arg (stmt, 1));
3671 case BUILT_IN_STRNCAT:
3672 return gimple_fold_builtin_strncat (gsi);
/* The bool argument distinguishes strchr (false) from strrchr (true).  */
3673 case BUILT_IN_INDEX:
3674 case BUILT_IN_STRCHR:
3675 return gimple_fold_builtin_strchr (gsi, false);
3676 case BUILT_IN_RINDEX:
3677 case BUILT_IN_STRRCHR:
3678 return gimple_fold_builtin_strchr (gsi, true);
3679 case BUILT_IN_STRSTR:
3680 return gimple_fold_builtin_strstr (gsi);
3681 case BUILT_IN_STRCMP:
3682 case BUILT_IN_STRCASECMP:
3683 case BUILT_IN_STRNCMP:
3684 case BUILT_IN_STRNCASECMP:
3685 return gimple_fold_builtin_string_compare (gsi);
3686 case BUILT_IN_MEMCHR:
3687 return gimple_fold_builtin_memchr (gsi);
3688 case BUILT_IN_FPUTS:
3689 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3690 gimple_call_arg (stmt, 1), false);
3691 case BUILT_IN_FPUTS_UNLOCKED:
3692 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3693 gimple_call_arg (stmt, 1), true);
3694 case BUILT_IN_MEMCPY_CHK:
3695 case BUILT_IN_MEMPCPY_CHK:
3696 case BUILT_IN_MEMMOVE_CHK:
3697 case BUILT_IN_MEMSET_CHK:
3698 return gimple_fold_builtin_memory_chk (gsi,
3699 gimple_call_arg (stmt, 0),
3700 gimple_call_arg (stmt, 1),
3701 gimple_call_arg (stmt, 2),
3702 gimple_call_arg (stmt, 3),
3703 fcode);
3704 case BUILT_IN_STPCPY:
3705 return gimple_fold_builtin_stpcpy (gsi);
3706 case BUILT_IN_STRCPY_CHK:
3707 case BUILT_IN_STPCPY_CHK:
3708 return gimple_fold_builtin_stxcpy_chk (gsi,
3709 gimple_call_arg (stmt, 0),
3710 gimple_call_arg (stmt, 1),
3711 gimple_call_arg (stmt, 2),
3712 fcode);
3713 case BUILT_IN_STRNCPY_CHK:
3714 case BUILT_IN_STPNCPY_CHK:
3715 return gimple_fold_builtin_stxncpy_chk (gsi,
3716 gimple_call_arg (stmt, 0),
3717 gimple_call_arg (stmt, 1),
3718 gimple_call_arg (stmt, 2),
3719 gimple_call_arg (stmt, 3),
3720 fcode);
3721 case BUILT_IN_SNPRINTF_CHK:
3722 case BUILT_IN_VSNPRINTF_CHK:
3723 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
/* The printf family only folds at the argument counts the helpers
   handle; for other arities fall through to the generic folder.  */
3725 case BUILT_IN_FPRINTF:
3726 case BUILT_IN_FPRINTF_UNLOCKED:
3727 case BUILT_IN_VFPRINTF:
3728 if (n == 2 || n == 3)
3729 return gimple_fold_builtin_fprintf (gsi,
3730 gimple_call_arg (stmt, 0),
3731 gimple_call_arg (stmt, 1),
3732 n == 3
3733 ? gimple_call_arg (stmt, 2)
3734 : NULL_TREE,
3735 fcode);
3736 break;
3737 case BUILT_IN_FPRINTF_CHK:
3738 case BUILT_IN_VFPRINTF_CHK:
3739 if (n == 3 || n == 4)
3740 return gimple_fold_builtin_fprintf (gsi,
3741 gimple_call_arg (stmt, 0),
3742 gimple_call_arg (stmt, 2),
3743 n == 4
3744 ? gimple_call_arg (stmt, 3)
3745 : NULL_TREE,
3746 fcode);
3747 break;
3748 case BUILT_IN_PRINTF:
3749 case BUILT_IN_PRINTF_UNLOCKED:
3750 case BUILT_IN_VPRINTF:
3751 if (n == 1 || n == 2)
3752 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3753 n == 2
3754 ? gimple_call_arg (stmt, 1)
3755 : NULL_TREE, fcode);
3756 break;
3757 case BUILT_IN_PRINTF_CHK:
3758 case BUILT_IN_VPRINTF_CHK:
3759 if (n == 2 || n == 3)
3760 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3761 n == 3
3762 ? gimple_call_arg (stmt, 2)
3763 : NULL_TREE, fcode);
3764 break;
3765 case BUILT_IN_ACC_ON_DEVICE:
3766 return gimple_fold_builtin_acc_on_device (gsi,
3767 gimple_call_arg (stmt, 0));
3768 case BUILT_IN_REALLOC:
3769 return gimple_fold_builtin_realloc (gsi);
3771 default:;
3774 /* Try the generic builtin folder. */
3775 bool ignore = (gimple_call_lhs (stmt) == NULL);
3776 tree result = fold_call_stmt (stmt, ignore);
3777 if (result)
3779 if (ignore)
3780 STRIP_NOPS (result);
3781 else
3782 result = fold_convert (gimple_call_return_type (stmt), result);
3783 if (!update_call_from_tree (gsi, result))
3784 gimplify_and_update_call_from_tree (gsi, result);
3785 return true;
3788 return false;
3791 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3792 function calls to constants, where possible. */
3794 static tree
3795 fold_internal_goacc_dim (const gimple *call)
3797 int axis = oacc_get_ifn_dim_arg (call);
3798 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3799 tree result = NULL_TREE;
3800 tree type = TREE_TYPE (gimple_call_lhs (call));
3802 switch (gimple_call_internal_fn (call))
3804 case IFN_GOACC_DIM_POS:
3805 /* If the size is 1, we know the answer. */
3806 if (size == 1)
3807 result = build_int_cst (type, 0);
3808 break;
3809 case IFN_GOACC_DIM_SIZE:
3810 /* If the size is not dynamic, we know the answer. */
3811 if (size)
3812 result = build_int_cst (type, size);
3813 break;
3814 default:
3815 break;
3818 return result;
3821 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3822 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3823 &var where var is only addressable because of such calls. */
3825 bool
3826 optimize_atomic_compare_exchange_p (gimple *stmt)
/* Cheap global gates first: exact arity, atomics inlining enabled,
   optimizing, no TSan/ASan instrumentation, a genuine builtin call,
   and virtual operands present (the call reads and writes memory).  */
3828 if (gimple_call_num_args (stmt) != 6
3829 || !flag_inline_atomics
3830 || !optimize
3831 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3832 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3833 || !gimple_vdef (stmt)
3834 || !gimple_vuse (stmt))
3835 return false;
3837 tree fndecl = gimple_call_fndecl (stmt);
3838 switch (DECL_FUNCTION_CODE (fndecl))
3840 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3841 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3842 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3843 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3844 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3845 break;
3846 default:
3847 return false;
/* The "expected" argument must literally be &var of a local variable.  */
3850 tree expected = gimple_call_arg (stmt, 1);
3851 if (TREE_CODE (expected) != ADDR_EXPR
3852 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3853 return false;
/* And that variable must be a plain register-type automatic whose
   precision exactly fills its mode, so a VIEW_CONVERT_EXPR round-trip
   is lossless.  */
3855 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3856 if (!is_gimple_reg_type (etype)
3857 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3858 || TREE_THIS_VOLATILE (etype)
3859 || VECTOR_TYPE_P (etype)
3860 || TREE_CODE (etype) == COMPLEX_TYPE
3861 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3862 might not preserve all the bits. See PR71716. */
3863 || SCALAR_FLOAT_TYPE_P (etype)
3864 || maybe_ne (TYPE_PRECISION (etype),
3865 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3866 return false;
/* The weak flag must be a literal 0 or 1 so it can be encoded into the
   internal call's flag operand.  */
3868 tree weak = gimple_call_arg (stmt, 3);
3869 if (!integer_zerop (weak) && !integer_onep (weak))
3870 return false;
3872 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3873 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3874 machine_mode mode = TYPE_MODE (itype);
/* The target must implement a compare-and-swap for this mode.  */
3876 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3877 == CODE_FOR_nothing
3878 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3879 return false;
3881 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3882 return false;
3884 return true;
3887 /* Fold
3888 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3889 into
3890 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3891 i = IMAGPART_EXPR <t>;
3892 r = (_Bool) i;
3893 e = REALPART_EXPR <t>;
   The caller is expected to have checked the call with
   optimize_atomic_compare_exchange_p first.  */
3895 void
3896 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3898 gimple *stmt = gsi_stmt (*gsi);
3899 tree fndecl = gimple_call_fndecl (stmt);
3900 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3901 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3902 tree ctype = build_complex_type (itype);
3903 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3904 bool throws = false;
3905 edge e = NULL;
/* Load the current value of the "expected" variable by value.  */
3906 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3907 expected);
3908 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3909 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
/* Bit-cast it to the integral type of the builtin if needed.  */
3910 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3912 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3913 build1 (VIEW_CONVERT_EXPR, itype,
3914 gimple_assign_lhs (g)));
3915 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode weak flag and byte size into one operand: w * 256 + N.  */
3917 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3918 + int_size_in_bytes (itype);
3919 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3920 gimple_call_arg (stmt, 0),
3921 gimple_assign_lhs (g),
3922 gimple_call_arg (stmt, 2),
3923 build_int_cst (integer_type_node, flag),
3924 gimple_call_arg (stmt, 4),
3925 gimple_call_arg (stmt, 5));
3926 tree lhs = make_ssa_name (ctype);
3927 gimple_call_set_lhs (g, lhs);
/* Move the virtual operands of the original call to the new one.  */
3928 gimple_set_vdef (g, gimple_vdef (stmt));
3929 gimple_set_vuse (g, gimple_vuse (stmt));
3930 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3931 tree oldlhs = gimple_call_lhs (stmt);
/* If the original call could throw internally, the result-extraction
   statements cannot follow it in the same block; they must be placed
   on the fallthru edge instead.  */
3932 if (stmt_can_throw_internal (stmt))
3934 throws = true;
3935 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3937 gimple_call_set_nothrow (as_a <gcall *> (g),
3938 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3939 gimple_call_set_lhs (stmt, NULL_TREE);
3940 gsi_replace (gsi, g, true);
/* r = (_Bool) IMAGPART_EXPR <t>, but only if R was used.  */
3941 if (oldlhs)
3943 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3944 build1 (IMAGPART_EXPR, itype, lhs));
3945 if (throws)
3947 gsi_insert_on_edge_immediate (e, g);
3948 *gsi = gsi_for_stmt (g);
3950 else
3951 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3952 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3953 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>, converted back to E's type if needed.  */
3955 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3956 build1 (REALPART_EXPR, itype, lhs));
3957 if (throws && oldlhs == NULL_TREE)
3959 gsi_insert_on_edge_immediate (e, g);
3960 *gsi = gsi_for_stmt (g);
3962 else
3963 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3964 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3966 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3967 VIEW_CONVERT_EXPR,
3968 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3969 gimple_assign_lhs (g)));
3970 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3972 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3973 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave *GSI pointing at the initial load of EXPECTED.  */
3974 *gsi = gsiret;
3977 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3978 doesn't fit into TYPE. The test for overflow should be regardless of
3979 -fwrapv, and even for unsigned types. */
3981 bool
3982 arith_overflowed_p (enum tree_code code, const_tree type,
3983 const_tree arg0, const_tree arg1)
3985 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3986 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3987 widest2_int_cst;
3988 widest2_int warg0 = widest2_int_cst (arg0);
3989 widest2_int warg1 = widest2_int_cst (arg1);
3990 widest2_int wres;
3991 switch (code)
3993 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3994 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3995 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3996 default: gcc_unreachable ();
3998 signop sign = TYPE_SIGN (type);
3999 if (sign == UNSIGNED && wi::neg_p (wres))
4000 return true;
4001 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4004 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4005 The statement may be replaced by another statement, e.g., if the call
4006 simplifies to a constant value. Return true if any changes were made.
4007 It is assumed that the operands have been previously folded. */
4009 static bool
4010 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4012 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4013 tree callee;
4014 bool changed = false;
4015 unsigned i;
4017 /* Fold *& in call arguments. */
4018 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4019 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4021 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4022 if (tmp)
4024 gimple_call_set_arg (stmt, i, tmp);
4025 changed = true;
4029 /* Check for virtual calls that became direct calls. */
4030 callee = gimple_call_fn (stmt);
4031 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4033 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4035 if (dump_file && virtual_method_call_p (callee)
4036 && !possible_polymorphic_call_target_p
4037 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4038 (OBJ_TYPE_REF_EXPR (callee)))))
4040 fprintf (dump_file,
4041 "Type inheritance inconsistent devirtualization of ");
4042 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4043 fprintf (dump_file, " to ");
4044 print_generic_expr (dump_file, callee, TDF_SLIM);
4045 fprintf (dump_file, "\n");
4048 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4049 changed = true;
4051 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4053 bool final;
4054 vec <cgraph_node *>targets
4055 = possible_polymorphic_call_targets (callee, stmt, &final);
4056 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4058 tree lhs = gimple_call_lhs (stmt);
4059 if (dump_enabled_p ())
4061 location_t loc = gimple_location_safe (stmt);
4062 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4063 "folding virtual function call to %s\n",
4064 targets.length () == 1
4065 ? targets[0]->name ()
4066 : "__builtin_unreachable");
4068 if (targets.length () == 1)
4070 tree fndecl = targets[0]->decl;
4071 gimple_call_set_fndecl (stmt, fndecl);
4072 changed = true;
4073 /* If changing the call to __cxa_pure_virtual
4074 or similar noreturn function, adjust gimple_call_fntype
4075 too. */
4076 if (gimple_call_noreturn_p (stmt)
4077 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4078 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4079 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4080 == void_type_node))
4081 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4082 /* If the call becomes noreturn, remove the lhs. */
4083 if (lhs
4084 && gimple_call_noreturn_p (stmt)
4085 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4086 || should_remove_lhs_p (lhs)))
4088 if (TREE_CODE (lhs) == SSA_NAME)
4090 tree var = create_tmp_var (TREE_TYPE (lhs));
4091 tree def = get_or_create_ssa_default_def (cfun, var);
4092 gimple *new_stmt = gimple_build_assign (lhs, def);
4093 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4095 gimple_call_set_lhs (stmt, NULL_TREE);
4097 maybe_remove_unused_call_args (cfun, stmt);
4099 else
4101 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4102 gimple *new_stmt = gimple_build_call (fndecl, 0);
4103 gimple_set_location (new_stmt, gimple_location (stmt));
4104 /* If the call had a SSA name as lhs morph that into
4105 an uninitialized value. */
4106 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4108 tree var = create_tmp_var (TREE_TYPE (lhs));
4109 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4110 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4111 set_ssa_default_def (cfun, var, lhs);
4113 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4114 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4115 gsi_replace (gsi, new_stmt, false);
4116 return true;
4122 /* Check for indirect calls that became direct calls, and then
4123 no longer require a static chain. */
4124 if (gimple_call_chain (stmt))
4126 tree fn = gimple_call_fndecl (stmt);
4127 if (fn && !DECL_STATIC_CHAIN (fn))
4129 gimple_call_set_chain (stmt, NULL);
4130 changed = true;
4132 else
4134 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4135 if (tmp)
4137 gimple_call_set_chain (stmt, tmp);
4138 changed = true;
4143 if (inplace)
4144 return changed;
4146 /* Check for builtins that CCP can handle using information not
4147 available in the generic fold routines. */
4148 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4150 if (gimple_fold_builtin (gsi))
4151 changed = true;
4153 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4155 changed |= targetm.gimple_fold_builtin (gsi);
4157 else if (gimple_call_internal_p (stmt))
4159 enum tree_code subcode = ERROR_MARK;
4160 tree result = NULL_TREE;
4161 bool cplx_result = false;
4162 tree overflow = NULL_TREE;
4163 switch (gimple_call_internal_fn (stmt))
4165 case IFN_BUILTIN_EXPECT:
4166 result = fold_builtin_expect (gimple_location (stmt),
4167 gimple_call_arg (stmt, 0),
4168 gimple_call_arg (stmt, 1),
4169 gimple_call_arg (stmt, 2));
4170 break;
4171 case IFN_UBSAN_OBJECT_SIZE:
4173 tree offset = gimple_call_arg (stmt, 1);
4174 tree objsize = gimple_call_arg (stmt, 2);
4175 if (integer_all_onesp (objsize)
4176 || (TREE_CODE (offset) == INTEGER_CST
4177 && TREE_CODE (objsize) == INTEGER_CST
4178 && tree_int_cst_le (offset, objsize)))
4180 replace_call_with_value (gsi, NULL_TREE);
4181 return true;
4184 break;
4185 case IFN_UBSAN_PTR:
4186 if (integer_zerop (gimple_call_arg (stmt, 1)))
4188 replace_call_with_value (gsi, NULL_TREE);
4189 return true;
4191 break;
4192 case IFN_UBSAN_BOUNDS:
4194 tree index = gimple_call_arg (stmt, 1);
4195 tree bound = gimple_call_arg (stmt, 2);
4196 if (TREE_CODE (index) == INTEGER_CST
4197 && TREE_CODE (bound) == INTEGER_CST)
4199 index = fold_convert (TREE_TYPE (bound), index);
4200 if (TREE_CODE (index) == INTEGER_CST
4201 && tree_int_cst_le (index, bound))
4203 replace_call_with_value (gsi, NULL_TREE);
4204 return true;
4208 break;
4209 case IFN_GOACC_DIM_SIZE:
4210 case IFN_GOACC_DIM_POS:
4211 result = fold_internal_goacc_dim (stmt);
4212 break;
4213 case IFN_UBSAN_CHECK_ADD:
4214 subcode = PLUS_EXPR;
4215 break;
4216 case IFN_UBSAN_CHECK_SUB:
4217 subcode = MINUS_EXPR;
4218 break;
4219 case IFN_UBSAN_CHECK_MUL:
4220 subcode = MULT_EXPR;
4221 break;
4222 case IFN_ADD_OVERFLOW:
4223 subcode = PLUS_EXPR;
4224 cplx_result = true;
4225 break;
4226 case IFN_SUB_OVERFLOW:
4227 subcode = MINUS_EXPR;
4228 cplx_result = true;
4229 break;
4230 case IFN_MUL_OVERFLOW:
4231 subcode = MULT_EXPR;
4232 cplx_result = true;
4233 break;
4234 default:
4235 break;
4237 if (subcode != ERROR_MARK)
4239 tree arg0 = gimple_call_arg (stmt, 0);
4240 tree arg1 = gimple_call_arg (stmt, 1);
4241 tree type = TREE_TYPE (arg0);
4242 if (cplx_result)
4244 tree lhs = gimple_call_lhs (stmt);
4245 if (lhs == NULL_TREE)
4246 type = NULL_TREE;
4247 else
4248 type = TREE_TYPE (TREE_TYPE (lhs));
4250 if (type == NULL_TREE)
4252 /* x = y + 0; x = y - 0; x = y * 0; */
4253 else if (integer_zerop (arg1))
4254 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4255 /* x = 0 + y; x = 0 * y; */
4256 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4257 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4258 /* x = y - y; */
4259 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4260 result = integer_zero_node;
4261 /* x = y * 1; x = 1 * y; */
4262 else if (subcode == MULT_EXPR && integer_onep (arg1))
4263 result = arg0;
4264 else if (subcode == MULT_EXPR && integer_onep (arg0))
4265 result = arg1;
4266 else if (TREE_CODE (arg0) == INTEGER_CST
4267 && TREE_CODE (arg1) == INTEGER_CST)
4269 if (cplx_result)
4270 result = int_const_binop (subcode, fold_convert (type, arg0),
4271 fold_convert (type, arg1));
4272 else
4273 result = int_const_binop (subcode, arg0, arg1);
4274 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4276 if (cplx_result)
4277 overflow = build_one_cst (type);
4278 else
4279 result = NULL_TREE;
4282 if (result)
4284 if (result == integer_zero_node)
4285 result = build_zero_cst (type);
4286 else if (cplx_result && TREE_TYPE (result) != type)
4288 if (TREE_CODE (result) == INTEGER_CST)
4290 if (arith_overflowed_p (PLUS_EXPR, type, result,
4291 integer_zero_node))
4292 overflow = build_one_cst (type);
4294 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4295 && TYPE_UNSIGNED (type))
4296 || (TYPE_PRECISION (type)
4297 < (TYPE_PRECISION (TREE_TYPE (result))
4298 + (TYPE_UNSIGNED (TREE_TYPE (result))
4299 && !TYPE_UNSIGNED (type)))))
4300 result = NULL_TREE;
4301 if (result)
4302 result = fold_convert (type, result);
4307 if (result)
4309 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4310 result = drop_tree_overflow (result);
4311 if (cplx_result)
4313 if (overflow == NULL_TREE)
4314 overflow = build_zero_cst (TREE_TYPE (result));
4315 tree ctype = build_complex_type (TREE_TYPE (result));
4316 if (TREE_CODE (result) == INTEGER_CST
4317 && TREE_CODE (overflow) == INTEGER_CST)
4318 result = build_complex (ctype, result, overflow);
4319 else
4320 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4321 ctype, result, overflow);
4323 if (!update_call_from_tree (gsi, result))
4324 gimplify_and_update_call_from_tree (gsi, result);
4325 changed = true;
4329 return changed;
4333 /* Return true whether NAME has a use on STMT. */
4335 static bool
4336 has_use_on_stmt (tree name, gimple *stmt)
4338 imm_use_iterator iter;
4339 use_operand_p use_p;
4340 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4341 if (USE_STMT (use_p) == stmt)
4342 return true;
4343 return false;
4346 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4347 gimple_simplify.
4349 Replaces *GSI with the simplification result in RCODE and OPS
4350 and the associated statements in *SEQ. Does the replacement
4351 according to INPLACE and returns true if the operation succeeded. */
4353 static bool
4354 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4355 code_helper rcode, tree *ops,
4356 gimple_seq *seq, bool inplace)
4358 gimple *stmt = gsi_stmt (*gsi);
4360 /* Play safe and do not allow abnormals to be mentioned in
4361 newly created statements. See also maybe_push_res_to_seq.
4362 As an exception allow such uses if there was a use of the
4363 same SSA name on the old stmt. */
4364 if ((TREE_CODE (ops[0]) == SSA_NAME
4365 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4366 && !has_use_on_stmt (ops[0], stmt))
4367 || (ops[1]
4368 && TREE_CODE (ops[1]) == SSA_NAME
4369 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4370 && !has_use_on_stmt (ops[1], stmt))
4371 || (ops[2]
4372 && TREE_CODE (ops[2]) == SSA_NAME
4373 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4374 && !has_use_on_stmt (ops[2], stmt))
4375 || (COMPARISON_CLASS_P (ops[0])
4376 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4377 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4378 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4379 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4380 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4381 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4382 return false;
4384 /* Don't insert new statements when INPLACE is true, even if we could
4385 reuse STMT for the final statement. */
4386 if (inplace && !gimple_seq_empty_p (*seq))
4387 return false;
4389 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4391 gcc_assert (rcode.is_tree_code ());
4392 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4393 /* GIMPLE_CONDs condition may not throw. */
4394 && (!flag_exceptions
4395 || !cfun->can_throw_non_call_exceptions
4396 || !operation_could_trap_p (rcode,
4397 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4398 false, NULL_TREE)))
4399 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4400 else if (rcode == SSA_NAME)
4401 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4402 build_zero_cst (TREE_TYPE (ops[0])));
4403 else if (rcode == INTEGER_CST)
4405 if (integer_zerop (ops[0]))
4406 gimple_cond_make_false (cond_stmt);
4407 else
4408 gimple_cond_make_true (cond_stmt);
4410 else if (!inplace)
4412 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4413 ops, seq);
4414 if (!res)
4415 return false;
4416 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4417 build_zero_cst (TREE_TYPE (res)));
4419 else
4420 return false;
4421 if (dump_file && (dump_flags & TDF_DETAILS))
4423 fprintf (dump_file, "gimple_simplified to ");
4424 if (!gimple_seq_empty_p (*seq))
4425 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4426 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4427 0, TDF_SLIM);
4429 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4430 return true;
4432 else if (is_gimple_assign (stmt)
4433 && rcode.is_tree_code ())
4435 if (!inplace
4436 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4438 maybe_build_generic_op (rcode,
4439 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4440 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4441 if (dump_file && (dump_flags & TDF_DETAILS))
4443 fprintf (dump_file, "gimple_simplified to ");
4444 if (!gimple_seq_empty_p (*seq))
4445 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4446 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4447 0, TDF_SLIM);
4449 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4450 return true;
4453 else if (rcode.is_fn_code ()
4454 && gimple_call_combined_fn (stmt) == rcode)
4456 unsigned i;
4457 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4459 gcc_assert (ops[i] != NULL_TREE);
4460 gimple_call_set_arg (stmt, i, ops[i]);
4462 if (i < 3)
4463 gcc_assert (ops[i] == NULL_TREE);
4464 if (dump_file && (dump_flags & TDF_DETAILS))
4466 fprintf (dump_file, "gimple_simplified to ");
4467 if (!gimple_seq_empty_p (*seq))
4468 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4469 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4471 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4472 return true;
4474 else if (!inplace)
4476 if (gimple_has_lhs (stmt))
4478 tree lhs = gimple_get_lhs (stmt);
4479 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4480 ops, seq, lhs))
4481 return false;
4482 if (dump_file && (dump_flags & TDF_DETAILS))
4484 fprintf (dump_file, "gimple_simplified to ");
4485 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4487 gsi_replace_with_seq_vops (gsi, *seq);
4488 return true;
4490 else
4491 gcc_unreachable ();
4494 return false;
4497 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4499 static bool
4500 maybe_canonicalize_mem_ref_addr (tree *t)
4502 bool res = false;
4504 if (TREE_CODE (*t) == ADDR_EXPR)
4505 t = &TREE_OPERAND (*t, 0);
4507 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4508 generic vector extension. The actual vector referenced is
4509 view-converted to an array type for this purpose. If the index
4510 is constant the canonical representation in the middle-end is a
4511 BIT_FIELD_REF so re-write the former to the latter here. */
4512 if (TREE_CODE (*t) == ARRAY_REF
4513 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4514 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4515 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4517 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4518 if (VECTOR_TYPE_P (vtype))
4520 tree low = array_ref_low_bound (*t);
4521 if (TREE_CODE (low) == INTEGER_CST)
4523 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4525 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4526 wi::to_widest (low));
4527 idx = wi::mul (idx, wi::to_widest
4528 (TYPE_SIZE (TREE_TYPE (*t))));
4529 widest_int ext
4530 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4531 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4533 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4534 TREE_TYPE (*t),
4535 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4536 TYPE_SIZE (TREE_TYPE (*t)),
4537 wide_int_to_tree (bitsizetype, idx));
4538 res = true;
4545 while (handled_component_p (*t))
4546 t = &TREE_OPERAND (*t, 0);
4548 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4549 of invariant addresses into a SSA name MEM_REF address. */
4550 if (TREE_CODE (*t) == MEM_REF
4551 || TREE_CODE (*t) == TARGET_MEM_REF)
4553 tree addr = TREE_OPERAND (*t, 0);
4554 if (TREE_CODE (addr) == ADDR_EXPR
4555 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4556 || handled_component_p (TREE_OPERAND (addr, 0))))
4558 tree base;
4559 poly_int64 coffset;
4560 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4561 &coffset);
4562 if (!base)
4563 gcc_unreachable ();
4565 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4566 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4567 TREE_OPERAND (*t, 1),
4568 size_int (coffset));
4569 res = true;
4571 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4572 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4575 /* Canonicalize back MEM_REFs to plain reference trees if the object
4576 accessed is a decl that has the same access semantics as the MEM_REF. */
4577 if (TREE_CODE (*t) == MEM_REF
4578 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4579 && integer_zerop (TREE_OPERAND (*t, 1))
4580 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4582 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4583 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4584 if (/* Same volatile qualification. */
4585 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4586 /* Same TBAA behavior with -fstrict-aliasing. */
4587 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4588 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4589 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4590 /* Same alignment. */
4591 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4592 /* We have to look out here to not drop a required conversion
4593 from the rhs to the lhs if *t appears on the lhs or vice-versa
4594 if it appears on the rhs. Thus require strict type
4595 compatibility. */
4596 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4598 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4599 res = true;
4603 /* Canonicalize TARGET_MEM_REF in particular with respect to
4604 the indexes becoming constant. */
4605 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4607 tree tem = maybe_fold_tmr (*t);
4608 if (tem)
4610 *t = tem;
4611 res = true;
4615 return res;
4618 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4619 distinguishes both cases. */
4621 static bool
4622 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4624 bool changed = false;
4625 gimple *stmt = gsi_stmt (*gsi);
4626 bool nowarning = gimple_no_warning_p (stmt);
4627 unsigned i;
4628 fold_defer_overflow_warnings ();
4630 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4631 after propagation.
4632 ??? This shouldn't be done in generic folding but in the
4633 propagation helpers which also know whether an address was
4634 propagated.
4635 Also canonicalize operand order. */
4636 switch (gimple_code (stmt))
4638 case GIMPLE_ASSIGN:
4639 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4641 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4642 if ((REFERENCE_CLASS_P (*rhs)
4643 || TREE_CODE (*rhs) == ADDR_EXPR)
4644 && maybe_canonicalize_mem_ref_addr (rhs))
4645 changed = true;
4646 tree *lhs = gimple_assign_lhs_ptr (stmt);
4647 if (REFERENCE_CLASS_P (*lhs)
4648 && maybe_canonicalize_mem_ref_addr (lhs))
4649 changed = true;
4651 else
4653 /* Canonicalize operand order. */
4654 enum tree_code code = gimple_assign_rhs_code (stmt);
4655 if (TREE_CODE_CLASS (code) == tcc_comparison
4656 || commutative_tree_code (code)
4657 || commutative_ternary_tree_code (code))
4659 tree rhs1 = gimple_assign_rhs1 (stmt);
4660 tree rhs2 = gimple_assign_rhs2 (stmt);
4661 if (tree_swap_operands_p (rhs1, rhs2))
4663 gimple_assign_set_rhs1 (stmt, rhs2);
4664 gimple_assign_set_rhs2 (stmt, rhs1);
4665 if (TREE_CODE_CLASS (code) == tcc_comparison)
4666 gimple_assign_set_rhs_code (stmt,
4667 swap_tree_comparison (code));
4668 changed = true;
4672 break;
4673 case GIMPLE_CALL:
4675 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4677 tree *arg = gimple_call_arg_ptr (stmt, i);
4678 if (REFERENCE_CLASS_P (*arg)
4679 && maybe_canonicalize_mem_ref_addr (arg))
4680 changed = true;
4682 tree *lhs = gimple_call_lhs_ptr (stmt);
4683 if (*lhs
4684 && REFERENCE_CLASS_P (*lhs)
4685 && maybe_canonicalize_mem_ref_addr (lhs))
4686 changed = true;
4687 break;
4689 case GIMPLE_ASM:
4691 gasm *asm_stmt = as_a <gasm *> (stmt);
4692 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4694 tree link = gimple_asm_output_op (asm_stmt, i);
4695 tree op = TREE_VALUE (link);
4696 if (REFERENCE_CLASS_P (op)
4697 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4698 changed = true;
4700 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4702 tree link = gimple_asm_input_op (asm_stmt, i);
4703 tree op = TREE_VALUE (link);
4704 if ((REFERENCE_CLASS_P (op)
4705 || TREE_CODE (op) == ADDR_EXPR)
4706 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4707 changed = true;
4710 break;
4711 case GIMPLE_DEBUG:
4712 if (gimple_debug_bind_p (stmt))
4714 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4715 if (*val
4716 && (REFERENCE_CLASS_P (*val)
4717 || TREE_CODE (*val) == ADDR_EXPR)
4718 && maybe_canonicalize_mem_ref_addr (val))
4719 changed = true;
4721 break;
4722 case GIMPLE_COND:
4724 /* Canonicalize operand order. */
4725 tree lhs = gimple_cond_lhs (stmt);
4726 tree rhs = gimple_cond_rhs (stmt);
4727 if (tree_swap_operands_p (lhs, rhs))
4729 gcond *gc = as_a <gcond *> (stmt);
4730 gimple_cond_set_lhs (gc, rhs);
4731 gimple_cond_set_rhs (gc, lhs);
4732 gimple_cond_set_code (gc,
4733 swap_tree_comparison (gimple_cond_code (gc)));
4734 changed = true;
4737 default:;
4740 /* Dispatch to pattern-based folding. */
4741 if (!inplace
4742 || is_gimple_assign (stmt)
4743 || gimple_code (stmt) == GIMPLE_COND)
4745 gimple_seq seq = NULL;
4746 code_helper rcode;
4747 tree ops[3] = {};
4748 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4749 valueize, valueize))
4751 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4752 changed = true;
4753 else
4754 gimple_seq_discard (seq);
4758 stmt = gsi_stmt (*gsi);
4760 /* Fold the main computation performed by the statement. */
4761 switch (gimple_code (stmt))
4763 case GIMPLE_ASSIGN:
4765 /* Try to canonicalize for boolean-typed X the comparisons
4766 X == 0, X == 1, X != 0, and X != 1. */
4767 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4768 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4770 tree lhs = gimple_assign_lhs (stmt);
4771 tree op1 = gimple_assign_rhs1 (stmt);
4772 tree op2 = gimple_assign_rhs2 (stmt);
4773 tree type = TREE_TYPE (op1);
4775 /* Check whether the comparison operands are of the same boolean
4776 type as the result type is.
4777 Check that second operand is an integer-constant with value
4778 one or zero. */
4779 if (TREE_CODE (op2) == INTEGER_CST
4780 && (integer_zerop (op2) || integer_onep (op2))
4781 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4783 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4784 bool is_logical_not = false;
4786 /* X == 0 and X != 1 is a logical-not.of X
4787 X == 1 and X != 0 is X */
4788 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4789 || (cmp_code == NE_EXPR && integer_onep (op2)))
4790 is_logical_not = true;
4792 if (is_logical_not == false)
4793 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4794 /* Only for one-bit precision typed X the transformation
4795 !X -> ~X is valied. */
4796 else if (TYPE_PRECISION (type) == 1)
4797 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4798 /* Otherwise we use !X -> X ^ 1. */
4799 else
4800 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4801 build_int_cst (type, 1));
4802 changed = true;
4803 break;
4807 unsigned old_num_ops = gimple_num_ops (stmt);
4808 tree lhs = gimple_assign_lhs (stmt);
4809 tree new_rhs = fold_gimple_assign (gsi);
4810 if (new_rhs
4811 && !useless_type_conversion_p (TREE_TYPE (lhs),
4812 TREE_TYPE (new_rhs)))
4813 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4814 if (new_rhs
4815 && (!inplace
4816 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4818 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4819 changed = true;
4821 break;
4824 case GIMPLE_CALL:
4825 changed |= gimple_fold_call (gsi, inplace);
4826 break;
4828 case GIMPLE_ASM:
4829 /* Fold *& in asm operands. */
4831 gasm *asm_stmt = as_a <gasm *> (stmt);
4832 size_t noutputs;
4833 const char **oconstraints;
4834 const char *constraint;
4835 bool allows_mem, allows_reg;
4837 noutputs = gimple_asm_noutputs (asm_stmt);
4838 oconstraints = XALLOCAVEC (const char *, noutputs);
4840 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4842 tree link = gimple_asm_output_op (asm_stmt, i);
4843 tree op = TREE_VALUE (link);
4844 oconstraints[i]
4845 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4846 if (REFERENCE_CLASS_P (op)
4847 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4849 TREE_VALUE (link) = op;
4850 changed = true;
4853 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4855 tree link = gimple_asm_input_op (asm_stmt, i);
4856 tree op = TREE_VALUE (link);
4857 constraint
4858 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4859 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4860 oconstraints, &allows_mem, &allows_reg);
4861 if (REFERENCE_CLASS_P (op)
4862 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4863 != NULL_TREE)
4865 TREE_VALUE (link) = op;
4866 changed = true;
4870 break;
4872 case GIMPLE_DEBUG:
4873 if (gimple_debug_bind_p (stmt))
4875 tree val = gimple_debug_bind_get_value (stmt);
4876 if (val
4877 && REFERENCE_CLASS_P (val))
4879 tree tem = maybe_fold_reference (val, false);
4880 if (tem)
4882 gimple_debug_bind_set_value (stmt, tem);
4883 changed = true;
4886 else if (val
4887 && TREE_CODE (val) == ADDR_EXPR)
4889 tree ref = TREE_OPERAND (val, 0);
4890 tree tem = maybe_fold_reference (ref, false);
4891 if (tem)
4893 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4894 gimple_debug_bind_set_value (stmt, tem);
4895 changed = true;
4899 break;
4901 case GIMPLE_RETURN:
4903 greturn *ret_stmt = as_a<greturn *> (stmt);
4904 tree ret = gimple_return_retval(ret_stmt);
4906 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4908 tree val = valueize (ret);
4909 if (val && val != ret
4910 && may_propagate_copy (ret, val))
4912 gimple_return_set_retval (ret_stmt, val);
4913 changed = true;
4917 break;
4919 default:;
4922 stmt = gsi_stmt (*gsi);
4924 /* Fold *& on the lhs. */
4925 if (gimple_has_lhs (stmt))
4927 tree lhs = gimple_get_lhs (stmt);
4928 if (lhs && REFERENCE_CLASS_P (lhs))
4930 tree new_lhs = maybe_fold_reference (lhs, true);
4931 if (new_lhs)
4933 gimple_set_lhs (stmt, new_lhs);
4934 changed = true;
4939 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4940 return changed;
4943 /* Valueziation callback that ends up not following SSA edges. */
4945 tree
4946 no_follow_ssa_edges (tree)
4948 return NULL_TREE;
4951 /* Valueization callback that ends up following single-use SSA edges only. */
4953 tree
4954 follow_single_use_edges (tree val)
4956 if (TREE_CODE (val) == SSA_NAME
4957 && !has_single_use (val))
4958 return NULL_TREE;
4959 return val;
4962 /* Fold the statement pointed to by GSI. In some cases, this function may
4963 replace the whole statement with a new one. Returns true iff folding
4964 makes any changes.
4965 The statement pointed to by GSI should be in valid gimple form but may
4966 be in unfolded state as resulting from for example constant propagation
4967 which can produce *&x = 0. */
4969 bool
4970 fold_stmt (gimple_stmt_iterator *gsi)
4972 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4975 bool
4976 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4978 return fold_stmt_1 (gsi, false, valueize);
4981 /* Perform the minimal folding on statement *GSI. Only operations like
4982 *&x created by constant propagation are handled. The statement cannot
4983 be replaced with a new one. Return true if the statement was
4984 changed, false otherwise.
4985 The statement *GSI should be in valid gimple form but may
4986 be in unfolded state as resulting from for example constant propagation
4987 which can produce *&x = 0. */
4989 bool
4990 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4992 gimple *stmt = gsi_stmt (*gsi);
4993 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4994 gcc_assert (gsi_stmt (*gsi) == stmt);
4995 return changed;
4998 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4999 if EXPR is null or we don't know how.
5000 If non-null, the result always has boolean type. */
5002 static tree
5003 canonicalize_bool (tree expr, bool invert)
5005 if (!expr)
5006 return NULL_TREE;
5007 else if (invert)
5009 if (integer_nonzerop (expr))
5010 return boolean_false_node;
5011 else if (integer_zerop (expr))
5012 return boolean_true_node;
5013 else if (TREE_CODE (expr) == SSA_NAME)
5014 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5015 build_int_cst (TREE_TYPE (expr), 0));
5016 else if (COMPARISON_CLASS_P (expr))
5017 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5018 boolean_type_node,
5019 TREE_OPERAND (expr, 0),
5020 TREE_OPERAND (expr, 1));
5021 else
5022 return NULL_TREE;
5024 else
5026 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5027 return expr;
5028 if (integer_nonzerop (expr))
5029 return boolean_true_node;
5030 else if (integer_zerop (expr))
5031 return boolean_false_node;
5032 else if (TREE_CODE (expr) == SSA_NAME)
5033 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5034 build_int_cst (TREE_TYPE (expr), 0));
5035 else if (COMPARISON_CLASS_P (expr))
5036 return fold_build2 (TREE_CODE (expr),
5037 boolean_type_node,
5038 TREE_OPERAND (expr, 0),
5039 TREE_OPERAND (expr, 1));
5040 else
5041 return NULL_TREE;
5045 /* Check to see if a boolean expression EXPR is logically equivalent to the
5046 comparison (OP1 CODE OP2). Check for various identities involving
5047 SSA_NAMEs. */
5049 static bool
5050 same_bool_comparison_p (const_tree expr, enum tree_code code,
5051 const_tree op1, const_tree op2)
5053 gimple *s;
5055 /* The obvious case. */
5056 if (TREE_CODE (expr) == code
5057 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5058 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5059 return true;
5061 /* Check for comparing (name, name != 0) and the case where expr
5062 is an SSA_NAME with a definition matching the comparison. */
5063 if (TREE_CODE (expr) == SSA_NAME
5064 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5066 if (operand_equal_p (expr, op1, 0))
5067 return ((code == NE_EXPR && integer_zerop (op2))
5068 || (code == EQ_EXPR && integer_nonzerop (op2)));
5069 s = SSA_NAME_DEF_STMT (expr);
5070 if (is_gimple_assign (s)
5071 && gimple_assign_rhs_code (s) == code
5072 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5073 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5074 return true;
5077 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5078 of name is a comparison, recurse. */
5079 if (TREE_CODE (op1) == SSA_NAME
5080 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5082 s = SSA_NAME_DEF_STMT (op1);
5083 if (is_gimple_assign (s)
5084 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5086 enum tree_code c = gimple_assign_rhs_code (s);
5087 if ((c == NE_EXPR && integer_zerop (op2))
5088 || (c == EQ_EXPR && integer_nonzerop (op2)))
5089 return same_bool_comparison_p (expr, c,
5090 gimple_assign_rhs1 (s),
5091 gimple_assign_rhs2 (s));
5092 if ((c == EQ_EXPR && integer_zerop (op2))
5093 || (c == NE_EXPR && integer_nonzerop (op2)))
5094 return same_bool_comparison_p (expr,
5095 invert_tree_comparison (c, false),
5096 gimple_assign_rhs1 (s),
5097 gimple_assign_rhs2 (s));
5100 return false;
5103 /* Check to see if two boolean expressions OP1 and OP2 are logically
5104 equivalent. */
5106 static bool
5107 same_bool_result_p (const_tree op1, const_tree op2)
5109 /* Simple cases first. */
5110 if (operand_equal_p (op1, op2, 0))
5111 return true;
5113 /* Check the cases where at least one of the operands is a comparison.
5114 These are a bit smarter than operand_equal_p in that they apply some
5115 identifies on SSA_NAMEs. */
5116 if (COMPARISON_CLASS_P (op2)
5117 && same_bool_comparison_p (op1, TREE_CODE (op2),
5118 TREE_OPERAND (op2, 0),
5119 TREE_OPERAND (op2, 1)))
5120 return true;
5121 if (COMPARISON_CLASS_P (op1)
5122 && same_bool_comparison_p (op2, TREE_CODE (op1),
5123 TREE_OPERAND (op1, 0),
5124 TREE_OPERAND (op1, 1)))
5125 return true;
5127 /* Default case. */
5128 return false;
5131 /* Forward declarations for some mutually recursive functions. */
5133 static tree
5134 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5135 enum tree_code code2, tree op2a, tree op2b);
5136 static tree
5137 and_var_with_comparison (tree var, bool invert,
5138 enum tree_code code2, tree op2a, tree op2b);
5139 static tree
5140 and_var_with_comparison_1 (gimple *stmt,
5141 enum tree_code code2, tree op2a, tree op2b);
5142 static tree
5143 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5144 enum tree_code code2, tree op2a, tree op2b);
5145 static tree
5146 or_var_with_comparison (tree var, bool invert,
5147 enum tree_code code2, tree op2a, tree op2b);
5148 static tree
5149 or_var_with_comparison_1 (gimple *stmt,
5150 enum tree_code code2, tree op2a, tree op2b);
5152 /* Helper function for and_comparisons_1: try to simplify the AND of the
5153 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5154 If INVERT is true, invert the value of the VAR before doing the AND.
5155 Return NULL_EXPR if we can't simplify this to a single expression. */
5157 static tree
5158 and_var_with_comparison (tree var, bool invert,
5159 enum tree_code code2, tree op2a, tree op2b)
5161 tree t;
5162 gimple *stmt = SSA_NAME_DEF_STMT (var);
5164 /* We can only deal with variables whose definitions are assignments. */
5165 if (!is_gimple_assign (stmt))
5166 return NULL_TREE;
5168 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5169 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5170 Then we only have to consider the simpler non-inverted cases. */
5171 if (invert)
5172 t = or_var_with_comparison_1 (stmt,
5173 invert_tree_comparison (code2, false),
5174 op2a, op2b);
5175 else
5176 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5177 return canonicalize_bool (t, invert);
5180 /* Try to simplify the AND of the ssa variable defined by the assignment
5181 STMT with the comparison specified by (OP2A CODE2 OP2B).
5182 Return NULL_EXPR if we can't simplify this to a single expression. */
5184 static tree
5185 and_var_with_comparison_1 (gimple *stmt,
5186 enum tree_code code2, tree op2a, tree op2b)
5188 tree var = gimple_assign_lhs (stmt);
5189 tree true_test_var = NULL_TREE;
5190 tree false_test_var = NULL_TREE;
5191 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5193 /* Check for identities like (var AND (var == 0)) => false. */
5194 if (TREE_CODE (op2a) == SSA_NAME
5195 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5197 if ((code2 == NE_EXPR && integer_zerop (op2b))
5198 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5200 true_test_var = op2a;
5201 if (var == true_test_var)
5202 return var;
5204 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5205 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5207 false_test_var = op2a;
5208 if (var == false_test_var)
5209 return boolean_false_node;
5213 /* If the definition is a comparison, recurse on it. */
5214 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5216 tree t = and_comparisons_1 (innercode,
5217 gimple_assign_rhs1 (stmt),
5218 gimple_assign_rhs2 (stmt),
5219 code2,
5220 op2a,
5221 op2b);
5222 if (t)
5223 return t;
5226 /* If the definition is an AND or OR expression, we may be able to
5227 simplify by reassociating. */
5228 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5229 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5231 tree inner1 = gimple_assign_rhs1 (stmt);
5232 tree inner2 = gimple_assign_rhs2 (stmt);
5233 gimple *s;
5234 tree t;
5235 tree partial = NULL_TREE;
5236 bool is_and = (innercode == BIT_AND_EXPR);
5238 /* Check for boolean identities that don't require recursive examination
5239 of inner1/inner2:
5240 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5241 inner1 AND (inner1 OR inner2) => inner1
5242 !inner1 AND (inner1 AND inner2) => false
5243 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5244 Likewise for similar cases involving inner2. */
5245 if (inner1 == true_test_var)
5246 return (is_and ? var : inner1);
5247 else if (inner2 == true_test_var)
5248 return (is_and ? var : inner2);
5249 else if (inner1 == false_test_var)
5250 return (is_and
5251 ? boolean_false_node
5252 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5253 else if (inner2 == false_test_var)
5254 return (is_and
5255 ? boolean_false_node
5256 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5258 /* Next, redistribute/reassociate the AND across the inner tests.
5259 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5260 if (TREE_CODE (inner1) == SSA_NAME
5261 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5262 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5263 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5264 gimple_assign_rhs1 (s),
5265 gimple_assign_rhs2 (s),
5266 code2, op2a, op2b)))
5268 /* Handle the AND case, where we are reassociating:
5269 (inner1 AND inner2) AND (op2a code2 op2b)
5270 => (t AND inner2)
5271 If the partial result t is a constant, we win. Otherwise
5272 continue on to try reassociating with the other inner test. */
5273 if (is_and)
5275 if (integer_onep (t))
5276 return inner2;
5277 else if (integer_zerop (t))
5278 return boolean_false_node;
5281 /* Handle the OR case, where we are redistributing:
5282 (inner1 OR inner2) AND (op2a code2 op2b)
5283 => (t OR (inner2 AND (op2a code2 op2b))) */
5284 else if (integer_onep (t))
5285 return boolean_true_node;
5287 /* Save partial result for later. */
5288 partial = t;
5291 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5292 if (TREE_CODE (inner2) == SSA_NAME
5293 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5294 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5295 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5296 gimple_assign_rhs1 (s),
5297 gimple_assign_rhs2 (s),
5298 code2, op2a, op2b)))
5300 /* Handle the AND case, where we are reassociating:
5301 (inner1 AND inner2) AND (op2a code2 op2b)
5302 => (inner1 AND t) */
5303 if (is_and)
5305 if (integer_onep (t))
5306 return inner1;
5307 else if (integer_zerop (t))
5308 return boolean_false_node;
5309 /* If both are the same, we can apply the identity
5310 (x AND x) == x. */
5311 else if (partial && same_bool_result_p (t, partial))
5312 return t;
5315 /* Handle the OR case, where we are redistributing:
5316 (inner1 OR inner2) AND (op2a code2 op2b)
5317 => (t OR (inner1 AND (op2a code2 op2b)))
5318 => (t OR partial) */
5319 else
5321 if (integer_onep (t))
5322 return boolean_true_node;
5323 else if (partial)
5325 /* We already got a simplification for the other
5326 operand to the redistributed OR expression. The
5327 interesting case is when at least one is false.
5328 Or, if both are the same, we can apply the identity
5329 (x OR x) == x. */
5330 if (integer_zerop (partial))
5331 return t;
5332 else if (integer_zerop (t))
5333 return partial;
5334 else if (same_bool_result_p (t, partial))
5335 return t;
5340 return NULL_TREE;
5343 /* Try to simplify the AND of two comparisons defined by
5344 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5345 If this can be done without constructing an intermediate value,
5346 return the resulting tree; otherwise NULL_TREE is returned.
5347 This function is deliberately asymmetric as it recurses on SSA_DEFs
5348 in the first comparison but not the second. */
5350 static tree
5351 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5352 enum tree_code code2, tree op2a, tree op2b)
5354 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5356 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5357 if (operand_equal_p (op1a, op2a, 0)
5358 && operand_equal_p (op1b, op2b, 0))
5360 /* Result will be either NULL_TREE, or a combined comparison. */
5361 tree t = combine_comparisons (UNKNOWN_LOCATION,
5362 TRUTH_ANDIF_EXPR, code1, code2,
5363 truth_type, op1a, op1b);
5364 if (t)
5365 return t;
5368 /* Likewise the swapped case of the above. */
5369 if (operand_equal_p (op1a, op2b, 0)
5370 && operand_equal_p (op1b, op2a, 0))
5372 /* Result will be either NULL_TREE, or a combined comparison. */
5373 tree t = combine_comparisons (UNKNOWN_LOCATION,
5374 TRUTH_ANDIF_EXPR, code1,
5375 swap_tree_comparison (code2),
5376 truth_type, op1a, op1b);
5377 if (t)
5378 return t;
5381 /* If both comparisons are of the same value against constants, we might
5382 be able to merge them. */
5383 if (operand_equal_p (op1a, op2a, 0)
5384 && TREE_CODE (op1b) == INTEGER_CST
5385 && TREE_CODE (op2b) == INTEGER_CST)
/* Three-way ordering of the two constants: CMP is negative, zero or
   positive as op1b sorts before, equal to, or after op2b.  */
5387 int cmp = tree_int_cst_compare (op1b, op2b);
5389 /* If we have (op1a == op1b), we should either be able to
5390 return that or FALSE, depending on whether the constant op1b
5391 also satisfies the other comparison against op2b. */
5392 if (code1 == EQ_EXPR)
5394 bool done = true;
5395 bool val;
5396 switch (code2)
5398 case EQ_EXPR: val = (cmp == 0); break;
5399 case NE_EXPR: val = (cmp != 0); break;
5400 case LT_EXPR: val = (cmp < 0); break;
5401 case GT_EXPR: val = (cmp > 0); break;
5402 case LE_EXPR: val = (cmp <= 0); break;
5403 case GE_EXPR: val = (cmp >= 0); break;
5404 default: done = false;
5406 if (done)
5408 if (val)
5409 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5410 else
5411 return boolean_false_node;
5414 /* Likewise if the second comparison is an == comparison. */
5415 else if (code2 == EQ_EXPR)
5417 bool done = true;
5418 bool val;
5419 switch (code1)
5421 case EQ_EXPR: val = (cmp == 0); break;
5422 case NE_EXPR: val = (cmp != 0); break;
5423 case LT_EXPR: val = (cmp > 0); break;
5424 case GT_EXPR: val = (cmp < 0); break;
5425 case LE_EXPR: val = (cmp >= 0); break;
5426 case GE_EXPR: val = (cmp <= 0); break;
5427 default: done = false;
5429 if (done)
5431 if (val)
5432 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5433 else
5434 return boolean_false_node;
5438 /* Same business with inequality tests. */
5439 else if (code1 == NE_EXPR)
5441 bool val;
5442 switch (code2)
5444 case EQ_EXPR: val = (cmp != 0); break;
5445 case NE_EXPR: val = (cmp == 0); break;
5446 case LT_EXPR: val = (cmp >= 0); break;
5447 case GT_EXPR: val = (cmp <= 0); break;
5448 case LE_EXPR: val = (cmp > 0); break;
5449 case GE_EXPR: val = (cmp < 0); break;
5450 default:
5451 val = false;
5453 if (val)
5454 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5456 else if (code2 == NE_EXPR)
5458 bool val;
5459 switch (code1)
5461 case EQ_EXPR: val = (cmp == 0); break;
5462 case NE_EXPR: val = (cmp != 0); break;
5463 case LT_EXPR: val = (cmp <= 0); break;
5464 case GT_EXPR: val = (cmp >= 0); break;
5465 case LE_EXPR: val = (cmp < 0); break;
5466 case GE_EXPR: val = (cmp > 0); break;
5467 default:
5468 val = false;
5470 if (val)
5471 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5474 /* Chose the more restrictive of two < or <= comparisons. */
5475 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5476 && (code2 == LT_EXPR || code2 == LE_EXPR))
5478 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5479 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5480 else
5481 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5484 /* Likewise chose the more restrictive of two > or >= comparisons. */
5485 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5486 && (code2 == GT_EXPR || code2 == GE_EXPR))
5488 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5489 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5490 else
5491 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5494 /* Check for singleton ranges. */
5495 else if (cmp == 0
5496 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5497 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5498 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5500 /* Check for disjoint ranges. */
5501 else if (cmp <= 0
5502 && (code1 == LT_EXPR || code1 == LE_EXPR)
5503 && (code2 == GT_EXPR || code2 == GE_EXPR))
5504 return boolean_false_node;
5505 else if (cmp >= 0
5506 && (code1 == GT_EXPR || code1 == GE_EXPR)
5507 && (code2 == LT_EXPR || code2 == LE_EXPR))
5508 return boolean_false_node;
5511 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5512 NAME's definition is a truth value. See if there are any simplifications
5513 that can be done against the NAME's definition. */
5514 if (TREE_CODE (op1a) == SSA_NAME
5515 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5516 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests that NAME is logically
   false (NAME == 0 or NAME != 1) rather than true.  */
5518 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5519 || (code1 == NE_EXPR && integer_onep (op1b)));
5520 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5521 switch (gimple_code (stmt))
5523 case GIMPLE_ASSIGN:
5524 /* Try to simplify by copy-propagating the definition. */
5525 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5527 case GIMPLE_PHI:
5528 /* If every argument to the PHI produces the same result when
5529 ANDed with the second comparison, we win.
5530 Do not do this unless the type is bool since we need a bool
5531 result here anyway. */
5532 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5534 tree result = NULL_TREE;
5535 unsigned i;
5536 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5538 tree arg = gimple_phi_arg_def (stmt, i);
5540 /* If this PHI has itself as an argument, ignore it.
5541 If all the other args produce the same result,
5542 we're still OK. */
5543 if (arg == gimple_phi_result (stmt))
5544 continue;
5545 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant PHI argument collapses the AND either to false or to
   the second comparison alone; RESULT must agree across arguments.  */
5547 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5549 if (!result)
5550 result = boolean_false_node;
5551 else if (!integer_zerop (result))
5552 return NULL_TREE;
5554 else if (!result)
5555 result = fold_build2 (code2, boolean_type_node,
5556 op2a, op2b);
5557 else if (!same_bool_comparison_p (result,
5558 code2, op2a, op2b))
5559 return NULL_TREE;
5561 else if (TREE_CODE (arg) == SSA_NAME
5562 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5564 tree temp;
5565 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5566 /* In simple cases we can look through PHI nodes,
5567 but we have to be careful with loops.
5568 See PR49073. */
5569 if (! dom_info_available_p (CDI_DOMINATORS)
5570 || gimple_bb (def_stmt) == gimple_bb (stmt)
5571 || dominated_by_p (CDI_DOMINATORS,
5572 gimple_bb (def_stmt),
5573 gimple_bb (stmt)))
5574 return NULL_TREE;
5575 temp = and_var_with_comparison (arg, invert, code2,
5576 op2a, op2b);
5577 if (!temp)
5578 return NULL_TREE;
5579 else if (!result)
5580 result = temp;
5581 else if (!same_bool_result_p (result, temp))
5582 return NULL_TREE;
5584 else
5585 return NULL_TREE;
5587 return result;
5590 default:
5591 break;
5594 return NULL_TREE;
5597 /* Try to simplify the AND of two comparisons, specified by
5598 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5599 If this can be simplified to a single expression (without requiring
5600 introducing more SSA variables to hold intermediate values),
5601 return the resulting tree. Otherwise return NULL_TREE.
5602 If the result expression is non-null, it has boolean type. */
5604 tree
5605 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5606 enum tree_code code2, tree op2a, tree op2b)
5608 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5609 if (t)
5610 return t;
5611 else
5612 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5615 /* Helper function for or_comparisons_1: try to simplify the OR of the
5616 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5617 If INVERT is true, invert the value of VAR before doing the OR.
5618 Return NULL_EXPR if we can't simplify this to a single expression. */
5620 static tree
5621 or_var_with_comparison (tree var, bool invert,
5622 enum tree_code code2, tree op2a, tree op2b)
5624 tree t;
5625 gimple *stmt = SSA_NAME_DEF_STMT (var);
5627 /* We can only deal with variables whose definitions are assignments. */
5628 if (!is_gimple_assign (stmt))
5629 return NULL_TREE;
5631 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5632 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5633 Then we only have to consider the simpler non-inverted cases. */
5634 if (invert)
5635 t = and_var_with_comparison_1 (stmt,
5636 invert_tree_comparison (code2, false),
5637 op2a, op2b);
5638 else
5639 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5640 return canonicalize_bool (t, invert);
5643 /* Try to simplify the OR of the ssa variable defined by the assignment
5644 STMT with the comparison specified by (OP2A CODE2 OP2B).
5645 Return NULL_EXPR if we can't simplify this to a single expression. */
5647 static tree
5648 or_var_with_comparison_1 (gimple *stmt,
5649 enum tree_code code2, tree op2a, tree op2b)
5651 tree var = gimple_assign_lhs (stmt);
5652 tree true_test_var = NULL_TREE;
5653 tree false_test_var = NULL_TREE;
5654 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5656 /* Check for identities like (var OR (var != 0)) => true . */
5657 if (TREE_CODE (op2a) == SSA_NAME
5658 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5660 if ((code2 == NE_EXPR && integer_zerop (op2b))
5661 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5663 true_test_var = op2a;
5664 if (var == true_test_var)
5665 return var;
5667 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5668 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5670 false_test_var = op2a;
5671 if (var == false_test_var)
5672 return boolean_true_node;
5676 /* If the definition is a comparison, recurse on it. */
5677 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5679 tree t = or_comparisons_1 (innercode,
5680 gimple_assign_rhs1 (stmt),
5681 gimple_assign_rhs2 (stmt),
5682 code2,
5683 op2a,
5684 op2b);
5685 if (t)
5686 return t;
5689 /* If the definition is an AND or OR expression, we may be able to
5690 simplify by reassociating. */
5691 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5692 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5694 tree inner1 = gimple_assign_rhs1 (stmt);
5695 tree inner2 = gimple_assign_rhs2 (stmt);
5696 gimple *s;
5697 tree t;
/* PARTIAL caches a non-constant partial simplification of one inner
   test for reuse when simplifying against the other inner test.  */
5698 tree partial = NULL_TREE;
5699 bool is_or = (innercode == BIT_IOR_EXPR);
5701 /* Check for boolean identities that don't require recursive examination
5702 of inner1/inner2:
5703 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5704 inner1 OR (inner1 AND inner2) => inner1
5705 !inner1 OR (inner1 OR inner2) => true
5706 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
   Likewise for similar cases involving inner2.  */
5708 if (inner1 == true_test_var)
5709 return (is_or ? var : inner1);
5710 else if (inner2 == true_test_var)
5711 return (is_or ? var : inner2);
5712 else if (inner1 == false_test_var)
5713 return (is_or
5714 ? boolean_true_node
5715 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5716 else if (inner2 == false_test_var)
5717 return (is_or
5718 ? boolean_true_node
5719 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5721 /* Next, redistribute/reassociate the OR across the inner tests.
5722 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5723 if (TREE_CODE (inner1) == SSA_NAME
5724 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5725 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5726 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5727 gimple_assign_rhs1 (s),
5728 gimple_assign_rhs2 (s),
5729 code2, op2a, op2b)))
5731 /* Handle the OR case, where we are reassociating:
5732 (inner1 OR inner2) OR (op2a code2 op2b)
5733 => (t OR inner2)
5734 If the partial result t is a constant, we win. Otherwise
5735 continue on to try reassociating with the other inner test. */
5736 if (is_or)
5738 if (integer_onep (t))
5739 return boolean_true_node;
5740 else if (integer_zerop (t))
5741 return inner2;
5744 /* Handle the AND case, where we are redistributing:
5745 (inner1 AND inner2) OR (op2a code2 op2b)
5746 => (t AND (inner2 OR (op2a code op2b))) */
5747 else if (integer_zerop (t))
5748 return boolean_false_node;
5750 /* Save partial result for later. */
5751 partial = t;
5754 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5755 if (TREE_CODE (inner2) == SSA_NAME
5756 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5757 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5758 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5759 gimple_assign_rhs1 (s),
5760 gimple_assign_rhs2 (s),
5761 code2, op2a, op2b)))
5763 /* Handle the OR case, where we are reassociating:
5764 (inner1 OR inner2) OR (op2a code2 op2b)
5765 => (inner1 OR t)
5766 => (t OR partial) */
5767 if (is_or)
5769 if (integer_zerop (t))
5770 return inner1;
5771 else if (integer_onep (t))
5772 return boolean_true_node;
5773 /* If both are the same, we can apply the identity
5774 (x OR x) == x. */
5775 else if (partial && same_bool_result_p (t, partial))
5776 return t;
5779 /* Handle the AND case, where we are redistributing:
5780 (inner1 AND inner2) OR (op2a code2 op2b)
5781 => (t AND (inner1 OR (op2a code2 op2b)))
5782 => (t AND partial) */
5783 else
5785 if (integer_zerop (t))
5786 return boolean_false_node;
5787 else if (partial)
5789 /* We already got a simplification for the other
5790 operand to the redistributed AND expression. The
5791 interesting case is when at least one is true.
5792 Or, if both are the same, we can apply the identity
5793 (x AND x) == x. */
5794 if (integer_onep (partial))
5795 return t;
5796 else if (integer_onep (t))
5797 return partial;
5798 else if (same_bool_result_p (t, partial))
5799 return t;
5804 return NULL_TREE;
5807 /* Try to simplify the OR of two comparisons defined by
5808 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5809 If this can be done without constructing an intermediate value,
5810 return the resulting tree; otherwise NULL_TREE is returned.
5811 This function is deliberately asymmetric as it recurses on SSA_DEFs
5812 in the first comparison but not the second. */
5814 static tree
5815 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5816 enum tree_code code2, tree op2a, tree op2b)
5818 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5820 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5821 if (operand_equal_p (op1a, op2a, 0)
5822 && operand_equal_p (op1b, op2b, 0))
5824 /* Result will be either NULL_TREE, or a combined comparison. */
5825 tree t = combine_comparisons (UNKNOWN_LOCATION,
5826 TRUTH_ORIF_EXPR, code1, code2,
5827 truth_type, op1a, op1b);
5828 if (t)
5829 return t;
5832 /* Likewise the swapped case of the above. */
5833 if (operand_equal_p (op1a, op2b, 0)
5834 && operand_equal_p (op1b, op2a, 0))
5836 /* Result will be either NULL_TREE, or a combined comparison. */
5837 tree t = combine_comparisons (UNKNOWN_LOCATION,
5838 TRUTH_ORIF_EXPR, code1,
5839 swap_tree_comparison (code2),
5840 truth_type, op1a, op1b);
5841 if (t)
5842 return t;
5845 /* If both comparisons are of the same value against constants, we might
5846 be able to merge them. */
5847 if (operand_equal_p (op1a, op2a, 0)
5848 && TREE_CODE (op1b) == INTEGER_CST
5849 && TREE_CODE (op2b) == INTEGER_CST)
/* Three-way ordering of the two constants: CMP is negative, zero or
   positive as op1b sorts before, equal to, or after op2b.  */
5851 int cmp = tree_int_cst_compare (op1b, op2b);
5853 /* If we have (op1a != op1b), we should either be able to
5854 return that or TRUE, depending on whether the constant op1b
5855 also satisfies the other comparison against op2b. */
5856 if (code1 == NE_EXPR)
5858 bool done = true;
5859 bool val;
5860 switch (code2)
5862 case EQ_EXPR: val = (cmp == 0); break;
5863 case NE_EXPR: val = (cmp != 0); break;
5864 case LT_EXPR: val = (cmp < 0); break;
5865 case GT_EXPR: val = (cmp > 0); break;
5866 case LE_EXPR: val = (cmp <= 0); break;
5867 case GE_EXPR: val = (cmp >= 0); break;
5868 default: done = false;
5870 if (done)
5872 if (val)
5873 return boolean_true_node;
5874 else
5875 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5878 /* Likewise if the second comparison is a != comparison. */
5879 else if (code2 == NE_EXPR)
5881 bool done = true;
5882 bool val;
5883 switch (code1)
5885 case EQ_EXPR: val = (cmp == 0); break;
5886 case NE_EXPR: val = (cmp != 0); break;
5887 case LT_EXPR: val = (cmp > 0); break;
5888 case GT_EXPR: val = (cmp < 0); break;
5889 case LE_EXPR: val = (cmp >= 0); break;
5890 case GE_EXPR: val = (cmp <= 0); break;
5891 default: done = false;
5893 if (done)
5895 if (val)
5896 return boolean_true_node;
5897 else
5898 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5902 /* See if an equality test is redundant with the other comparison. */
5903 else if (code1 == EQ_EXPR)
5905 bool val;
5906 switch (code2)
5908 case EQ_EXPR: val = (cmp == 0); break;
5909 case NE_EXPR: val = (cmp != 0); break;
5910 case LT_EXPR: val = (cmp < 0); break;
5911 case GT_EXPR: val = (cmp > 0); break;
5912 case LE_EXPR: val = (cmp <= 0); break;
5913 case GE_EXPR: val = (cmp >= 0); break;
5914 default:
5915 val = false;
5917 if (val)
5918 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5920 else if (code2 == EQ_EXPR)
5922 bool val;
5923 switch (code1)
5925 case EQ_EXPR: val = (cmp == 0); break;
5926 case NE_EXPR: val = (cmp != 0); break;
5927 case LT_EXPR: val = (cmp > 0); break;
5928 case GT_EXPR: val = (cmp < 0); break;
5929 case LE_EXPR: val = (cmp >= 0); break;
5930 case GE_EXPR: val = (cmp <= 0); break;
5931 default:
5932 val = false;
5934 if (val)
5935 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5938 /* Chose the less restrictive of two < or <= comparisons. */
5939 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5940 && (code2 == LT_EXPR || code2 == LE_EXPR))
5942 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5943 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5944 else
5945 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5948 /* Likewise chose the less restrictive of two > or >= comparisons. */
5949 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5950 && (code2 == GT_EXPR || code2 == GE_EXPR))
5952 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5953 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5954 else
5955 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5958 /* Check for singleton ranges. */
5959 else if (cmp == 0
5960 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5961 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5962 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b)
5964 /* Check for less/greater pairs that don't restrict the range at all. */
5965 else if (cmp >= 0
5966 && (code1 == LT_EXPR || code1 == LE_EXPR)
5967 && (code2 == GT_EXPR || code2 == GE_EXPR))
5968 return boolean_true_node;
5969 else if (cmp <= 0
5970 && (code1 == GT_EXPR || code1 == GE_EXPR)
5971 && (code2 == LT_EXPR || code2 == LE_EXPR))
5972 return boolean_true_node;
5975 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5976 NAME's definition is a truth value. See if there are any simplifications
5977 that can be done against the NAME's definition. */
5978 if (TREE_CODE (op1a) == SSA_NAME
5979 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5980 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests that NAME is logically
   false (NAME == 0 or NAME != 1) rather than true.  */
5982 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5983 || (code1 == NE_EXPR && integer_onep (op1b)));
5984 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5985 switch (gimple_code (stmt))
5987 case GIMPLE_ASSIGN:
5988 /* Try to simplify by copy-propagating the definition. */
5989 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5991 case GIMPLE_PHI:
5992 /* If every argument to the PHI produces the same result when
5993 ORed with the second comparison, we win.
5994 Do not do this unless the type is bool since we need a bool
5995 result here anyway. */
5996 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5998 tree result = NULL_TREE;
5999 unsigned i;
6000 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6002 tree arg = gimple_phi_arg_def (stmt, i);
6004 /* If this PHI has itself as an argument, ignore it.
6005 If all the other args produce the same result,
6006 we're still OK. */
6007 if (arg == gimple_phi_result (stmt))
6008 continue;
6009 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant PHI argument collapses the OR either to true or to the
   second comparison alone; RESULT must agree across arguments.  */
6011 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6013 if (!result)
6014 result = boolean_true_node;
6015 else if (!integer_onep (result))
6016 return NULL_TREE;
6018 else if (!result)
6019 result = fold_build2 (code2, boolean_type_node,
6020 op2a, op2b);
6021 else if (!same_bool_comparison_p (result,
6022 code2, op2a, op2b))
6023 return NULL_TREE;
6025 else if (TREE_CODE (arg) == SSA_NAME
6026 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6028 tree temp;
6029 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6030 /* In simple cases we can look through PHI nodes,
6031 but we have to be careful with loops.
6032 See PR49073. */
6033 if (! dom_info_available_p (CDI_DOMINATORS)
6034 || gimple_bb (def_stmt) == gimple_bb (stmt)
6035 || dominated_by_p (CDI_DOMINATORS,
6036 gimple_bb (def_stmt),
6037 gimple_bb (stmt)))
6038 return NULL_TREE;
6039 temp = or_var_with_comparison (arg, invert, code2,
6040 op2a, op2b);
6041 if (!temp)
6042 return NULL_TREE;
6043 else if (!result)
6044 result = temp;
6045 else if (!same_bool_result_p (result, temp))
6046 return NULL_TREE;
6048 else
6049 return NULL_TREE;
6051 return result;
6054 default:
6055 break;
6058 return NULL_TREE;
6061 /* Try to simplify the OR of two comparisons, specified by
6062 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6063 If this can be simplified to a single expression (without requiring
6064 introducing more SSA variables to hold intermediate values),
6065 return the resulting tree. Otherwise return NULL_TREE.
6066 If the result expression is non-null, it has boolean type. */
6068 tree
6069 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6070 enum tree_code code2, tree op2a, tree op2b)
6072 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6073 if (t)
6074 return t;
6075 else
6076 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6080 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6082 Either NULL_TREE, a simplified but non-constant or a constant
6083 is returned.
6085 ??? This should go into a gimple-fold-inline.h file to be eventually
6086 privatized with the single valueize function used in the various TUs
6087 to avoid the indirect function call overhead. */
6089 tree
6090 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6091 tree (*gvalueize) (tree))
6093 code_helper rcode;
6094 tree ops[3] = {};
6095 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6096 edges if there are intermediate VARYING defs. For this reason
6097 do not follow SSA edges here even though SCCVN can technically
6098 just deal fine with that. */
/* First give the generic match-and-simplify machinery a chance.  */
6099 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6101 tree res = NULL_TREE;
6102 if (gimple_simplified_result_is_gimple_val (rcode, ops))
6103 res = ops[0];
6104 else if (mprts_hook)
6105 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6106 if (res)
6108 if (dump_file && dump_flags & TDF_DETAILS)
6110 fprintf (dump_file, "Match-and-simplified ");
6111 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6112 fprintf (dump_file, " to ");
6113 print_generic_expr (dump_file, res);
6114 fprintf (dump_file, "\n");
6116 return res;
/* Otherwise fall back to the statement-kind-specific handling below.  */
6120 location_t loc = gimple_location (stmt);
6121 switch (gimple_code (stmt))
6123 case GIMPLE_ASSIGN:
6125 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6127 switch (get_gimple_rhs_class (subcode))
6129 case GIMPLE_SINGLE_RHS:
6131 tree rhs = gimple_assign_rhs1 (stmt);
6132 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6134 if (TREE_CODE (rhs) == SSA_NAME)
6136 /* If the RHS is an SSA_NAME, return its known constant value,
6137 if any. */
6138 return (*valueize) (rhs);
6140 /* Handle propagating invariant addresses into address
6141 operations. */
6142 else if (TREE_CODE (rhs) == ADDR_EXPR
6143 && !is_gimple_min_invariant (rhs))
6145 poly_int64 offset = 0;
6146 tree base;
6147 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6148 &offset,
6149 valueize);
6150 if (base
6151 && (CONSTANT_CLASS_P (base)
6152 || decl_address_invariant_p (base)))
6153 return build_invariant_address (TREE_TYPE (rhs),
6154 base, offset);
6156 else if (TREE_CODE (rhs) == CONSTRUCTOR
6157 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6158 && known_eq (CONSTRUCTOR_NELTS (rhs),
6159 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6161 unsigned i, nelts;
6162 tree val;
6164 nelts = CONSTRUCTOR_NELTS (rhs);
6165 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6166 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6168 val = (*valueize) (val);
6169 if (TREE_CODE (val) == INTEGER_CST
6170 || TREE_CODE (val) == REAL_CST
6171 || TREE_CODE (val) == FIXED_CST)
6172 vec.quick_push (val);
6173 else
6174 return NULL_TREE;
6177 return vec.build ();
6179 if (subcode == OBJ_TYPE_REF)
6181 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6182 /* If callee is constant, we can fold away the wrapper. */
6183 if (is_gimple_min_invariant (val))
6184 return val;
6187 if (kind == tcc_reference)
6189 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6190 || TREE_CODE (rhs) == REALPART_EXPR
6191 || TREE_CODE (rhs) == IMAGPART_EXPR)
6192 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6194 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6195 return fold_unary_loc (EXPR_LOCATION (rhs),
6196 TREE_CODE (rhs),
6197 TREE_TYPE (rhs), val);
6199 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6200 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6202 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6203 return fold_ternary_loc (EXPR_LOCATION (rhs),
6204 TREE_CODE (rhs),
6205 TREE_TYPE (rhs), val,
6206 TREE_OPERAND (rhs, 1),
6207 TREE_OPERAND (rhs, 2));
6209 else if (TREE_CODE (rhs) == MEM_REF
6210 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6212 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6213 if (TREE_CODE (val) == ADDR_EXPR
6214 && is_gimple_min_invariant (val))
6216 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6217 unshare_expr (val),
6218 TREE_OPERAND (rhs, 1));
6219 if (tem)
6220 rhs = tem;
6223 return fold_const_aggregate_ref_1 (rhs, valueize);
6225 else if (kind == tcc_declaration)
6226 return get_symbol_constant_value (rhs);
6227 return rhs;
6230 case GIMPLE_UNARY_RHS:
6231 return NULL_TREE;
6233 case GIMPLE_BINARY_RHS:
6234 /* Translate &x + CST into an invariant form suitable for
6235 further propagation. */
6236 if (subcode == POINTER_PLUS_EXPR)
6238 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6239 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6240 if (TREE_CODE (op0) == ADDR_EXPR
6241 && TREE_CODE (op1) == INTEGER_CST)
6243 tree off = fold_convert (ptr_type_node, op1);
6244 return build_fold_addr_expr_loc
6245 (loc,
6246 fold_build2 (MEM_REF,
6247 TREE_TYPE (TREE_TYPE (op0)),
6248 unshare_expr (op0), off));
6251 /* Canonicalize bool != 0 and bool == 0 appearing after
6252 valueization. While gimple_simplify handles this
6253 it can get confused by the ~X == 1 -> X == 0 transform
6254 which we can't reduce to a SSA name or a constant
6255 (and we have no way to tell gimple_simplify to not
6256 consider those transforms in the first place). */
6257 else if (subcode == EQ_EXPR
6258 || subcode == NE_EXPR)
6260 tree lhs = gimple_assign_lhs (stmt);
6261 tree op0 = gimple_assign_rhs1 (stmt);
6262 if (useless_type_conversion_p (TREE_TYPE (lhs),
6263 TREE_TYPE (op0)))
6265 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6266 op0 = (*valueize) (op0);
6267 if (TREE_CODE (op0) == INTEGER_CST)
6268 std::swap (op0, op1);
6269 if (TREE_CODE (op1) == INTEGER_CST
6270 && ((subcode == NE_EXPR && integer_zerop (op1))
6271 || (subcode == EQ_EXPR && integer_onep (op1))))
6272 return op0;
6275 return NULL_TREE;
6277 case GIMPLE_TERNARY_RHS:
6279 /* Handle ternary operators that can appear in GIMPLE form. */
6280 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6281 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6282 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6283 return fold_ternary_loc (loc, subcode,
6284 gimple_expr_type (stmt), op0, op1, op2);
6287 default:
6288 gcc_unreachable ();
6292 case GIMPLE_CALL:
6294 tree fn;
6295 gcall *call_stmt = as_a <gcall *> (stmt);
6297 if (gimple_call_internal_p (stmt))
6299 enum tree_code subcode = ERROR_MARK;
6300 switch (gimple_call_internal_fn (stmt))
6302 case IFN_UBSAN_CHECK_ADD:
6303 subcode = PLUS_EXPR;
6304 break;
6305 case IFN_UBSAN_CHECK_SUB:
6306 subcode = MINUS_EXPR;
6307 break;
6308 case IFN_UBSAN_CHECK_MUL:
6309 subcode = MULT_EXPR;
6310 break;
6311 case IFN_BUILTIN_EXPECT:
6313 tree arg0 = gimple_call_arg (stmt, 0);
6314 tree op0 = (*valueize) (arg0);
6315 if (TREE_CODE (op0) == INTEGER_CST)
6316 return op0;
6317 return NULL_TREE;
6319 default:
6320 return NULL_TREE;
6322 tree arg0 = gimple_call_arg (stmt, 0);
6323 tree arg1 = gimple_call_arg (stmt, 1);
6324 tree op0 = (*valueize) (arg0);
6325 tree op1 = (*valueize) (arg1);
6327 if (TREE_CODE (op0) != INTEGER_CST
6328 || TREE_CODE (op1) != INTEGER_CST)
6330 switch (subcode)
6332 case MULT_EXPR:
6333 /* x * 0 = 0 * x = 0 without overflow. */
6334 if (integer_zerop (op0) || integer_zerop (op1))
6335 return build_zero_cst (TREE_TYPE (arg0));
6336 break;
6337 case MINUS_EXPR:
6338 /* y - y = 0 without overflow. */
6339 if (operand_equal_p (op0, op1, 0))
6340 return build_zero_cst (TREE_TYPE (arg0));
6341 break;
6342 default:
6343 break;
6346 tree res
6347 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6348 if (res
6349 && TREE_CODE (res) == INTEGER_CST
6350 && !TREE_OVERFLOW (res))
6351 return res;
6352 return NULL_TREE;
6355 fn = (*valueize) (gimple_call_fn (stmt));
6356 if (TREE_CODE (fn) == ADDR_EXPR
6357 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6358 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6359 && gimple_builtin_call_types_compatible_p (stmt,
6360 TREE_OPERAND (fn, 0)))
6362 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6363 tree retval;
6364 unsigned i;
6365 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6366 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6367 retval = fold_builtin_call_array (loc,
6368 gimple_call_return_type (call_stmt),
6369 fn, gimple_call_num_args (stmt), args);
6370 if (retval)
6372 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6373 STRIP_NOPS (retval);
6374 retval = fold_convert (gimple_call_return_type (call_stmt),
6375 retval);
6377 return retval;
6379 return NULL_TREE;
6382 default:
6383 return NULL_TREE;
6387 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6388 Returns NULL_TREE if folding to a constant is not possible, otherwise
6389 returns a constant according to is_gimple_min_invariant. */
6391 tree
6392 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6394 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6395 if (res && is_gimple_min_invariant (res))
6396 return res;
6397 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.

   VALUEIZE, if non-NULL, is used to look through SSA names.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into BIT_OFFSET; give up if the
	 combined offset does not fit a signed HOST_WIDE_INT.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a known address lets us find the underlying decl.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the underlying storage.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Punt when the access size is unknown or variable.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL is passed through to
   fold_ctor_reference for canonicalization of the element value.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is passed through to fold_ctor_reference.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the fields of the constructor looking for one that overlaps
     the requested [OFFSET, OFFSET+SIZE) bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and
   size POLY_SIZE to the memory at bit POLY_OFFSET.  Returns NULL_TREE
   when the reference cannot be folded to a constant.  */

tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Dispatch CONSTRUCTOR folding on the aggregate kind.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Folding a volatile reference would lose the access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Rebase the index on the array low bound, sign-extended
		 in the precision of the index type.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_const_aggregate_ref_1, but without SSA name valueization;
   folds T using only statically known constant initializers.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET (bytes) and TOKEN (slot index) into a bit offset
     into the vtable array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6900 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6901 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6902 KNOWN_BINFO carries the binfo describing the true type of
6903 OBJ_TYPE_REF_OBJECT(REF).
6904 Set CAN_REFER if non-NULL to false if method
6905 is not referable or if the virtual table is ill-formed (such as rewriten
6906 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6908 tree
6909 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6910 bool *can_refer)
6912 unsigned HOST_WIDE_INT offset;
6913 tree v;
6915 v = BINFO_VTABLE (known_binfo);
6916 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6917 if (!v)
6918 return NULL_TREE;
6920 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6922 if (can_refer)
6923 *can_refer = false;
6924 return NULL_TREE;
6926 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up for non-pointers and for may-alias-all references whose
     semantics an indirection must preserve.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the extracted element lies within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7050 /* Return true if CODE is an operation that when operating on signed
7051 integer types involves undefined behavior on overflow and the
7052 operation can be expressed with unsigned arithmetic. */
7054 bool
7055 arith_code_with_undefined_signed_overflow (tree_code code)
7057 switch (code)
7059 case PLUS_EXPR:
7060 case MINUS_EXPR:
7061 case MULT_EXPR:
7062 case NEGATE_EXPR:
7063 case POINTER_PLUS_EXPR:
7064 return true;
7065 default:
7066 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1 and up; op 0 is the lhs) to the
     unsigned type, emitting any needed conversion statements.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Retarget STMT at a fresh unsigned SSA name.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR becomes plain PLUS_EXPR in the unsigned domain.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original lhs and type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7108 /* The valueization hook we use for the gimple_build API simplification.
7109 This makes us match fold_buildN behavior by only combining with
7110 statements in the sequence(s) we are currently building. */
7112 static tree
7113 gimple_build_valueize (tree op)
7115 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7116 return op;
7117 return NULL_TREE;
7120 /* Build the expression CODE OP0 of type TYPE with location LOC,
7121 simplifying it first if possible. Returns the built
7122 expression value and appends statements possibly defining it
7123 to SEQ. */
7125 tree
7126 gimple_build (gimple_seq *seq, location_t loc,
7127 enum tree_code code, tree type, tree op0)
7129 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7130 if (!res)
7132 res = create_tmp_reg_or_ssa_name (type);
7133 gimple *stmt;
7134 if (code == REALPART_EXPR
7135 || code == IMAGPART_EXPR
7136 || code == VIEW_CONVERT_EXPR)
7137 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7138 else
7139 stmt = gimple_build_assign (res, code, op0);
7140 gimple_set_location (stmt, loc);
7141 gimple_seq_add_stmt_without_update (seq, stmt);
7143 return res;
7146 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7147 simplifying it first if possible. Returns the built
7148 expression value and appends statements possibly defining it
7149 to SEQ. */
7151 tree
7152 gimple_build (gimple_seq *seq, location_t loc,
7153 enum tree_code code, tree type, tree op0, tree op1)
7155 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7156 if (!res)
7158 res = create_tmp_reg_or_ssa_name (type);
7159 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7160 gimple_set_location (stmt, loc);
7161 gimple_seq_add_stmt_without_update (seq, stmt);
7163 return res;
7166 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7167 simplifying it first if possible. Returns the built
7168 expression value and appends statements possibly defining it
7169 to SEQ. */
7171 tree
7172 gimple_build (gimple_seq *seq, location_t loc,
7173 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7175 tree res = gimple_simplify (code, type, op0, op1, op2,
7176 seq, gimple_build_valueize);
7177 if (!res)
7179 res = create_tmp_reg_or_ssa_name (type);
7180 gimple *stmt;
7181 if (code == BIT_FIELD_REF)
7182 stmt = gimple_build_assign (res, code,
7183 build3 (code, type, op0, op1, op2));
7184 else
7185 stmt = gimple_build_assign (res, code, op0, op1, op2);
7186 gimple_set_location (stmt, loc);
7187 gimple_seq_add_stmt_without_update (seq, stmt);
7189 return res;
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      /* No simplification; emit a call to the builtin's decl.  */
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      /* No simplification; emit a call to the builtin's decl.  */
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      /* No simplification; emit a call to the builtin's decl.  */
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
7272 /* Build the conversion (TYPE) OP with a result of type TYPE
7273 with location LOC if such conversion is neccesary in GIMPLE,
7274 simplifying it first.
7275 Returns the built expression value and appends
7276 statements possibly defining it to SEQ. */
7278 tree
7279 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7281 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7282 return op;
7283 return gimple_build (seq, loc, NOP_EXPR, type, op);
7286 /* Build the conversion (ptrofftype) OP with a result of a type
7287 compatible with ptrofftype with location LOC if such conversion
7288 is neccesary in GIMPLE, simplifying it first.
7289 Returns the built expression value and appends
7290 statements possibly defining it to SEQ. */
7292 tree
7293 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7295 if (ptrofftype_p (TREE_TYPE (op)))
7296 return op;
7297 return gimple_convert (seq, loc, sizetype, op);
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  /* Variable-length vectors with a non-constant element must use
     VEC_DUPLICATE_EXPR; a CONSTRUCTOR cannot describe them.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* If any encoded element is non-constant we must emit a CONSTRUCTOR
     assignment rather than a VECTOR_CST.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All elements constant; build the VECTOR_CST directly.  */
  return builder->build ();
}
7363 /* Return true if the result of assignment STMT is known to be non-negative.
7364 If the return value is based on the assumption that signed overflow is
7365 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7366 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7368 static bool
7369 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7370 int depth)
7372 enum tree_code code = gimple_assign_rhs_code (stmt);
7373 switch (get_gimple_rhs_class (code))
7375 case GIMPLE_UNARY_RHS:
7376 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7377 gimple_expr_type (stmt),
7378 gimple_assign_rhs1 (stmt),
7379 strict_overflow_p, depth);
7380 case GIMPLE_BINARY_RHS:
7381 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7382 gimple_expr_type (stmt),
7383 gimple_assign_rhs1 (stmt),
7384 gimple_assign_rhs2 (stmt),
7385 strict_overflow_p, depth);
7386 case GIMPLE_TERNARY_RHS:
7387 return false;
7388 case GIMPLE_SINGLE_RHS:
7389 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7390 strict_overflow_p, depth);
7391 case GIMPLE_INVALID_RHS:
7392 break;
7394 gcc_unreachable ();
7397 /* Return true if return value of call STMT is known to be non-negative.
7398 If the return value is based on the assumption that signed overflow is
7399 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7400 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7402 static bool
7403 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7404 int depth)
7406 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7407 gimple_call_arg (stmt, 0) : NULL_TREE;
7408 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7409 gimple_call_arg (stmt, 1) : NULL_TREE;
7411 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7412 gimple_call_combined_fn (stmt),
7413 arg0,
7414 arg1,
7415 strict_overflow_p, depth);
7418 /* Return true if return value of call STMT is known to be non-negative.
7419 If the return value is based on the assumption that signed overflow is
7420 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7421 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7423 static bool
7424 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7425 int depth)
7427 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7429 tree arg = gimple_phi_arg_def (stmt, i);
7430 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7431 return false;
7433 return true;
7436 /* Return true if STMT is known to compute a non-negative value.
7437 If the return value is based on the assumption that signed overflow is
7438 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7439 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7441 bool
7442 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7443 int depth)
7445 switch (gimple_code (stmt))
7447 case GIMPLE_ASSIGN:
7448 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7449 depth);
7450 case GIMPLE_CALL:
7451 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7452 depth);
7453 case GIMPLE_PHI:
7454 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7455 depth);
7456 default:
7457 return false;
7461 /* Return true if the floating-point value computed by assignment STMT
7462 is known to have an integer value. We also allow +Inf, -Inf and NaN
7463 to be considered integer values. Return false for signaling NaN.
7465 DEPTH is the current nesting depth of the query. */
7467 static bool
7468 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7470 enum tree_code code = gimple_assign_rhs_code (stmt);
7471 switch (get_gimple_rhs_class (code))
7473 case GIMPLE_UNARY_RHS:
7474 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7475 gimple_assign_rhs1 (stmt), depth);
7476 case GIMPLE_BINARY_RHS:
7477 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7478 gimple_assign_rhs1 (stmt),
7479 gimple_assign_rhs2 (stmt), depth);
7480 case GIMPLE_TERNARY_RHS:
7481 return false;
7482 case GIMPLE_SINGLE_RHS:
7483 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7484 case GIMPLE_INVALID_RHS:
7485 break;
7487 gcc_unreachable ();
7490 /* Return true if the floating-point value computed by call STMT is known
7491 to have an integer value. We also allow +Inf, -Inf and NaN to be
7492 considered integer values. Return false for signaling NaN.
7494 DEPTH is the current nesting depth of the query. */
7496 static bool
7497 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7499 tree arg0 = (gimple_call_num_args (stmt) > 0
7500 ? gimple_call_arg (stmt, 0)
7501 : NULL_TREE);
7502 tree arg1 = (gimple_call_num_args (stmt) > 1
7503 ? gimple_call_arg (stmt, 1)
7504 : NULL_TREE);
7505 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7506 arg0, arg1, depth);
7509 /* Return true if the floating-point result of phi STMT is known to have
7510 an integer value. We also allow +Inf, -Inf and NaN to be considered
7511 integer values. Return false for signaling NaN.
7513 DEPTH is the current nesting depth of the query. */
7515 static bool
7516 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7518 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7520 tree arg = gimple_phi_arg_def (stmt, i);
7521 if (!integer_valued_real_single_p (arg, depth + 1))
7522 return false;
7524 return true;
7527 /* Return true if the floating-point value computed by STMT is known
7528 to have an integer value. We also allow +Inf, -Inf and NaN to be
7529 considered integer values. Return false for signaling NaN.
7531 DEPTH is the current nesting depth of the query. */
7533 bool
7534 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7536 switch (gimple_code (stmt))
7538 case GIMPLE_ASSIGN:
7539 return gimple_assign_integer_valued_real_p (stmt, depth);
7540 case GIMPLE_CALL:
7541 return gimple_call_integer_valued_real_p (stmt, depth);
7542 case GIMPLE_PHI:
7543 return gimple_phi_integer_valued_real_p (stmt, depth);
7544 default:
7545 return false;