poly_int: get_addr_base_and_unit_offset
[official-gcc.git] / gcc / gimple-fold.c
blob7b3c6db919f85c4b582e6d376658c0b6d17479fe
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 location_t loc = gimple_location_safe (stmt);
351 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
352 "resolving virtual function address "
353 "reference to function %s\n",
354 targets.length () == 1
355 ? targets[0]->name ()
356 : "NULL");
358 if (targets.length () == 1)
360 val = fold_convert (TREE_TYPE (val),
361 build_fold_addr_expr_loc
362 (loc, targets[0]->decl));
363 STRIP_USELESS_TYPE_CONVERSION (val);
365 else
366 /* We can not use __builtin_unreachable here because it
367 can not have address taken. */
368 val = build_int_cst (TREE_TYPE (val), 0);
369 return val;
374 else if (TREE_CODE (rhs) == ADDR_EXPR)
376 tree ref = TREE_OPERAND (rhs, 0);
377 tree tem = maybe_fold_reference (ref, true);
378 if (tem
379 && TREE_CODE (tem) == MEM_REF
380 && integer_zerop (TREE_OPERAND (tem, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
382 else if (tem)
383 result = fold_convert (TREE_TYPE (rhs),
384 build_fold_addr_expr_loc (loc, tem));
385 else if (TREE_CODE (ref) == MEM_REF
386 && integer_zerop (TREE_OPERAND (ref, 1)))
387 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
389 if (result)
391 /* Strip away useless type conversions. Both the
392 NON_LVALUE_EXPR that may have been added by fold, and
393 "useless" type conversions that might now be apparent
394 due to propagation. */
395 STRIP_USELESS_TYPE_CONVERSION (result);
397 if (result != rhs && valid_gimple_rhs_p (result))
398 return result;
402 else if (TREE_CODE (rhs) == CONSTRUCTOR
403 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
405 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
406 unsigned i;
407 tree val;
409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
410 if (! CONSTANT_CLASS_P (val))
411 return NULL_TREE;
413 return build_vector_from_ctor (TREE_TYPE (rhs),
414 CONSTRUCTOR_ELTS (rhs));
417 else if (DECL_P (rhs))
418 return get_symbol_constant_value (rhs);
420 break;
422 case GIMPLE_UNARY_RHS:
423 break;
425 case GIMPLE_BINARY_RHS:
426 break;
428 case GIMPLE_TERNARY_RHS:
429 result = fold_ternary_loc (loc, subcode,
430 TREE_TYPE (gimple_assign_lhs (stmt)),
431 gimple_assign_rhs1 (stmt),
432 gimple_assign_rhs2 (stmt),
433 gimple_assign_rhs3 (stmt));
435 if (result)
437 STRIP_USELESS_TYPE_CONVERSION (result);
438 if (valid_gimple_rhs_p (result))
439 return result;
441 break;
443 case GIMPLE_INVALID_RHS:
444 gcc_unreachable ();
447 return NULL_TREE;
451 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
452 adjusting the replacement stmts location and virtual operands.
453 If the statement has a lhs the last stmt in the sequence is expected
454 to assign to that lhs. */
456 static void
457 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
459 gimple *stmt = gsi_stmt (*si_p);
461 if (gimple_has_location (stmt))
462 annotate_all_with_location (stmts, gimple_location (stmt));
464 /* First iterate over the replacement statements backward, assigning
465 virtual operands to their defining statements. */
466 gimple *laststore = NULL;
467 for (gimple_stmt_iterator i = gsi_last (stmts);
468 !gsi_end_p (i); gsi_prev (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 if ((gimple_assign_single_p (new_stmt)
472 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
473 || (is_gimple_call (new_stmt)
474 && (gimple_call_flags (new_stmt)
475 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
477 tree vdef;
478 if (!laststore)
479 vdef = gimple_vdef (stmt);
480 else
481 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
482 gimple_set_vdef (new_stmt, vdef);
483 if (vdef && TREE_CODE (vdef) == SSA_NAME)
484 SSA_NAME_DEF_STMT (vdef) = new_stmt;
485 laststore = new_stmt;
489 /* Second iterate over the statements forward, assigning virtual
490 operands to their uses. */
491 tree reaching_vuse = gimple_vuse (stmt);
492 for (gimple_stmt_iterator i = gsi_start (stmts);
493 !gsi_end_p (i); gsi_next (&i))
495 gimple *new_stmt = gsi_stmt (i);
496 /* If the new statement possibly has a VUSE, update it with exact SSA
497 name we know will reach this one. */
498 if (gimple_has_mem_ops (new_stmt))
499 gimple_set_vuse (new_stmt, reaching_vuse);
500 gimple_set_modified (new_stmt, true);
501 if (gimple_vdef (new_stmt))
502 reaching_vuse = gimple_vdef (new_stmt);
505 /* If the new sequence does not do a store release the virtual
506 definition of the original statement. */
507 if (reaching_vuse
508 && reaching_vuse == gimple_vuse (stmt))
510 tree vdef = gimple_vdef (stmt);
511 if (vdef
512 && TREE_CODE (vdef) == SSA_NAME)
514 unlink_stmt_vdef (stmt);
515 release_ssa_name (vdef);
519 /* Finally replace the original statement with the sequence. */
520 gsi_replace_with_seq (si_p, stmts, false);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536 tree lhs;
537 gimple *stmt, *new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = NULL;
541 stmt = gsi_stmt (*si_p);
543 gcc_assert (is_gimple_call (stmt));
545 push_gimplify_context (gimple_in_ssa_p (cfun));
547 lhs = gimple_call_lhs (stmt);
548 if (lhs == NULL_TREE)
550 gimplify_and_add (expr, &stmts);
551 /* We can end up with folding a memcpy of an empty class assignment
552 which gets optimized away by C++ gimplification. */
553 if (gimple_seq_empty_p (stmts))
555 pop_gimplify_context (NULL);
556 if (gimple_in_ssa_p (cfun))
558 unlink_stmt_vdef (stmt);
559 release_defs (stmt);
561 gsi_replace (si_p, gimple_build_nop (), false);
562 return;
565 else
567 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
568 new_stmt = gimple_build_assign (lhs, tmp);
569 i = gsi_last (stmts);
570 gsi_insert_after_without_update (&i, new_stmt,
571 GSI_CONTINUE_LINKING);
574 pop_gimplify_context (NULL);
576 gsi_replace_with_seq_vops (si_p, stmts);
580 /* Replace the call at *GSI with the gimple value VAL. */
582 void
583 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 gimple *stmt = gsi_stmt (*gsi);
586 tree lhs = gimple_call_lhs (stmt);
587 gimple *repl;
588 if (lhs)
590 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 val = fold_convert (TREE_TYPE (lhs), val);
592 repl = gimple_build_assign (lhs, val);
594 else
595 repl = gimple_build_nop ();
596 tree vdef = gimple_vdef (stmt);
597 if (vdef && TREE_CODE (vdef) == SSA_NAME)
599 unlink_stmt_vdef (stmt);
600 release_ssa_name (vdef);
602 gsi_replace (gsi, repl, false);
605 /* Replace the call at *GSI with the new call REPL and fold that
606 again. */
608 static void
609 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 gimple *stmt = gsi_stmt (*gsi);
612 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
613 gimple_set_location (repl, gimple_location (stmt));
614 if (gimple_vdef (stmt)
615 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617 gimple_set_vdef (repl, gimple_vdef (stmt));
618 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620 if (gimple_vuse (stmt))
621 gimple_set_vuse (repl, gimple_vuse (stmt));
622 gsi_replace (gsi, repl, false);
623 fold_stmt (gsi);
626 /* Return true if VAR is a VAR_DECL or a component thereof. */
628 static bool
629 var_decl_component_p (tree var)
631 tree inner = var;
632 while (handled_component_p (inner))
633 inner = TREE_OPERAND (inner, 0);
634 return SSA_VAR_P (inner);
637 /* If the SIZE argument representing the size of an object is in a range
638 of values of which exactly one is valid (and that is zero), return
639 true, otherwise false. */
641 static bool
642 size_must_be_zero_p (tree size)
644 if (integer_zerop (size))
645 return true;
647 if (TREE_CODE (size) != SSA_NAME)
648 return false;
650 wide_int min, max;
651 enum value_range_type rtype = get_range_info (size, &min, &max);
652 if (rtype != VR_ANTI_RANGE)
653 return false;
655 tree type = TREE_TYPE (size);
656 int prec = TYPE_PRECISION (type);
658 wide_int wone = wi::one (prec);
660 /* Compute the value of SSIZE_MAX, the largest positive value that
661 can be stored in ssize_t, the signed counterpart of size_t. */
662 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
667 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
668 diagnose (otherwise undefined) overlapping copies without preventing
669 folding. When folded, GCC guarantees that overlapping memcpy has
670 the same semantics as memmove. Call to the library memcpy need not
671 provide the same guarantee. Return false if no simplification can
672 be made. */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 tree func = gimple_call_fndecl (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
686 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
687 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
688 && !nowarn);
690 /* If the LEN parameter is a constant zero or in range where
691 the only valid value is zero, return DEST. */
692 if (size_must_be_zero_p (len))
694 gimple *repl;
695 if (gimple_call_lhs (stmt))
696 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
697 else
698 repl = gimple_build_nop ();
699 tree vdef = gimple_vdef (stmt);
700 if (vdef && TREE_CODE (vdef) == SSA_NAME)
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
705 gsi_replace (gsi, repl, false);
706 return true;
709 /* If SRC and DEST are the same (and not volatile), return
710 DEST{,+LEN,+LEN-1}. */
711 if (operand_equal_p (src, dest, 0))
713 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
714 It's safe and may even be emitted by GCC itself (see bug
715 32667). However, diagnose it in explicit calls to the memcpy
716 function. */
717 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
718 warning_at (loc, OPT_Wrestrict,
719 "%qD source argument is the same as destination",
720 func);
722 unlink_stmt_vdef (stmt);
723 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
724 release_ssa_name (gimple_vdef (stmt));
725 if (!lhs)
727 gsi_replace (gsi, gimple_build_nop (), false);
728 return true;
730 goto done;
732 else
734 tree srctype, desttype;
735 unsigned int src_align, dest_align;
736 tree off0;
738 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
739 pointers as wide integer) and also may result in huge function
740 size because of inlined bounds copy. Thus don't inline for
741 functions we want to instrument. */
742 if (flag_check_pointer_bounds
743 && chkp_instrumentable_p (cfun->decl)
744 /* Even if data may contain pointers we can inline if copy
745 less than a pointer size. */
746 && (!tree_fits_uhwi_p (len)
747 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
748 return false;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2))
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
829 gsi_replace (gsi, new_stmt, false);
830 return true;
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
839 if (endp == 3)
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
904 else
905 return false;
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
941 return false;
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1016 if (srcvar == NULL_TREE)
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1029 else if (destvar == NULL_TREE)
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1048 gimple *new_stmt;
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080 set_vop_and_replace:
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1122 if (!fn)
1123 return false;
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1135 return true;
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1146 if (!fn)
1147 return false;
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1162 return true;
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1173 if (!fn)
1174 return false;
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1188 return true;
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1271 else
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1278 return true;
1282 /* Obtain the minimum and maximum string length or minimum and maximum
1283 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1284 If ARG is an SSA name variable, follow its use-def chains. When
1285 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1286 if we are unable to determine the length or value, return False.
1287 VISITED is a bitmap of visited variables.
1288 TYPE is 0 if string length should be obtained, 1 for maximum string
1289 length and 2 for maximum value ARG can have.
1290 When FUZZY is set and the length of a string cannot be determined,
1291 the function instead considers as the maximum possible length the
1292 size of a character array it may refer to.
1293 Set *FLEXP to true if the range of the string lengths has been
1294 obtained from the upper bound of an array at the end of a struct.
1295 Such an array may hold a string that's longer than its upper bound
1296 due to it being used as a poor-man's flexible array member. */
1298 static bool
1299 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1300 bool fuzzy, bool *flexp)
1302 tree var, val;
1303 gimple *def_stmt;
1305 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1306 but MINLEN may be cleared during the execution of the function. */
1307 tree *minlen = length;
1308 tree *const maxlen = length + 1;
1310 if (TREE_CODE (arg) != SSA_NAME)
1312 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1313 if (TREE_CODE (arg) == ADDR_EXPR
1314 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1315 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1317 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1318 if (TREE_CODE (aop0) == INDIRECT_REF
1319 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1320 return get_range_strlen (TREE_OPERAND (aop0, 0),
1321 length, visited, type, fuzzy, flexp);
1324 if (type == 2)
1326 val = arg;
1327 if (TREE_CODE (val) != INTEGER_CST
1328 || tree_int_cst_sgn (val) < 0)
1329 return false;
1331 else
1332 val = c_strlen (arg, 1);
1334 if (!val && fuzzy)
1336 if (TREE_CODE (arg) == ADDR_EXPR)
1337 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1338 visited, type, fuzzy, flexp);
1340 if (TREE_CODE (arg) == COMPONENT_REF
1341 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1343 /* Use the type of the member array to determine the upper
1344 bound on the length of the array. This may be overly
1345 optimistic if the array itself isn't NUL-terminated and
1346 the caller relies on the subsequent member to contain
1347 the NUL.
1348 Set *FLEXP to true if the array whose bound is being
1349 used is at the end of a struct. */
1350 if (array_at_struct_end_p (arg))
1351 *flexp = true;
1353 arg = TREE_OPERAND (arg, 1);
1354 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1355 if (!val || integer_zerop (val))
1356 return false;
1357 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1358 integer_one_node);
1359 /* Set the minimum size to zero since the string in
1360 the array could have zero length. */
1361 *minlen = ssize_int (0);
1364 if (VAR_P (arg)
1365 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
1367 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1368 if (!val || TREE_CODE (val) != INTEGER_CST || integer_zerop (val))
1369 return false;
1370 val = wide_int_to_tree (TREE_TYPE (val),
1371 wi::sub(wi::to_wide (val), 1));
1372 /* Set the minimum size to zero since the string in
1373 the array could have zero length. */
1374 *minlen = ssize_int (0);
1378 if (!val)
1379 return false;
1381 if (minlen
1382 && (!*minlen
1383 || (type > 0
1384 && TREE_CODE (*minlen) == INTEGER_CST
1385 && TREE_CODE (val) == INTEGER_CST
1386 && tree_int_cst_lt (val, *minlen))))
1387 *minlen = val;
1389 if (*maxlen)
1391 if (type > 0)
1393 if (TREE_CODE (*maxlen) != INTEGER_CST
1394 || TREE_CODE (val) != INTEGER_CST)
1395 return false;
1397 if (tree_int_cst_lt (*maxlen, val))
1398 *maxlen = val;
1399 return true;
1401 else if (simple_cst_equal (val, *maxlen) != 1)
1402 return false;
1405 *maxlen = val;
1406 return true;
1409 /* If ARG is registered for SSA update we cannot look at its defining
1410 statement. */
1411 if (name_registered_for_update_p (arg))
1412 return false;
1414 /* If we were already here, break the infinite cycle. */
1415 if (!*visited)
1416 *visited = BITMAP_ALLOC (NULL);
1417 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1418 return true;
1420 var = arg;
1421 def_stmt = SSA_NAME_DEF_STMT (var);
1423 switch (gimple_code (def_stmt))
1425 case GIMPLE_ASSIGN:
1426 /* The RHS of the statement defining VAR must either have a
1427 constant length or come from another SSA_NAME with a constant
1428 length. */
1429 if (gimple_assign_single_p (def_stmt)
1430 || gimple_assign_unary_nop_p (def_stmt))
1432 tree rhs = gimple_assign_rhs1 (def_stmt);
1433 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1435 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1437 tree op2 = gimple_assign_rhs2 (def_stmt);
1438 tree op3 = gimple_assign_rhs3 (def_stmt);
1439 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1440 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1442 return false;
1444 case GIMPLE_PHI:
1446 /* All the arguments of the PHI node must have the same constant
1447 length. */
1448 unsigned i;
1450 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1452 tree arg = gimple_phi_arg (def_stmt, i)->def;
1454 /* If this PHI has itself as an argument, we cannot
1455 determine the string length of this argument. However,
1456 if we can find a constant string length for the other
1457 PHI args then we can still be sure that this is a
1458 constant string length. So be optimistic and just
1459 continue with the next argument. */
1460 if (arg == gimple_phi_result (def_stmt))
1461 continue;
1463 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1465 if (fuzzy)
1466 *maxlen = build_all_ones_cst (size_type_node);
1467 else
1468 return false;
1472 return true;
1474 default:
1475 return false;
1479 /* Determine the minimum and maximum value or string length that ARG
1480 refers to and store each in the first two elements of MINMAXLEN.
1481 For expressions that point to strings of unknown lengths that are
1482 character arrays, use the upper bound of the array as the maximum
1483 length. For example, given an expression like 'x ? array : "xyz"'
1484 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1485 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1486 stored in array.
1487 Return true if the range of the string lengths has been obtained
1488 from the upper bound of an array at the end of a struct. Such
1489 an array may hold a string that's longer than its upper bound
1490 due to it being used as a poor-man's flexible array member. */
1492 bool
1493 get_range_strlen (tree arg, tree minmaxlen[2])
1495 bitmap visited = NULL;
1497 minmaxlen[0] = NULL_TREE;
1498 minmaxlen[1] = NULL_TREE;
1500 bool flexarray = false;
1501 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1503 if (visited)
1504 BITMAP_FREE (visited);
1506 return flexarray;
1509 tree
1510 get_maxval_strlen (tree arg, int type)
1512 bitmap visited = NULL;
1513 tree len[2] = { NULL_TREE, NULL_TREE };
1515 bool dummy;
1516 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1517 len[1] = NULL_TREE;
1518 if (visited)
1519 BITMAP_FREE (visited);
1521 return len[1];
1525 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1526 If LEN is not NULL, it represents the length of the string to be
1527 copied. Return NULL_TREE if no simplification can be made. */
1529 static bool
1530 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1531 tree dest, tree src)
1533 gimple *stmt = gsi_stmt (*gsi);
1534 location_t loc = gimple_location (stmt);
1535 tree fn;
1537 /* If SRC and DEST are the same (and not volatile), return DEST. */
1538 if (operand_equal_p (src, dest, 0))
1540 tree func = gimple_call_fndecl (stmt);
1542 warning_at (loc, OPT_Wrestrict,
1543 "%qD source argument is the same as destination",
1544 func);
1546 replace_call_with_value (gsi, dest);
1547 return true;
1550 if (optimize_function_for_size_p (cfun))
1551 return false;
1553 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1554 if (!fn)
1555 return false;
1557 tree len = get_maxval_strlen (src, 0);
1558 if (!len)
1559 return false;
1561 len = fold_convert_loc (loc, size_type_node, len);
1562 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1563 len = force_gimple_operand_gsi (gsi, len, true,
1564 NULL_TREE, true, GSI_SAME_STMT);
1565 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1566 replace_call_with_call_and_fold (gsi, repl);
1567 return true;
1570 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1571 If SLEN is not NULL, it represents the length of the source string.
1572 Return NULL_TREE if no simplification can be made. */
1574 static bool
1575 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1576 tree dest, tree src, tree len)
1578 gimple *stmt = gsi_stmt (*gsi);
1579 location_t loc = gimple_location (stmt);
1580 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1582 /* If the LEN parameter is zero, return DEST. */
1583 if (integer_zerop (len))
1585 /* Avoid warning if the destination refers to a an array/pointer
1586 decorate with attribute nonstring. */
1587 if (!nonstring)
1589 tree fndecl = gimple_call_fndecl (stmt);
1590 gcall *call = as_a <gcall *> (stmt);
1592 /* Warn about the lack of nul termination: the result is not
1593 a (nul-terminated) string. */
1594 tree slen = get_maxval_strlen (src, 0);
1595 if (slen && !integer_zerop (slen))
1596 warning_at (loc, OPT_Wstringop_truncation,
1597 "%G%qD destination unchanged after copying no bytes "
1598 "from a string of length %E",
1599 call, fndecl, slen);
1600 else
1601 warning_at (loc, OPT_Wstringop_truncation,
1602 "%G%qD destination unchanged after copying no bytes",
1603 call, fndecl);
1606 replace_call_with_value (gsi, dest);
1607 return true;
1610 /* We can't compare slen with len as constants below if len is not a
1611 constant. */
1612 if (TREE_CODE (len) != INTEGER_CST)
1613 return false;
1615 /* Now, we must be passed a constant src ptr parameter. */
1616 tree slen = get_maxval_strlen (src, 0);
1617 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1618 return false;
1620 /* The size of the source string including the terminating nul. */
1621 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1623 /* We do not support simplification of this case, though we do
1624 support it when expanding trees into RTL. */
1625 /* FIXME: generate a call to __builtin_memset. */
1626 if (tree_int_cst_lt (ssize, len))
1627 return false;
1629 if (!nonstring)
1631 if (tree_int_cst_lt (len, slen))
1633 tree fndecl = gimple_call_fndecl (stmt);
1634 gcall *call = as_a <gcall *> (stmt);
1636 warning_at (loc, OPT_Wstringop_truncation,
1637 (tree_int_cst_equal (size_one_node, len)
1638 ? G_("%G%qD output truncated copying %E byte "
1639 "from a string of length %E")
1640 : G_("%G%qD output truncated copying %E bytes "
1641 "from a string of length %E")),
1642 call, fndecl, len, slen);
1644 else if (tree_int_cst_equal (len, slen))
1646 tree fndecl = gimple_call_fndecl (stmt);
1647 gcall *call = as_a <gcall *> (stmt);
1649 warning_at (loc, OPT_Wstringop_truncation,
1650 (tree_int_cst_equal (size_one_node, len)
1651 ? G_("%G%qD output truncated before terminating nul "
1652 "copying %E byte from a string of the same "
1653 "length")
1654 : G_("%G%qD output truncated before terminating nul "
1655 "copying %E bytes from a string of the same "
1656 "length")),
1657 call, fndecl, len);
1661 /* OK transform into builtin memcpy. */
1662 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1663 if (!fn)
1664 return false;
1666 len = fold_convert_loc (loc, size_type_node, len);
1667 len = force_gimple_operand_gsi (gsi, len, true,
1668 NULL_TREE, true, GSI_SAME_STMT);
1669 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1670 replace_call_with_call_and_fold (gsi, repl);
1672 return true;
1675 /* Fold function call to builtin strchr or strrchr.
1676 If both arguments are constant, evaluate and fold the result,
1677 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1678 In general strlen is significantly faster than strchr
1679 due to being a simpler operation. */
1680 static bool
1681 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1683 gimple *stmt = gsi_stmt (*gsi);
1684 tree str = gimple_call_arg (stmt, 0);
1685 tree c = gimple_call_arg (stmt, 1);
1686 location_t loc = gimple_location (stmt);
1687 const char *p;
1688 char ch;
1690 if (!gimple_call_lhs (stmt))
1691 return false;
1693 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1695 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1697 if (p1 == NULL)
1699 replace_call_with_value (gsi, integer_zero_node);
1700 return true;
1703 tree len = build_int_cst (size_type_node, p1 - p);
1704 gimple_seq stmts = NULL;
1705 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1706 POINTER_PLUS_EXPR, str, len);
1707 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1708 gsi_replace_with_seq_vops (gsi, stmts);
1709 return true;
1712 if (!integer_zerop (c))
1713 return false;
1715 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1716 if (is_strrchr && optimize_function_for_size_p (cfun))
1718 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1720 if (strchr_fn)
1722 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1723 replace_call_with_call_and_fold (gsi, repl);
1724 return true;
1727 return false;
1730 tree len;
1731 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1733 if (!strlen_fn)
1734 return false;
1736 /* Create newstr = strlen (str). */
1737 gimple_seq stmts = NULL;
1738 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1739 gimple_set_location (new_stmt, loc);
1740 len = create_tmp_reg_or_ssa_name (size_type_node);
1741 gimple_call_set_lhs (new_stmt, len);
1742 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1744 /* Create (str p+ strlen (str)). */
1745 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1746 POINTER_PLUS_EXPR, str, len);
1747 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1748 gsi_replace_with_seq_vops (gsi, stmts);
1749 /* gsi now points at the assignment to the lhs, get a
1750 stmt iterator to the strlen.
1751 ??? We can't use gsi_for_stmt as that doesn't work when the
1752 CFG isn't built yet. */
1753 gimple_stmt_iterator gsi2 = *gsi;
1754 gsi_prev (&gsi2);
1755 fold_stmt (&gsi2);
1756 return true;
1759 /* Fold function call to builtin strstr.
1760 If both arguments are constant, evaluate and fold the result,
1761 additionally fold strstr (x, "") into x and strstr (x, "c")
1762 into strchr (x, 'c'). */
1763 static bool
1764 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1766 gimple *stmt = gsi_stmt (*gsi);
1767 tree haystack = gimple_call_arg (stmt, 0);
1768 tree needle = gimple_call_arg (stmt, 1);
1769 const char *p, *q;
1771 if (!gimple_call_lhs (stmt))
1772 return false;
1774 q = c_getstr (needle);
1775 if (q == NULL)
1776 return false;
1778 if ((p = c_getstr (haystack)))
1780 const char *r = strstr (p, q);
1782 if (r == NULL)
1784 replace_call_with_value (gsi, integer_zero_node);
1785 return true;
1788 tree len = build_int_cst (size_type_node, r - p);
1789 gimple_seq stmts = NULL;
1790 gimple *new_stmt
1791 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1792 haystack, len);
1793 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1794 gsi_replace_with_seq_vops (gsi, stmts);
1795 return true;
1798 /* For strstr (x, "") return x. */
1799 if (q[0] == '\0')
1801 replace_call_with_value (gsi, haystack);
1802 return true;
1805 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1806 if (q[1] == '\0')
1808 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1809 if (strchr_fn)
1811 tree c = build_int_cst (integer_type_node, q[0]);
1812 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1813 replace_call_with_call_and_fold (gsi, repl);
1814 return true;
1818 return false;
1821 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1822 to the call.
1824 Return NULL_TREE if no simplification was possible, otherwise return the
1825 simplified form of the call as a tree.
1827 The simplified form may be a constant or other expression which
1828 computes the same value, but in a more efficient manner (including
1829 calls to other builtin functions).
1831 The call may contain arguments which need to be evaluated, but
1832 which are not useful to determine the result of the call. In
1833 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1834 COMPOUND_EXPR will be an argument which must be evaluated.
1835 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1836 COMPOUND_EXPR in the chain will contain the tree for the simplified
1837 form of the builtin function call. */
1839 static bool
1840 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1842 gimple *stmt = gsi_stmt (*gsi);
1843 location_t loc = gimple_location (stmt);
1845 const char *p = c_getstr (src);
1847 /* If the string length is zero, return the dst parameter. */
1848 if (p && *p == '\0')
1850 replace_call_with_value (gsi, dst);
1851 return true;
1854 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1855 return false;
1857 /* See if we can store by pieces into (dst + strlen(dst)). */
1858 tree newdst;
1859 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1860 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1862 if (!strlen_fn || !memcpy_fn)
1863 return false;
1865 /* If the length of the source string isn't computable don't
1866 split strcat into strlen and memcpy. */
1867 tree len = get_maxval_strlen (src, 0);
1868 if (! len)
1869 return false;
1871 /* Create strlen (dst). */
1872 gimple_seq stmts = NULL, stmts2;
1873 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1874 gimple_set_location (repl, loc);
1875 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1876 gimple_call_set_lhs (repl, newdst);
1877 gimple_seq_add_stmt_without_update (&stmts, repl);
1879 /* Create (dst p+ strlen (dst)). */
1880 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1881 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1882 gimple_seq_add_seq_without_update (&stmts, stmts2);
1884 len = fold_convert_loc (loc, size_type_node, len);
1885 len = size_binop_loc (loc, PLUS_EXPR, len,
1886 build_int_cst (size_type_node, 1));
1887 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1888 gimple_seq_add_seq_without_update (&stmts, stmts2);
1890 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1891 gimple_seq_add_stmt_without_update (&stmts, repl);
1892 if (gimple_call_lhs (stmt))
1894 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1895 gimple_seq_add_stmt_without_update (&stmts, repl);
1896 gsi_replace_with_seq_vops (gsi, stmts);
1897 /* gsi now points at the assignment to the lhs, get a
1898 stmt iterator to the memcpy call.
1899 ??? We can't use gsi_for_stmt as that doesn't work when the
1900 CFG isn't built yet. */
1901 gimple_stmt_iterator gsi2 = *gsi;
1902 gsi_prev (&gsi2);
1903 fold_stmt (&gsi2);
1905 else
1907 gsi_replace_with_seq_vops (gsi, stmts);
1908 fold_stmt (gsi);
1910 return true;
1913 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1914 are the arguments to the call. */
1916 static bool
1917 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1919 gimple *stmt = gsi_stmt (*gsi);
1920 tree dest = gimple_call_arg (stmt, 0);
1921 tree src = gimple_call_arg (stmt, 1);
1922 tree size = gimple_call_arg (stmt, 2);
1923 tree fn;
1924 const char *p;
1927 p = c_getstr (src);
1928 /* If the SRC parameter is "", return DEST. */
1929 if (p && *p == '\0')
1931 replace_call_with_value (gsi, dest);
1932 return true;
1935 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1936 return false;
1938 /* If __builtin_strcat_chk is used, assume strcat is available. */
1939 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1940 if (!fn)
1941 return false;
1943 gimple *repl = gimple_build_call (fn, 2, dest, src);
1944 replace_call_with_call_and_fold (gsi, repl);
1945 return true;
1948 /* Simplify a call to the strncat builtin. */
1950 static bool
1951 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1953 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1954 tree dst = gimple_call_arg (stmt, 0);
1955 tree src = gimple_call_arg (stmt, 1);
1956 tree len = gimple_call_arg (stmt, 2);
1958 const char *p = c_getstr (src);
1960 /* If the requested length is zero, or the src parameter string
1961 length is zero, return the dst parameter. */
1962 if (integer_zerop (len) || (p && *p == '\0'))
1964 replace_call_with_value (gsi, dst);
1965 return true;
1968 if (TREE_CODE (len) != INTEGER_CST || !p)
1969 return false;
1971 unsigned srclen = strlen (p);
1973 int cmpsrc = compare_tree_int (len, srclen);
1975 /* Return early if the requested len is less than the string length.
1976 Warnings will be issued elsewhere later. */
1977 if (cmpsrc < 0)
1978 return false;
1980 unsigned HOST_WIDE_INT dstsize;
1982 bool nowarn = gimple_no_warning_p (stmt);
1984 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
1986 int cmpdst = compare_tree_int (len, dstsize);
1988 if (cmpdst >= 0)
1990 tree fndecl = gimple_call_fndecl (stmt);
1992 /* Strncat copies (at most) LEN bytes and always appends
1993 the terminating NUL so the specified bound should never
1994 be equal to (or greater than) the size of the destination.
1995 If it is, the copy could overflow. */
1996 location_t loc = gimple_location (stmt);
1997 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
1998 cmpdst == 0
1999 ? G_("%G%qD specified bound %E equals "
2000 "destination size")
2001 : G_("%G%qD specified bound %E exceeds "
2002 "destination size %wu"),
2003 stmt, fndecl, len, dstsize);
2004 if (nowarn)
2005 gimple_set_no_warning (stmt, true);
2009 if (!nowarn && cmpsrc == 0)
2011 tree fndecl = gimple_call_fndecl (stmt);
2013 /* To avoid certain truncation the specified bound should also
2014 not be equal to (or less than) the length of the source. */
2015 location_t loc = gimple_location (stmt);
2016 if (warning_at (loc, OPT_Wstringop_overflow_,
2017 "%G%qD specified bound %E equals source length",
2018 stmt, fndecl, len))
2019 gimple_set_no_warning (stmt, true);
2022 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2024 /* If the replacement _DECL isn't initialized, don't do the
2025 transformation. */
2026 if (!fn)
2027 return false;
2029 /* Otherwise, emit a call to strcat. */
2030 gcall *repl = gimple_build_call (fn, 2, dst, src);
2031 replace_call_with_call_and_fold (gsi, repl);
2032 return true;
2035 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2036 LEN, and SIZE. */
2038 static bool
2039 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2041 gimple *stmt = gsi_stmt (*gsi);
2042 tree dest = gimple_call_arg (stmt, 0);
2043 tree src = gimple_call_arg (stmt, 1);
2044 tree len = gimple_call_arg (stmt, 2);
2045 tree size = gimple_call_arg (stmt, 3);
2046 tree fn;
2047 const char *p;
2049 p = c_getstr (src);
2050 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2051 if ((p && *p == '\0')
2052 || integer_zerop (len))
2054 replace_call_with_value (gsi, dest);
2055 return true;
2058 if (! tree_fits_uhwi_p (size))
2059 return false;
2061 if (! integer_all_onesp (size))
2063 tree src_len = c_strlen (src, 1);
2064 if (src_len
2065 && tree_fits_uhwi_p (src_len)
2066 && tree_fits_uhwi_p (len)
2067 && ! tree_int_cst_lt (len, src_len))
2069 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2070 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2071 if (!fn)
2072 return false;
2074 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2075 replace_call_with_call_and_fold (gsi, repl);
2076 return true;
2078 return false;
2081 /* If __builtin_strncat_chk is used, assume strncat is available. */
2082 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2083 if (!fn)
2084 return false;
2086 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2087 replace_call_with_call_and_fold (gsi, repl);
2088 return true;
2091 /* Build and append gimple statements to STMTS that would load a first
2092 character of a memory location identified by STR. LOC is location
2093 of the statement. */
2095 static tree
2096 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2098 tree var;
2100 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2101 tree cst_uchar_ptr_node
2102 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2103 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2105 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2106 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2107 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2109 gimple_assign_set_lhs (stmt, var);
2110 gimple_seq_add_stmt_without_update (stmts, stmt);
2112 return var;
2115 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2116 FCODE is the name of the builtin. */
2118 static bool
2119 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2121 gimple *stmt = gsi_stmt (*gsi);
2122 tree callee = gimple_call_fndecl (stmt);
2123 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2125 tree type = integer_type_node;
2126 tree str1 = gimple_call_arg (stmt, 0);
2127 tree str2 = gimple_call_arg (stmt, 1);
2128 tree lhs = gimple_call_lhs (stmt);
2129 HOST_WIDE_INT length = -1;
2131 /* Handle strncmp and strncasecmp functions. */
2132 if (gimple_call_num_args (stmt) == 3)
2134 tree len = gimple_call_arg (stmt, 2);
2135 if (tree_fits_uhwi_p (len))
2136 length = tree_to_uhwi (len);
2139 /* If the LEN parameter is zero, return zero. */
2140 if (length == 0)
2142 replace_call_with_value (gsi, integer_zero_node);
2143 return true;
2146 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2147 if (operand_equal_p (str1, str2, 0))
2149 replace_call_with_value (gsi, integer_zero_node);
2150 return true;
2153 const char *p1 = c_getstr (str1);
2154 const char *p2 = c_getstr (str2);
2156 /* For known strings, return an immediate value. */
2157 if (p1 && p2)
2159 int r = 0;
2160 bool known_result = false;
2162 switch (fcode)
2164 case BUILT_IN_STRCMP:
2166 r = strcmp (p1, p2);
2167 known_result = true;
2168 break;
2170 case BUILT_IN_STRNCMP:
2172 if (length == -1)
2173 break;
2174 r = strncmp (p1, p2, length);
2175 known_result = true;
2176 break;
2178 /* Only handleable situation is where the string are equal (result 0),
2179 which is already handled by operand_equal_p case. */
2180 case BUILT_IN_STRCASECMP:
2181 break;
2182 case BUILT_IN_STRNCASECMP:
2184 if (length == -1)
2185 break;
2186 r = strncmp (p1, p2, length);
2187 if (r == 0)
2188 known_result = true;
2189 break;
2191 default:
2192 gcc_unreachable ();
2195 if (known_result)
2197 replace_call_with_value (gsi, build_cmp_result (type, r));
2198 return true;
2202 bool nonzero_length = length >= 1
2203 || fcode == BUILT_IN_STRCMP
2204 || fcode == BUILT_IN_STRCASECMP;
2206 location_t loc = gimple_location (stmt);
2208 /* If the second arg is "", return *(const unsigned char*)arg1. */
2209 if (p2 && *p2 == '\0' && nonzero_length)
2211 gimple_seq stmts = NULL;
2212 tree var = gimple_load_first_char (loc, str1, &stmts);
2213 if (lhs)
2215 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2216 gimple_seq_add_stmt_without_update (&stmts, stmt);
2219 gsi_replace_with_seq_vops (gsi, stmts);
2220 return true;
2223 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2224 if (p1 && *p1 == '\0' && nonzero_length)
2226 gimple_seq stmts = NULL;
2227 tree var = gimple_load_first_char (loc, str2, &stmts);
2229 if (lhs)
2231 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2232 stmt = gimple_build_assign (c, NOP_EXPR, var);
2233 gimple_seq_add_stmt_without_update (&stmts, stmt);
2235 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2236 gimple_seq_add_stmt_without_update (&stmts, stmt);
2239 gsi_replace_with_seq_vops (gsi, stmts);
2240 return true;
2243 /* If len parameter is one, return an expression corresponding to
2244 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2245 if (fcode == BUILT_IN_STRNCMP && length == 1)
2247 gimple_seq stmts = NULL;
2248 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2249 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2251 if (lhs)
2253 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2254 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2255 gimple_seq_add_stmt_without_update (&stmts, convert1);
2257 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2258 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2259 gimple_seq_add_stmt_without_update (&stmts, convert2);
2261 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2262 gimple_seq_add_stmt_without_update (&stmts, stmt);
2265 gsi_replace_with_seq_vops (gsi, stmts);
2266 return true;
2269 /* If length is larger than the length of one constant string,
2270 replace strncmp with corresponding strcmp */
2271 if (fcode == BUILT_IN_STRNCMP
2272 && length > 0
2273 && ((p2 && (size_t) length > strlen (p2))
2274 || (p1 && (size_t) length > strlen (p1))))
2276 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2277 if (!fn)
2278 return false;
2279 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2280 replace_call_with_call_and_fold (gsi, repl);
2281 return true;
2284 return false;
2287 /* Fold a call to the memchr pointed by GSI iterator. */
2289 static bool
2290 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2292 gimple *stmt = gsi_stmt (*gsi);
2293 tree lhs = gimple_call_lhs (stmt);
2294 tree arg1 = gimple_call_arg (stmt, 0);
2295 tree arg2 = gimple_call_arg (stmt, 1);
2296 tree len = gimple_call_arg (stmt, 2);
2298 /* If the LEN parameter is zero, return zero. */
2299 if (integer_zerop (len))
2301 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2302 return true;
2305 char c;
2306 if (TREE_CODE (arg2) != INTEGER_CST
2307 || !tree_fits_uhwi_p (len)
2308 || !target_char_cst_p (arg2, &c))
2309 return false;
2311 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2312 unsigned HOST_WIDE_INT string_length;
2313 const char *p1 = c_getstr (arg1, &string_length);
2315 if (p1)
2317 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2318 if (r == NULL)
2320 if (length <= string_length)
2322 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2323 return true;
2326 else
2328 unsigned HOST_WIDE_INT offset = r - p1;
2329 gimple_seq stmts = NULL;
2330 if (lhs != NULL_TREE)
2332 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2333 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2334 arg1, offset_cst);
2335 gimple_seq_add_stmt_without_update (&stmts, stmt);
2337 else
2338 gimple_seq_add_stmt_without_update (&stmts,
2339 gimple_build_nop ());
2341 gsi_replace_with_seq_vops (gsi, stmts);
2342 return true;
2346 return false;
2349 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2350 to the call. IGNORE is true if the value returned
2351 by the builtin will be ignored. UNLOCKED is true is true if this
2352 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2353 the known length of the string. Return NULL_TREE if no simplification
2354 was possible. */
2356 static bool
2357 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2358 tree arg0, tree arg1,
2359 bool unlocked)
2361 gimple *stmt = gsi_stmt (*gsi);
2363 /* If we're using an unlocked function, assume the other unlocked
2364 functions exist explicitly. */
2365 tree const fn_fputc = (unlocked
2366 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2367 : builtin_decl_implicit (BUILT_IN_FPUTC));
2368 tree const fn_fwrite = (unlocked
2369 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2370 : builtin_decl_implicit (BUILT_IN_FWRITE));
2372 /* If the return value is used, don't do the transformation. */
2373 if (gimple_call_lhs (stmt))
2374 return false;
2376 /* Get the length of the string passed to fputs. If the length
2377 can't be determined, punt. */
2378 tree len = get_maxval_strlen (arg0, 0);
2379 if (!len
2380 || TREE_CODE (len) != INTEGER_CST)
2381 return false;
2383 switch (compare_tree_int (len, 1))
2385 case -1: /* length is 0, delete the call entirely . */
2386 replace_call_with_value (gsi, integer_zero_node);
2387 return true;
2389 case 0: /* length is 1, call fputc. */
2391 const char *p = c_getstr (arg0);
2392 if (p != NULL)
2394 if (!fn_fputc)
2395 return false;
2397 gimple *repl = gimple_build_call (fn_fputc, 2,
2398 build_int_cst
2399 (integer_type_node, p[0]), arg1);
2400 replace_call_with_call_and_fold (gsi, repl);
2401 return true;
2404 /* FALLTHROUGH */
2405 case 1: /* length is greater than 1, call fwrite. */
2407 /* If optimizing for size keep fputs. */
2408 if (optimize_function_for_size_p (cfun))
2409 return false;
2410 /* New argument list transforming fputs(string, stream) to
2411 fwrite(string, 1, len, stream). */
2412 if (!fn_fwrite)
2413 return false;
2415 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2416 size_one_node, len, arg1);
2417 replace_call_with_call_and_fold (gsi, repl);
2418 return true;
2420 default:
2421 gcc_unreachable ();
2423 return false;
2426 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2427 DEST, SRC, LEN, and SIZE are the arguments to the call.
2428 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2429 code of the builtin. If MAXLEN is not NULL, it is maximum length
2430 passed as third argument. */
2432 static bool
2433 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2434 tree dest, tree src, tree len, tree size,
2435 enum built_in_function fcode)
2437 gimple *stmt = gsi_stmt (*gsi);
2438 location_t loc = gimple_location (stmt);
2439 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2440 tree fn;
2442 /* If SRC and DEST are the same (and not volatile), return DEST
2443 (resp. DEST+LEN for __mempcpy_chk). */
2444 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2446 if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
2448 tree func = gimple_call_fndecl (stmt);
2450 warning_at (loc, OPT_Wrestrict,
2451 "%qD source argument is the same as destination",
2452 func);
2455 if (fcode != BUILT_IN_MEMPCPY_CHK)
2457 replace_call_with_value (gsi, dest);
2458 return true;
2460 else
2462 gimple_seq stmts = NULL;
2463 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2464 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2465 TREE_TYPE (dest), dest, len);
2466 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2467 replace_call_with_value (gsi, temp);
2468 return true;
2472 if (! tree_fits_uhwi_p (size))
2473 return false;
2475 tree maxlen = get_maxval_strlen (len, 2);
2476 if (! integer_all_onesp (size))
2478 if (! tree_fits_uhwi_p (len))
2480 /* If LEN is not constant, try MAXLEN too.
2481 For MAXLEN only allow optimizing into non-_ocs function
2482 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2483 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2485 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2487 /* (void) __mempcpy_chk () can be optimized into
2488 (void) __memcpy_chk (). */
2489 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2490 if (!fn)
2491 return false;
2493 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2494 replace_call_with_call_and_fold (gsi, repl);
2495 return true;
2497 return false;
2500 else
2501 maxlen = len;
2503 if (tree_int_cst_lt (size, maxlen))
2504 return false;
2507 fn = NULL_TREE;
2508 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2509 mem{cpy,pcpy,move,set} is available. */
2510 switch (fcode)
2512 case BUILT_IN_MEMCPY_CHK:
2513 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2514 break;
2515 case BUILT_IN_MEMPCPY_CHK:
2516 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2517 break;
2518 case BUILT_IN_MEMMOVE_CHK:
2519 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2520 break;
2521 case BUILT_IN_MEMSET_CHK:
2522 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2523 break;
2524 default:
2525 break;
2528 if (!fn)
2529 return false;
2531 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2532 replace_call_with_call_and_fold (gsi, repl);
2533 return true;
2536 /* Fold a call to the __st[rp]cpy_chk builtin.
2537 DEST, SRC, and SIZE are the arguments to the call.
2538 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2539 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2540 strings passed as second argument. */
2542 static bool
2543 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2544 tree dest,
2545 tree src, tree size,
2546 enum built_in_function fcode)
2548 gimple *stmt = gsi_stmt (*gsi);
2549 location_t loc = gimple_location (stmt);
2550 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2551 tree len, fn;
2553 /* If SRC and DEST are the same (and not volatile), return DEST. */
2554 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2556 tree func = gimple_call_fndecl (stmt);
2558 warning_at (loc, OPT_Wrestrict,
2559 "%qD source argument is the same as destination",
2560 func);
2562 replace_call_with_value (gsi, dest);
2563 return true;
2566 if (! tree_fits_uhwi_p (size))
2567 return false;
2569 tree maxlen = get_maxval_strlen (src, 1);
2570 if (! integer_all_onesp (size))
2572 len = c_strlen (src, 1);
2573 if (! len || ! tree_fits_uhwi_p (len))
2575 /* If LEN is not constant, try MAXLEN too.
2576 For MAXLEN only allow optimizing into non-_ocs function
2577 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2578 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2580 if (fcode == BUILT_IN_STPCPY_CHK)
2582 if (! ignore)
2583 return false;
2585 /* If return value of __stpcpy_chk is ignored,
2586 optimize into __strcpy_chk. */
2587 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2588 if (!fn)
2589 return false;
2591 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2592 replace_call_with_call_and_fold (gsi, repl);
2593 return true;
2596 if (! len || TREE_SIDE_EFFECTS (len))
2597 return false;
2599 /* If c_strlen returned something, but not a constant,
2600 transform __strcpy_chk into __memcpy_chk. */
2601 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2602 if (!fn)
2603 return false;
2605 gimple_seq stmts = NULL;
2606 len = gimple_convert (&stmts, loc, size_type_node, len);
2607 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2608 build_int_cst (size_type_node, 1));
2609 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2610 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2611 replace_call_with_call_and_fold (gsi, repl);
2612 return true;
2615 else
2616 maxlen = len;
2618 if (! tree_int_cst_lt (maxlen, size))
2619 return false;
2622 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2623 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2624 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2625 if (!fn)
2626 return false;
2628 gimple *repl = gimple_build_call (fn, 2, dest, src);
2629 replace_call_with_call_and_fold (gsi, repl);
2630 return true;
2633 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2634 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2635 length passed as third argument. IGNORE is true if return value can be
2636 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2638 static bool
2639 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2640 tree dest, tree src,
2641 tree len, tree size,
2642 enum built_in_function fcode)
2644 gimple *stmt = gsi_stmt (*gsi);
2645 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2646 tree fn;
2648 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2650 /* If return value of __stpncpy_chk is ignored,
2651 optimize into __strncpy_chk. */
2652 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2653 if (fn)
2655 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2656 replace_call_with_call_and_fold (gsi, repl);
2657 return true;
2661 if (! tree_fits_uhwi_p (size))
2662 return false;
2664 tree maxlen = get_maxval_strlen (len, 2);
2665 if (! integer_all_onesp (size))
2667 if (! tree_fits_uhwi_p (len))
2669 /* If LEN is not constant, try MAXLEN too.
2670 For MAXLEN only allow optimizing into non-_ocs function
2671 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2672 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2673 return false;
2675 else
2676 maxlen = len;
2678 if (tree_int_cst_lt (size, maxlen))
2679 return false;
2682 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2683 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2684 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2685 if (!fn)
2686 return false;
2688 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2689 replace_call_with_call_and_fold (gsi, repl);
2690 return true;
2693 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2694 Return NULL_TREE if no simplification can be made. */
2696 static bool
2697 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2699 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2700 location_t loc = gimple_location (stmt);
2701 tree dest = gimple_call_arg (stmt, 0);
2702 tree src = gimple_call_arg (stmt, 1);
2703 tree fn, len, lenp1;
2705 /* If the result is unused, replace stpcpy with strcpy. */
2706 if (gimple_call_lhs (stmt) == NULL_TREE)
2708 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2709 if (!fn)
2710 return false;
2711 gimple_call_set_fndecl (stmt, fn);
2712 fold_stmt (gsi);
2713 return true;
2716 len = c_strlen (src, 1);
2717 if (!len
2718 || TREE_CODE (len) != INTEGER_CST)
2719 return false;
2721 if (optimize_function_for_size_p (cfun)
2722 /* If length is zero it's small enough. */
2723 && !integer_zerop (len))
2724 return false;
2726 /* If the source has a known length replace stpcpy with memcpy. */
2727 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2728 if (!fn)
2729 return false;
2731 gimple_seq stmts = NULL;
2732 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2733 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2734 tem, build_int_cst (size_type_node, 1));
2735 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2736 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2737 gimple_set_vuse (repl, gimple_vuse (stmt));
2738 gimple_set_vdef (repl, gimple_vdef (stmt));
2739 if (gimple_vdef (repl)
2740 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2741 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2742 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2743 /* Replace the result with dest + len. */
2744 stmts = NULL;
2745 tem = gimple_convert (&stmts, loc, sizetype, len);
2746 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2747 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2748 POINTER_PLUS_EXPR, dest, tem);
2749 gsi_replace (gsi, ret, false);
2750 /* Finally fold the memcpy call. */
2751 gimple_stmt_iterator gsi2 = *gsi;
2752 gsi_prev (&gsi2);
2753 fold_stmt (&gsi2);
2754 return true;
2757 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2758 NULL_TREE if a normal call should be emitted rather than expanding
2759 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2760 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2761 passed as second argument. */
2763 static bool
2764 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2765 enum built_in_function fcode)
2767 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2768 tree dest, size, len, fn, fmt, flag;
2769 const char *fmt_str;
2771 /* Verify the required arguments in the original call. */
2772 if (gimple_call_num_args (stmt) < 5)
2773 return false;
2775 dest = gimple_call_arg (stmt, 0);
2776 len = gimple_call_arg (stmt, 1);
2777 flag = gimple_call_arg (stmt, 2);
2778 size = gimple_call_arg (stmt, 3);
2779 fmt = gimple_call_arg (stmt, 4);
2781 if (! tree_fits_uhwi_p (size))
2782 return false;
2784 if (! integer_all_onesp (size))
2786 tree maxlen = get_maxval_strlen (len, 2);
2787 if (! tree_fits_uhwi_p (len))
2789 /* If LEN is not constant, try MAXLEN too.
2790 For MAXLEN only allow optimizing into non-_ocs function
2791 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2792 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2793 return false;
2795 else
2796 maxlen = len;
2798 if (tree_int_cst_lt (size, maxlen))
2799 return false;
2802 if (!init_target_chars ())
2803 return false;
2805 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2806 or if format doesn't contain % chars or is "%s". */
2807 if (! integer_zerop (flag))
2809 fmt_str = c_getstr (fmt);
2810 if (fmt_str == NULL)
2811 return false;
2812 if (strchr (fmt_str, target_percent) != NULL
2813 && strcmp (fmt_str, target_percent_s))
2814 return false;
2817 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2818 available. */
2819 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2820 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2821 if (!fn)
2822 return false;
2824 /* Replace the called function and the first 5 argument by 3 retaining
2825 trailing varargs. */
2826 gimple_call_set_fndecl (stmt, fn);
2827 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2828 gimple_call_set_arg (stmt, 0, dest);
2829 gimple_call_set_arg (stmt, 1, len);
2830 gimple_call_set_arg (stmt, 2, fmt);
2831 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2832 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2833 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2834 fold_stmt (gsi);
2835 return true;
2838 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2839 Return NULL_TREE if a normal call should be emitted rather than
2840 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2841 or BUILT_IN_VSPRINTF_CHK. */
2843 static bool
2844 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2845 enum built_in_function fcode)
2847 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2848 tree dest, size, len, fn, fmt, flag;
2849 const char *fmt_str;
2850 unsigned nargs = gimple_call_num_args (stmt);
2852 /* Verify the required arguments in the original call. */
2853 if (nargs < 4)
2854 return false;
2855 dest = gimple_call_arg (stmt, 0);
2856 flag = gimple_call_arg (stmt, 1);
2857 size = gimple_call_arg (stmt, 2);
2858 fmt = gimple_call_arg (stmt, 3);
2860 if (! tree_fits_uhwi_p (size))
2861 return false;
2863 len = NULL_TREE;
2865 if (!init_target_chars ())
2866 return false;
2868 /* Check whether the format is a literal string constant. */
2869 fmt_str = c_getstr (fmt);
2870 if (fmt_str != NULL)
2872 /* If the format doesn't contain % args or %%, we know the size. */
2873 if (strchr (fmt_str, target_percent) == 0)
2875 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2876 len = build_int_cstu (size_type_node, strlen (fmt_str));
2878 /* If the format is "%s" and first ... argument is a string literal,
2879 we know the size too. */
2880 else if (fcode == BUILT_IN_SPRINTF_CHK
2881 && strcmp (fmt_str, target_percent_s) == 0)
2883 tree arg;
2885 if (nargs == 5)
2887 arg = gimple_call_arg (stmt, 4);
2888 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2890 len = c_strlen (arg, 1);
2891 if (! len || ! tree_fits_uhwi_p (len))
2892 len = NULL_TREE;
2898 if (! integer_all_onesp (size))
2900 if (! len || ! tree_int_cst_lt (len, size))
2901 return false;
2904 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2905 or if format doesn't contain % chars or is "%s". */
2906 if (! integer_zerop (flag))
2908 if (fmt_str == NULL)
2909 return false;
2910 if (strchr (fmt_str, target_percent) != NULL
2911 && strcmp (fmt_str, target_percent_s))
2912 return false;
2915 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2916 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2917 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2918 if (!fn)
2919 return false;
2921 /* Replace the called function and the first 4 argument by 2 retaining
2922 trailing varargs. */
2923 gimple_call_set_fndecl (stmt, fn);
2924 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2925 gimple_call_set_arg (stmt, 0, dest);
2926 gimple_call_set_arg (stmt, 1, fmt);
2927 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2928 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2929 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2930 fold_stmt (gsi);
2931 return true;
2934 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2935 ORIG may be null if this is a 2-argument call. We don't attempt to
2936 simplify calls with more than 3 arguments.
2938 Return true if simplification was possible, otherwise false. */
2940 bool
2941 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2943 gimple *stmt = gsi_stmt (*gsi);
2944 tree dest = gimple_call_arg (stmt, 0);
2945 tree fmt = gimple_call_arg (stmt, 1);
2946 tree orig = NULL_TREE;
2947 const char *fmt_str = NULL;
2949 /* Verify the required arguments in the original call. We deal with two
2950 types of sprintf() calls: 'sprintf (str, fmt)' and
2951 'sprintf (dest, "%s", orig)'. */
2952 if (gimple_call_num_args (stmt) > 3)
2953 return false;
2955 if (gimple_call_num_args (stmt) == 3)
2956 orig = gimple_call_arg (stmt, 2);
2958 /* Check whether the format is a literal string constant. */
2959 fmt_str = c_getstr (fmt);
2960 if (fmt_str == NULL)
2961 return false;
2963 if (!init_target_chars ())
2964 return false;
2966 /* If the format doesn't contain % args or %%, use strcpy. */
2967 if (strchr (fmt_str, target_percent) == NULL)
2969 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2971 if (!fn)
2972 return false;
2974 /* Don't optimize sprintf (buf, "abc", ptr++). */
2975 if (orig)
2976 return false;
2978 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2979 'format' is known to contain no % formats. */
2980 gimple_seq stmts = NULL;
2981 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2982 gimple_seq_add_stmt_without_update (&stmts, repl);
2983 if (gimple_call_lhs (stmt))
2985 repl = gimple_build_assign (gimple_call_lhs (stmt),
2986 build_int_cst (integer_type_node,
2987 strlen (fmt_str)));
2988 gimple_seq_add_stmt_without_update (&stmts, repl);
2989 gsi_replace_with_seq_vops (gsi, stmts);
2990 /* gsi now points at the assignment to the lhs, get a
2991 stmt iterator to the memcpy call.
2992 ??? We can't use gsi_for_stmt as that doesn't work when the
2993 CFG isn't built yet. */
2994 gimple_stmt_iterator gsi2 = *gsi;
2995 gsi_prev (&gsi2);
2996 fold_stmt (&gsi2);
2998 else
3000 gsi_replace_with_seq_vops (gsi, stmts);
3001 fold_stmt (gsi);
3003 return true;
3006 /* If the format is "%s", use strcpy if the result isn't used. */
3007 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3009 tree fn;
3010 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3012 if (!fn)
3013 return false;
3015 /* Don't crash on sprintf (str1, "%s"). */
3016 if (!orig)
3017 return false;
3019 tree orig_len = NULL_TREE;
3020 if (gimple_call_lhs (stmt))
3022 orig_len = get_maxval_strlen (orig, 0);
3023 if (!orig_len)
3024 return false;
3027 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3028 gimple_seq stmts = NULL;
3029 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3030 gimple_seq_add_stmt_without_update (&stmts, repl);
3031 if (gimple_call_lhs (stmt))
3033 if (!useless_type_conversion_p (integer_type_node,
3034 TREE_TYPE (orig_len)))
3035 orig_len = fold_convert (integer_type_node, orig_len);
3036 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3037 gimple_seq_add_stmt_without_update (&stmts, repl);
3038 gsi_replace_with_seq_vops (gsi, stmts);
3039 /* gsi now points at the assignment to the lhs, get a
3040 stmt iterator to the memcpy call.
3041 ??? We can't use gsi_for_stmt as that doesn't work when the
3042 CFG isn't built yet. */
3043 gimple_stmt_iterator gsi2 = *gsi;
3044 gsi_prev (&gsi2);
3045 fold_stmt (&gsi2);
3047 else
3049 gsi_replace_with_seq_vops (gsi, stmts);
3050 fold_stmt (gsi);
3052 return true;
3054 return false;
3057 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3058 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3059 attempt to simplify calls with more than 4 arguments.
3061 Return true if simplification was possible, otherwise false. */
3063 bool
3064 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3066 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3067 tree dest = gimple_call_arg (stmt, 0);
3068 tree destsize = gimple_call_arg (stmt, 1);
3069 tree fmt = gimple_call_arg (stmt, 2);
3070 tree orig = NULL_TREE;
3071 const char *fmt_str = NULL;
3073 if (gimple_call_num_args (stmt) > 4)
3074 return false;
3076 if (gimple_call_num_args (stmt) == 4)
3077 orig = gimple_call_arg (stmt, 3);
3079 if (!tree_fits_uhwi_p (destsize))
3080 return false;
3081 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3083 /* Check whether the format is a literal string constant. */
3084 fmt_str = c_getstr (fmt);
3085 if (fmt_str == NULL)
3086 return false;
3088 if (!init_target_chars ())
3089 return false;
3091 /* If the format doesn't contain % args or %%, use strcpy. */
3092 if (strchr (fmt_str, target_percent) == NULL)
3094 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3095 if (!fn)
3096 return false;
3098 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3099 if (orig)
3100 return false;
3102 /* We could expand this as
3103 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3104 or to
3105 memcpy (str, fmt_with_nul_at_cstm1, cst);
3106 but in the former case that might increase code size
3107 and in the latter case grow .rodata section too much.
3108 So punt for now. */
3109 size_t len = strlen (fmt_str);
3110 if (len >= destlen)
3111 return false;
3113 gimple_seq stmts = NULL;
3114 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3115 gimple_seq_add_stmt_without_update (&stmts, repl);
3116 if (gimple_call_lhs (stmt))
3118 repl = gimple_build_assign (gimple_call_lhs (stmt),
3119 build_int_cst (integer_type_node, len));
3120 gimple_seq_add_stmt_without_update (&stmts, repl);
3121 gsi_replace_with_seq_vops (gsi, stmts);
3122 /* gsi now points at the assignment to the lhs, get a
3123 stmt iterator to the memcpy call.
3124 ??? We can't use gsi_for_stmt as that doesn't work when the
3125 CFG isn't built yet. */
3126 gimple_stmt_iterator gsi2 = *gsi;
3127 gsi_prev (&gsi2);
3128 fold_stmt (&gsi2);
3130 else
3132 gsi_replace_with_seq_vops (gsi, stmts);
3133 fold_stmt (gsi);
3135 return true;
3138 /* If the format is "%s", use strcpy if the result isn't used. */
3139 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3141 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3142 if (!fn)
3143 return false;
3145 /* Don't crash on snprintf (str1, cst, "%s"). */
3146 if (!orig)
3147 return false;
3149 tree orig_len = get_maxval_strlen (orig, 0);
3150 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3151 return false;
3153 /* We could expand this as
3154 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3155 or to
3156 memcpy (str1, str2_with_nul_at_cstm1, cst);
3157 but in the former case that might increase code size
3158 and in the latter case grow .rodata section too much.
3159 So punt for now. */
3160 if (compare_tree_int (orig_len, destlen) >= 0)
3161 return false;
3163 /* Convert snprintf (str1, cst, "%s", str2) into
3164 strcpy (str1, str2) if strlen (str2) < cst. */
3165 gimple_seq stmts = NULL;
3166 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3167 gimple_seq_add_stmt_without_update (&stmts, repl);
3168 if (gimple_call_lhs (stmt))
3170 if (!useless_type_conversion_p (integer_type_node,
3171 TREE_TYPE (orig_len)))
3172 orig_len = fold_convert (integer_type_node, orig_len);
3173 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3174 gimple_seq_add_stmt_without_update (&stmts, repl);
3175 gsi_replace_with_seq_vops (gsi, stmts);
3176 /* gsi now points at the assignment to the lhs, get a
3177 stmt iterator to the memcpy call.
3178 ??? We can't use gsi_for_stmt as that doesn't work when the
3179 CFG isn't built yet. */
3180 gimple_stmt_iterator gsi2 = *gsi;
3181 gsi_prev (&gsi2);
3182 fold_stmt (&gsi2);
3184 else
3186 gsi_replace_with_seq_vops (gsi, stmts);
3187 fold_stmt (gsi);
3189 return true;
3191 return false;
3194 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3195 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3196 more than 3 arguments, and ARG may be null in the 2-argument case.
3198 Return NULL_TREE if no simplification was possible, otherwise return the
3199 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3200 code of the function to be simplified. */
3202 static bool
3203 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3204 tree fp, tree fmt, tree arg,
3205 enum built_in_function fcode)
3207 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3208 tree fn_fputc, fn_fputs;
3209 const char *fmt_str = NULL;
3211 /* If the return value is used, don't do the transformation. */
3212 if (gimple_call_lhs (stmt) != NULL_TREE)
3213 return false;
3215 /* Check whether the format is a literal string constant. */
3216 fmt_str = c_getstr (fmt);
3217 if (fmt_str == NULL)
3218 return false;
3220 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3222 /* If we're using an unlocked function, assume the other
3223 unlocked functions exist explicitly. */
3224 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3225 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3227 else
3229 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3230 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3233 if (!init_target_chars ())
3234 return false;
3236 /* If the format doesn't contain % args or %%, use strcpy. */
3237 if (strchr (fmt_str, target_percent) == NULL)
3239 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3240 && arg)
3241 return false;
3243 /* If the format specifier was "", fprintf does nothing. */
3244 if (fmt_str[0] == '\0')
3246 replace_call_with_value (gsi, NULL_TREE);
3247 return true;
3250 /* When "string" doesn't contain %, replace all cases of
3251 fprintf (fp, string) with fputs (string, fp). The fputs
3252 builtin will take care of special cases like length == 1. */
3253 if (fn_fputs)
3255 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3256 replace_call_with_call_and_fold (gsi, repl);
3257 return true;
3261 /* The other optimizations can be done only on the non-va_list variants. */
3262 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3263 return false;
3265 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3266 else if (strcmp (fmt_str, target_percent_s) == 0)
3268 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3269 return false;
3270 if (fn_fputs)
3272 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3273 replace_call_with_call_and_fold (gsi, repl);
3274 return true;
3278 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3279 else if (strcmp (fmt_str, target_percent_c) == 0)
3281 if (!arg
3282 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3283 return false;
3284 if (fn_fputc)
3286 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3287 replace_call_with_call_and_fold (gsi, repl);
3288 return true;
3292 return false;
3295 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3296 FMT and ARG are the arguments to the call; we don't fold cases with
3297 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3299 Return NULL_TREE if no simplification was possible, otherwise return the
3300 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3301 code of the function to be simplified. */
3303 static bool
3304 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3305 tree arg, enum built_in_function fcode)
3307 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3308 tree fn_putchar, fn_puts, newarg;
3309 const char *fmt_str = NULL;
3311 /* If the return value is used, don't do the transformation. */
3312 if (gimple_call_lhs (stmt) != NULL_TREE)
3313 return false;
3315 /* Check whether the format is a literal string constant. */
3316 fmt_str = c_getstr (fmt);
3317 if (fmt_str == NULL)
3318 return false;
3320 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3322 /* If we're using an unlocked function, assume the other
3323 unlocked functions exist explicitly. */
3324 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3325 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3327 else
3329 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3330 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3333 if (!init_target_chars ())
3334 return false;
3336 if (strcmp (fmt_str, target_percent_s) == 0
3337 || strchr (fmt_str, target_percent) == NULL)
3339 const char *str;
3341 if (strcmp (fmt_str, target_percent_s) == 0)
3343 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3344 return false;
3346 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3347 return false;
3349 str = c_getstr (arg);
3350 if (str == NULL)
3351 return false;
3353 else
3355 /* The format specifier doesn't contain any '%' characters. */
3356 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3357 && arg)
3358 return false;
3359 str = fmt_str;
3362 /* If the string was "", printf does nothing. */
3363 if (str[0] == '\0')
3365 replace_call_with_value (gsi, NULL_TREE);
3366 return true;
3369 /* If the string has length of 1, call putchar. */
3370 if (str[1] == '\0')
3372 /* Given printf("c"), (where c is any one character,)
3373 convert "c"[0] to an int and pass that to the replacement
3374 function. */
3375 newarg = build_int_cst (integer_type_node, str[0]);
3376 if (fn_putchar)
3378 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3379 replace_call_with_call_and_fold (gsi, repl);
3380 return true;
3383 else
3385 /* If the string was "string\n", call puts("string"). */
3386 size_t len = strlen (str);
3387 if ((unsigned char)str[len - 1] == target_newline
3388 && (size_t) (int) len == len
3389 && (int) len > 0)
3391 char *newstr;
3392 tree offset_node, string_cst;
3394 /* Create a NUL-terminated string that's one char shorter
3395 than the original, stripping off the trailing '\n'. */
3396 newarg = build_string_literal (len, str);
3397 string_cst = string_constant (newarg, &offset_node);
3398 gcc_checking_assert (string_cst
3399 && (TREE_STRING_LENGTH (string_cst)
3400 == (int) len)
3401 && integer_zerop (offset_node)
3402 && (unsigned char)
3403 TREE_STRING_POINTER (string_cst)[len - 1]
3404 == target_newline);
3405 /* build_string_literal creates a new STRING_CST,
3406 modify it in place to avoid double copying. */
3407 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3408 newstr[len - 1] = '\0';
3409 if (fn_puts)
3411 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3412 replace_call_with_call_and_fold (gsi, repl);
3413 return true;
3416 else
3417 /* We'd like to arrange to call fputs(string,stdout) here,
3418 but we need stdout and don't have a way to get it yet. */
3419 return false;
3423 /* The other optimizations can be done only on the non-va_list variants. */
3424 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3425 return false;
3427 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3428 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3430 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3431 return false;
3432 if (fn_puts)
3434 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3435 replace_call_with_call_and_fold (gsi, repl);
3436 return true;
3440 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3441 else if (strcmp (fmt_str, target_percent_c) == 0)
3443 if (!arg || ! useless_type_conversion_p (integer_type_node,
3444 TREE_TYPE (arg)))
3445 return false;
3446 if (fn_putchar)
3448 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3449 replace_call_with_call_and_fold (gsi, repl);
3450 return true;
3454 return false;
3459 /* Fold a call to __builtin_strlen with known length LEN. */
3461 static bool
3462 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3464 gimple *stmt = gsi_stmt (*gsi);
3465 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3466 if (!len)
3467 return false;
3468 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3469 replace_call_with_value (gsi, len);
3470 return true;
3473 /* Fold a call to __builtin_acc_on_device. */
3475 static bool
3476 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3478 /* Defer folding until we know which compiler we're in. */
3479 if (symtab->state != EXPANSION)
3480 return false;
3482 unsigned val_host = GOMP_DEVICE_HOST;
3483 unsigned val_dev = GOMP_DEVICE_NONE;
3485 #ifdef ACCEL_COMPILER
3486 val_host = GOMP_DEVICE_NOT_HOST;
3487 val_dev = ACCEL_COMPILER_acc_device;
3488 #endif
3490 location_t loc = gimple_location (gsi_stmt (*gsi));
3492 tree host_eq = make_ssa_name (boolean_type_node);
3493 gimple *host_ass = gimple_build_assign
3494 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3495 gimple_set_location (host_ass, loc);
3496 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3498 tree dev_eq = make_ssa_name (boolean_type_node);
3499 gimple *dev_ass = gimple_build_assign
3500 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3501 gimple_set_location (dev_ass, loc);
3502 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3504 tree result = make_ssa_name (boolean_type_node);
3505 gimple *result_ass = gimple_build_assign
3506 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3507 gimple_set_location (result_ass, loc);
3508 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3510 replace_call_with_value (gsi, result);
3512 return true;
3515 /* Fold realloc (0, n) -> malloc (n). */
3517 static bool
3518 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3520 gimple *stmt = gsi_stmt (*gsi);
3521 tree arg = gimple_call_arg (stmt, 0);
3522 tree size = gimple_call_arg (stmt, 1);
3524 if (operand_equal_p (arg, null_pointer_node, 0))
3526 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3527 if (fn_malloc)
3529 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3530 replace_call_with_call_and_fold (gsi, repl);
3531 return true;
3534 return false;
3537 /* Fold the non-target builtin at *GSI and return whether any simplification
3538 was made. */
3540 static bool
3541 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3543 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3544 tree callee = gimple_call_fndecl (stmt);
3546 /* Give up for always_inline inline builtins until they are
3547 inlined. */
3548 if (avoid_folding_inline_builtin (callee))
3549 return false;
3551 unsigned n = gimple_call_num_args (stmt);
3552 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3553 switch (fcode)
3555 case BUILT_IN_BCMP:
3556 return gimple_fold_builtin_bcmp (gsi);
3557 case BUILT_IN_BCOPY:
3558 return gimple_fold_builtin_bcopy (gsi);
3559 case BUILT_IN_BZERO:
3560 return gimple_fold_builtin_bzero (gsi);
3562 case BUILT_IN_MEMSET:
3563 return gimple_fold_builtin_memset (gsi,
3564 gimple_call_arg (stmt, 1),
3565 gimple_call_arg (stmt, 2));
3566 case BUILT_IN_MEMCPY:
3567 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3568 gimple_call_arg (stmt, 1), 0);
3569 case BUILT_IN_MEMPCPY:
3570 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3571 gimple_call_arg (stmt, 1), 1);
3572 case BUILT_IN_MEMMOVE:
3573 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3574 gimple_call_arg (stmt, 1), 3);
3575 case BUILT_IN_SPRINTF_CHK:
3576 case BUILT_IN_VSPRINTF_CHK:
3577 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3578 case BUILT_IN_STRCAT_CHK:
3579 return gimple_fold_builtin_strcat_chk (gsi);
3580 case BUILT_IN_STRNCAT_CHK:
3581 return gimple_fold_builtin_strncat_chk (gsi);
3582 case BUILT_IN_STRLEN:
3583 return gimple_fold_builtin_strlen (gsi);
3584 case BUILT_IN_STRCPY:
3585 return gimple_fold_builtin_strcpy (gsi,
3586 gimple_call_arg (stmt, 0),
3587 gimple_call_arg (stmt, 1));
3588 case BUILT_IN_STRNCPY:
3589 return gimple_fold_builtin_strncpy (gsi,
3590 gimple_call_arg (stmt, 0),
3591 gimple_call_arg (stmt, 1),
3592 gimple_call_arg (stmt, 2));
3593 case BUILT_IN_STRCAT:
3594 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3595 gimple_call_arg (stmt, 1));
3596 case BUILT_IN_STRNCAT:
3597 return gimple_fold_builtin_strncat (gsi);
3598 case BUILT_IN_INDEX:
3599 case BUILT_IN_STRCHR:
3600 return gimple_fold_builtin_strchr (gsi, false);
3601 case BUILT_IN_RINDEX:
3602 case BUILT_IN_STRRCHR:
3603 return gimple_fold_builtin_strchr (gsi, true);
3604 case BUILT_IN_STRSTR:
3605 return gimple_fold_builtin_strstr (gsi);
3606 case BUILT_IN_STRCMP:
3607 case BUILT_IN_STRCASECMP:
3608 case BUILT_IN_STRNCMP:
3609 case BUILT_IN_STRNCASECMP:
3610 return gimple_fold_builtin_string_compare (gsi);
3611 case BUILT_IN_MEMCHR:
3612 return gimple_fold_builtin_memchr (gsi);
3613 case BUILT_IN_FPUTS:
3614 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3615 gimple_call_arg (stmt, 1), false);
3616 case BUILT_IN_FPUTS_UNLOCKED:
3617 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3618 gimple_call_arg (stmt, 1), true);
3619 case BUILT_IN_MEMCPY_CHK:
3620 case BUILT_IN_MEMPCPY_CHK:
3621 case BUILT_IN_MEMMOVE_CHK:
3622 case BUILT_IN_MEMSET_CHK:
3623 return gimple_fold_builtin_memory_chk (gsi,
3624 gimple_call_arg (stmt, 0),
3625 gimple_call_arg (stmt, 1),
3626 gimple_call_arg (stmt, 2),
3627 gimple_call_arg (stmt, 3),
3628 fcode);
3629 case BUILT_IN_STPCPY:
3630 return gimple_fold_builtin_stpcpy (gsi);
3631 case BUILT_IN_STRCPY_CHK:
3632 case BUILT_IN_STPCPY_CHK:
3633 return gimple_fold_builtin_stxcpy_chk (gsi,
3634 gimple_call_arg (stmt, 0),
3635 gimple_call_arg (stmt, 1),
3636 gimple_call_arg (stmt, 2),
3637 fcode);
3638 case BUILT_IN_STRNCPY_CHK:
3639 case BUILT_IN_STPNCPY_CHK:
3640 return gimple_fold_builtin_stxncpy_chk (gsi,
3641 gimple_call_arg (stmt, 0),
3642 gimple_call_arg (stmt, 1),
3643 gimple_call_arg (stmt, 2),
3644 gimple_call_arg (stmt, 3),
3645 fcode);
3646 case BUILT_IN_SNPRINTF_CHK:
3647 case BUILT_IN_VSNPRINTF_CHK:
3648 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3650 case BUILT_IN_FPRINTF:
3651 case BUILT_IN_FPRINTF_UNLOCKED:
3652 case BUILT_IN_VFPRINTF:
3653 if (n == 2 || n == 3)
3654 return gimple_fold_builtin_fprintf (gsi,
3655 gimple_call_arg (stmt, 0),
3656 gimple_call_arg (stmt, 1),
3657 n == 3
3658 ? gimple_call_arg (stmt, 2)
3659 : NULL_TREE,
3660 fcode);
3661 break;
3662 case BUILT_IN_FPRINTF_CHK:
3663 case BUILT_IN_VFPRINTF_CHK:
3664 if (n == 3 || n == 4)
3665 return gimple_fold_builtin_fprintf (gsi,
3666 gimple_call_arg (stmt, 0),
3667 gimple_call_arg (stmt, 2),
3668 n == 4
3669 ? gimple_call_arg (stmt, 3)
3670 : NULL_TREE,
3671 fcode);
3672 break;
3673 case BUILT_IN_PRINTF:
3674 case BUILT_IN_PRINTF_UNLOCKED:
3675 case BUILT_IN_VPRINTF:
3676 if (n == 1 || n == 2)
3677 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3678 n == 2
3679 ? gimple_call_arg (stmt, 1)
3680 : NULL_TREE, fcode);
3681 break;
3682 case BUILT_IN_PRINTF_CHK:
3683 case BUILT_IN_VPRINTF_CHK:
3684 if (n == 2 || n == 3)
3685 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3686 n == 3
3687 ? gimple_call_arg (stmt, 2)
3688 : NULL_TREE, fcode);
3689 break;
3690 case BUILT_IN_ACC_ON_DEVICE:
3691 return gimple_fold_builtin_acc_on_device (gsi,
3692 gimple_call_arg (stmt, 0));
3693 case BUILT_IN_REALLOC:
3694 return gimple_fold_builtin_realloc (gsi);
3696 default:;
3699 /* Try the generic builtin folder. */
3700 bool ignore = (gimple_call_lhs (stmt) == NULL);
3701 tree result = fold_call_stmt (stmt, ignore);
3702 if (result)
3704 if (ignore)
3705 STRIP_NOPS (result);
3706 else
3707 result = fold_convert (gimple_call_return_type (stmt), result);
3708 if (!update_call_from_tree (gsi, result))
3709 gimplify_and_update_call_from_tree (gsi, result);
3710 return true;
3713 return false;
3716 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3717 function calls to constants, where possible. */
3719 static tree
3720 fold_internal_goacc_dim (const gimple *call)
3722 int axis = oacc_get_ifn_dim_arg (call);
3723 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3724 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3725 tree result = NULL_TREE;
3727 /* If the size is 1, or we only want the size and it is not dynamic,
3728 we know the answer. */
3729 if (size == 1 || (!is_pos && size))
3731 tree type = TREE_TYPE (gimple_call_lhs (call));
3732 result = build_int_cst (type, size - is_pos);
3735 return result;
3738 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3739 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3740 &var where var is only addressable because of such calls. */
3742 bool
3743 optimize_atomic_compare_exchange_p (gimple *stmt)
3745 if (gimple_call_num_args (stmt) != 6
3746 || !flag_inline_atomics
3747 || !optimize
3748 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3749 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3750 || !gimple_vdef (stmt)
3751 || !gimple_vuse (stmt))
3752 return false;
3754 tree fndecl = gimple_call_fndecl (stmt);
3755 switch (DECL_FUNCTION_CODE (fndecl))
3757 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3758 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3759 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3760 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3761 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3762 break;
3763 default:
3764 return false;
3767 tree expected = gimple_call_arg (stmt, 1);
3768 if (TREE_CODE (expected) != ADDR_EXPR
3769 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3770 return false;
3772 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3773 if (!is_gimple_reg_type (etype)
3774 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3775 || TREE_THIS_VOLATILE (etype)
3776 || VECTOR_TYPE_P (etype)
3777 || TREE_CODE (etype) == COMPLEX_TYPE
3778 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3779 might not preserve all the bits. See PR71716. */
3780 || SCALAR_FLOAT_TYPE_P (etype)
3781 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3782 return false;
3784 tree weak = gimple_call_arg (stmt, 3);
3785 if (!integer_zerop (weak) && !integer_onep (weak))
3786 return false;
3788 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3789 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3790 machine_mode mode = TYPE_MODE (itype);
3792 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3793 == CODE_FOR_nothing
3794 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3795 return false;
3797 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3798 return false;
3800 return true;
3803 /* Fold
3804 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3805 into
3806 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3807 i = IMAGPART_EXPR <t>;
3808 r = (_Bool) i;
3809 e = REALPART_EXPR <t>; */
3811 void
3812 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3814 gimple *stmt = gsi_stmt (*gsi);
3815 tree fndecl = gimple_call_fndecl (stmt);
3816 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3817 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3818 tree ctype = build_complex_type (itype);
3819 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3820 bool throws = false;
3821 edge e = NULL;
3822 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3823 expected);
3824 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3825 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3826 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3828 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3829 build1 (VIEW_CONVERT_EXPR, itype,
3830 gimple_assign_lhs (g)));
3831 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3833 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3834 + int_size_in_bytes (itype);
3835 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3836 gimple_call_arg (stmt, 0),
3837 gimple_assign_lhs (g),
3838 gimple_call_arg (stmt, 2),
3839 build_int_cst (integer_type_node, flag),
3840 gimple_call_arg (stmt, 4),
3841 gimple_call_arg (stmt, 5));
3842 tree lhs = make_ssa_name (ctype);
3843 gimple_call_set_lhs (g, lhs);
3844 gimple_set_vdef (g, gimple_vdef (stmt));
3845 gimple_set_vuse (g, gimple_vuse (stmt));
3846 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3847 tree oldlhs = gimple_call_lhs (stmt);
3848 if (stmt_can_throw_internal (stmt))
3850 throws = true;
3851 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3853 gimple_call_set_nothrow (as_a <gcall *> (g),
3854 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3855 gimple_call_set_lhs (stmt, NULL_TREE);
3856 gsi_replace (gsi, g, true);
3857 if (oldlhs)
3859 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3860 build1 (IMAGPART_EXPR, itype, lhs));
3861 if (throws)
3863 gsi_insert_on_edge_immediate (e, g);
3864 *gsi = gsi_for_stmt (g);
3866 else
3867 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3868 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3869 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3871 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3872 build1 (REALPART_EXPR, itype, lhs));
3873 if (throws && oldlhs == NULL_TREE)
3875 gsi_insert_on_edge_immediate (e, g);
3876 *gsi = gsi_for_stmt (g);
3878 else
3879 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3880 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3882 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3883 VIEW_CONVERT_EXPR,
3884 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3885 gimple_assign_lhs (g)));
3886 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3888 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3889 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3890 *gsi = gsiret;
3893 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3894 doesn't fit into TYPE. The test for overflow should be regardless of
3895 -fwrapv, and even for unsigned types. */
3897 bool
3898 arith_overflowed_p (enum tree_code code, const_tree type,
3899 const_tree arg0, const_tree arg1)
3901 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3902 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3903 widest2_int_cst;
3904 widest2_int warg0 = widest2_int_cst (arg0);
3905 widest2_int warg1 = widest2_int_cst (arg1);
3906 widest2_int wres;
3907 switch (code)
3909 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3910 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3911 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3912 default: gcc_unreachable ();
3914 signop sign = TYPE_SIGN (type);
3915 if (sign == UNSIGNED && wi::neg_p (wres))
3916 return true;
3917 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3920 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3921 The statement may be replaced by another statement, e.g., if the call
3922 simplifies to a constant value. Return true if any changes were made.
3923 It is assumed that the operands have been previously folded. */
3925 static bool
3926 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3928 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3929 tree callee;
3930 bool changed = false;
3931 unsigned i;
3933 /* Fold *& in call arguments. */
3934 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3935 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3937 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3938 if (tmp)
3940 gimple_call_set_arg (stmt, i, tmp);
3941 changed = true;
3945 /* Check for virtual calls that became direct calls. */
3946 callee = gimple_call_fn (stmt);
3947 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3949 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3951 if (dump_file && virtual_method_call_p (callee)
3952 && !possible_polymorphic_call_target_p
3953 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3954 (OBJ_TYPE_REF_EXPR (callee)))))
3956 fprintf (dump_file,
3957 "Type inheritance inconsistent devirtualization of ");
3958 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3959 fprintf (dump_file, " to ");
3960 print_generic_expr (dump_file, callee, TDF_SLIM);
3961 fprintf (dump_file, "\n");
3964 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3965 changed = true;
3967 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3969 bool final;
3970 vec <cgraph_node *>targets
3971 = possible_polymorphic_call_targets (callee, stmt, &final);
3972 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3974 tree lhs = gimple_call_lhs (stmt);
3975 if (dump_enabled_p ())
3977 location_t loc = gimple_location_safe (stmt);
3978 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3979 "folding virtual function call to %s\n",
3980 targets.length () == 1
3981 ? targets[0]->name ()
3982 : "__builtin_unreachable");
3984 if (targets.length () == 1)
3986 tree fndecl = targets[0]->decl;
3987 gimple_call_set_fndecl (stmt, fndecl);
3988 changed = true;
3989 /* If changing the call to __cxa_pure_virtual
3990 or similar noreturn function, adjust gimple_call_fntype
3991 too. */
3992 if (gimple_call_noreturn_p (stmt)
3993 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3994 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3995 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3996 == void_type_node))
3997 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3998 /* If the call becomes noreturn, remove the lhs. */
3999 if (lhs
4000 && gimple_call_noreturn_p (stmt)
4001 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4002 || should_remove_lhs_p (lhs)))
4004 if (TREE_CODE (lhs) == SSA_NAME)
4006 tree var = create_tmp_var (TREE_TYPE (lhs));
4007 tree def = get_or_create_ssa_default_def (cfun, var);
4008 gimple *new_stmt = gimple_build_assign (lhs, def);
4009 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4011 gimple_call_set_lhs (stmt, NULL_TREE);
4013 maybe_remove_unused_call_args (cfun, stmt);
4015 else
4017 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4018 gimple *new_stmt = gimple_build_call (fndecl, 0);
4019 gimple_set_location (new_stmt, gimple_location (stmt));
4020 /* If the call had a SSA name as lhs morph that into
4021 an uninitialized value. */
4022 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4024 tree var = create_tmp_var (TREE_TYPE (lhs));
4025 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4026 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4027 set_ssa_default_def (cfun, var, lhs);
4029 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4030 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4031 gsi_replace (gsi, new_stmt, false);
4032 return true;
4038 /* Check for indirect calls that became direct calls, and then
4039 no longer require a static chain. */
4040 if (gimple_call_chain (stmt))
4042 tree fn = gimple_call_fndecl (stmt);
4043 if (fn && !DECL_STATIC_CHAIN (fn))
4045 gimple_call_set_chain (stmt, NULL);
4046 changed = true;
4048 else
4050 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4051 if (tmp)
4053 gimple_call_set_chain (stmt, tmp);
4054 changed = true;
4059 if (inplace)
4060 return changed;
4062 /* Check for builtins that CCP can handle using information not
4063 available in the generic fold routines. */
4064 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4066 if (gimple_fold_builtin (gsi))
4067 changed = true;
4069 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4071 changed |= targetm.gimple_fold_builtin (gsi);
4073 else if (gimple_call_internal_p (stmt))
4075 enum tree_code subcode = ERROR_MARK;
4076 tree result = NULL_TREE;
4077 bool cplx_result = false;
4078 tree overflow = NULL_TREE;
4079 switch (gimple_call_internal_fn (stmt))
4081 case IFN_BUILTIN_EXPECT:
4082 result = fold_builtin_expect (gimple_location (stmt),
4083 gimple_call_arg (stmt, 0),
4084 gimple_call_arg (stmt, 1),
4085 gimple_call_arg (stmt, 2));
4086 break;
4087 case IFN_UBSAN_OBJECT_SIZE:
4089 tree offset = gimple_call_arg (stmt, 1);
4090 tree objsize = gimple_call_arg (stmt, 2);
4091 if (integer_all_onesp (objsize)
4092 || (TREE_CODE (offset) == INTEGER_CST
4093 && TREE_CODE (objsize) == INTEGER_CST
4094 && tree_int_cst_le (offset, objsize)))
4096 replace_call_with_value (gsi, NULL_TREE);
4097 return true;
4100 break;
4101 case IFN_UBSAN_PTR:
4102 if (integer_zerop (gimple_call_arg (stmt, 1)))
4104 replace_call_with_value (gsi, NULL_TREE);
4105 return true;
4107 break;
4108 case IFN_UBSAN_BOUNDS:
4110 tree index = gimple_call_arg (stmt, 1);
4111 tree bound = gimple_call_arg (stmt, 2);
4112 if (TREE_CODE (index) == INTEGER_CST
4113 && TREE_CODE (bound) == INTEGER_CST)
4115 index = fold_convert (TREE_TYPE (bound), index);
4116 if (TREE_CODE (index) == INTEGER_CST
4117 && tree_int_cst_le (index, bound))
4119 replace_call_with_value (gsi, NULL_TREE);
4120 return true;
4124 break;
4125 case IFN_GOACC_DIM_SIZE:
4126 case IFN_GOACC_DIM_POS:
4127 result = fold_internal_goacc_dim (stmt);
4128 break;
4129 case IFN_UBSAN_CHECK_ADD:
4130 subcode = PLUS_EXPR;
4131 break;
4132 case IFN_UBSAN_CHECK_SUB:
4133 subcode = MINUS_EXPR;
4134 break;
4135 case IFN_UBSAN_CHECK_MUL:
4136 subcode = MULT_EXPR;
4137 break;
4138 case IFN_ADD_OVERFLOW:
4139 subcode = PLUS_EXPR;
4140 cplx_result = true;
4141 break;
4142 case IFN_SUB_OVERFLOW:
4143 subcode = MINUS_EXPR;
4144 cplx_result = true;
4145 break;
4146 case IFN_MUL_OVERFLOW:
4147 subcode = MULT_EXPR;
4148 cplx_result = true;
4149 break;
4150 default:
4151 break;
4153 if (subcode != ERROR_MARK)
4155 tree arg0 = gimple_call_arg (stmt, 0);
4156 tree arg1 = gimple_call_arg (stmt, 1);
4157 tree type = TREE_TYPE (arg0);
4158 if (cplx_result)
4160 tree lhs = gimple_call_lhs (stmt);
4161 if (lhs == NULL_TREE)
4162 type = NULL_TREE;
4163 else
4164 type = TREE_TYPE (TREE_TYPE (lhs));
4166 if (type == NULL_TREE)
4168 /* x = y + 0; x = y - 0; x = y * 0; */
4169 else if (integer_zerop (arg1))
4170 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4171 /* x = 0 + y; x = 0 * y; */
4172 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4173 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4174 /* x = y - y; */
4175 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4176 result = integer_zero_node;
4177 /* x = y * 1; x = 1 * y; */
4178 else if (subcode == MULT_EXPR && integer_onep (arg1))
4179 result = arg0;
4180 else if (subcode == MULT_EXPR && integer_onep (arg0))
4181 result = arg1;
4182 else if (TREE_CODE (arg0) == INTEGER_CST
4183 && TREE_CODE (arg1) == INTEGER_CST)
4185 if (cplx_result)
4186 result = int_const_binop (subcode, fold_convert (type, arg0),
4187 fold_convert (type, arg1));
4188 else
4189 result = int_const_binop (subcode, arg0, arg1);
4190 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4192 if (cplx_result)
4193 overflow = build_one_cst (type);
4194 else
4195 result = NULL_TREE;
4198 if (result)
4200 if (result == integer_zero_node)
4201 result = build_zero_cst (type);
4202 else if (cplx_result && TREE_TYPE (result) != type)
4204 if (TREE_CODE (result) == INTEGER_CST)
4206 if (arith_overflowed_p (PLUS_EXPR, type, result,
4207 integer_zero_node))
4208 overflow = build_one_cst (type);
4210 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4211 && TYPE_UNSIGNED (type))
4212 || (TYPE_PRECISION (type)
4213 < (TYPE_PRECISION (TREE_TYPE (result))
4214 + (TYPE_UNSIGNED (TREE_TYPE (result))
4215 && !TYPE_UNSIGNED (type)))))
4216 result = NULL_TREE;
4217 if (result)
4218 result = fold_convert (type, result);
4223 if (result)
4225 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4226 result = drop_tree_overflow (result);
4227 if (cplx_result)
4229 if (overflow == NULL_TREE)
4230 overflow = build_zero_cst (TREE_TYPE (result));
4231 tree ctype = build_complex_type (TREE_TYPE (result));
4232 if (TREE_CODE (result) == INTEGER_CST
4233 && TREE_CODE (overflow) == INTEGER_CST)
4234 result = build_complex (ctype, result, overflow);
4235 else
4236 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4237 ctype, result, overflow);
4239 if (!update_call_from_tree (gsi, result))
4240 gimplify_and_update_call_from_tree (gsi, result);
4241 changed = true;
4245 return changed;
4249 /* Return true whether NAME has a use on STMT. */
4251 static bool
4252 has_use_on_stmt (tree name, gimple *stmt)
4254 imm_use_iterator iter;
4255 use_operand_p use_p;
4256 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4257 if (USE_STMT (use_p) == stmt)
4258 return true;
4259 return false;
4262 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4263 gimple_simplify.
4265 Replaces *GSI with the simplification result in RCODE and OPS
4266 and the associated statements in *SEQ. Does the replacement
4267 according to INPLACE and returns true if the operation succeeded. */
4269 static bool
4270 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4271 code_helper rcode, tree *ops,
4272 gimple_seq *seq, bool inplace)
4274 gimple *stmt = gsi_stmt (*gsi);
4276 /* Play safe and do not allow abnormals to be mentioned in
4277 newly created statements. See also maybe_push_res_to_seq.
4278 As an exception allow such uses if there was a use of the
4279 same SSA name on the old stmt. */
4280 if ((TREE_CODE (ops[0]) == SSA_NAME
4281 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4282 && !has_use_on_stmt (ops[0], stmt))
4283 || (ops[1]
4284 && TREE_CODE (ops[1]) == SSA_NAME
4285 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4286 && !has_use_on_stmt (ops[1], stmt))
4287 || (ops[2]
4288 && TREE_CODE (ops[2]) == SSA_NAME
4289 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4290 && !has_use_on_stmt (ops[2], stmt))
4291 || (COMPARISON_CLASS_P (ops[0])
4292 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4293 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4294 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4295 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4296 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4297 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4298 return false;
4300 /* Don't insert new statements when INPLACE is true, even if we could
4301 reuse STMT for the final statement. */
4302 if (inplace && !gimple_seq_empty_p (*seq))
4303 return false;
4305 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4307 gcc_assert (rcode.is_tree_code ());
4308 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4309 /* GIMPLE_CONDs condition may not throw. */
4310 && (!flag_exceptions
4311 || !cfun->can_throw_non_call_exceptions
4312 || !operation_could_trap_p (rcode,
4313 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4314 false, NULL_TREE)))
4315 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4316 else if (rcode == SSA_NAME)
4317 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4318 build_zero_cst (TREE_TYPE (ops[0])));
4319 else if (rcode == INTEGER_CST)
4321 if (integer_zerop (ops[0]))
4322 gimple_cond_make_false (cond_stmt);
4323 else
4324 gimple_cond_make_true (cond_stmt);
4326 else if (!inplace)
4328 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4329 ops, seq);
4330 if (!res)
4331 return false;
4332 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4333 build_zero_cst (TREE_TYPE (res)));
4335 else
4336 return false;
4337 if (dump_file && (dump_flags & TDF_DETAILS))
4339 fprintf (dump_file, "gimple_simplified to ");
4340 if (!gimple_seq_empty_p (*seq))
4341 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4342 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4343 0, TDF_SLIM);
4345 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4346 return true;
4348 else if (is_gimple_assign (stmt)
4349 && rcode.is_tree_code ())
4351 if (!inplace
4352 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4354 maybe_build_generic_op (rcode,
4355 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4356 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4357 if (dump_file && (dump_flags & TDF_DETAILS))
4359 fprintf (dump_file, "gimple_simplified to ");
4360 if (!gimple_seq_empty_p (*seq))
4361 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4362 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4363 0, TDF_SLIM);
4365 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4366 return true;
4369 else if (rcode.is_fn_code ()
4370 && gimple_call_combined_fn (stmt) == rcode)
4372 unsigned i;
4373 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4375 gcc_assert (ops[i] != NULL_TREE);
4376 gimple_call_set_arg (stmt, i, ops[i]);
4378 if (i < 3)
4379 gcc_assert (ops[i] == NULL_TREE);
4380 if (dump_file && (dump_flags & TDF_DETAILS))
4382 fprintf (dump_file, "gimple_simplified to ");
4383 if (!gimple_seq_empty_p (*seq))
4384 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4385 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4387 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4388 return true;
4390 else if (!inplace)
4392 if (gimple_has_lhs (stmt))
4394 tree lhs = gimple_get_lhs (stmt);
4395 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4396 ops, seq, lhs))
4397 return false;
4398 if (dump_file && (dump_flags & TDF_DETAILS))
4400 fprintf (dump_file, "gimple_simplified to ");
4401 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4403 gsi_replace_with_seq_vops (gsi, *seq);
4404 return true;
4406 else
4407 gcc_unreachable ();
4410 return false;
4413 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4415 static bool
4416 maybe_canonicalize_mem_ref_addr (tree *t)
4418 bool res = false;
4420 if (TREE_CODE (*t) == ADDR_EXPR)
4421 t = &TREE_OPERAND (*t, 0);
4423 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4424 generic vector extension. The actual vector referenced is
4425 view-converted to an array type for this purpose. If the index
4426 is constant the canonical representation in the middle-end is a
4427 BIT_FIELD_REF so re-write the former to the latter here. */
4428 if (TREE_CODE (*t) == ARRAY_REF
4429 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4430 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4431 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4433 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4434 if (VECTOR_TYPE_P (vtype))
4436 tree low = array_ref_low_bound (*t);
4437 if (TREE_CODE (low) == INTEGER_CST)
4439 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4441 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4442 wi::to_widest (low));
4443 idx = wi::mul (idx, wi::to_widest
4444 (TYPE_SIZE (TREE_TYPE (*t))));
4445 widest_int ext
4446 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4447 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4449 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4450 TREE_TYPE (*t),
4451 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4452 TYPE_SIZE (TREE_TYPE (*t)),
4453 wide_int_to_tree (bitsizetype, idx));
4454 res = true;
4461 while (handled_component_p (*t))
4462 t = &TREE_OPERAND (*t, 0);
4464 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4465 of invariant addresses into a SSA name MEM_REF address. */
4466 if (TREE_CODE (*t) == MEM_REF
4467 || TREE_CODE (*t) == TARGET_MEM_REF)
4469 tree addr = TREE_OPERAND (*t, 0);
4470 if (TREE_CODE (addr) == ADDR_EXPR
4471 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4472 || handled_component_p (TREE_OPERAND (addr, 0))))
4474 tree base;
4475 poly_int64 coffset;
4476 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4477 &coffset);
4478 if (!base)
4479 gcc_unreachable ();
4481 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4482 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4483 TREE_OPERAND (*t, 1),
4484 size_int (coffset));
4485 res = true;
4487 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4488 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4491 /* Canonicalize back MEM_REFs to plain reference trees if the object
4492 accessed is a decl that has the same access semantics as the MEM_REF. */
4493 if (TREE_CODE (*t) == MEM_REF
4494 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4495 && integer_zerop (TREE_OPERAND (*t, 1))
4496 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4498 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4499 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4500 if (/* Same volatile qualification. */
4501 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4502 /* Same TBAA behavior with -fstrict-aliasing. */
4503 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4504 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4505 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4506 /* Same alignment. */
4507 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4508 /* We have to look out here to not drop a required conversion
4509 from the rhs to the lhs if *t appears on the lhs or vice-versa
4510 if it appears on the rhs. Thus require strict type
4511 compatibility. */
4512 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4514 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4515 res = true;
4519 /* Canonicalize TARGET_MEM_REF in particular with respect to
4520 the indexes becoming constant. */
4521 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4523 tree tem = maybe_fold_tmr (*t);
4524 if (tem)
4526 *t = tem;
4527 res = true;
4531 return res;
4534 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4535 distinguishes both cases. */
4537 static bool
4538 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4540 bool changed = false;
4541 gimple *stmt = gsi_stmt (*gsi);
4542 bool nowarning = gimple_no_warning_p (stmt);
4543 unsigned i;
4544 fold_defer_overflow_warnings ();
4546 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4547 after propagation.
4548 ??? This shouldn't be done in generic folding but in the
4549 propagation helpers which also know whether an address was
4550 propagated.
4551 Also canonicalize operand order. */
4552 switch (gimple_code (stmt))
4554 case GIMPLE_ASSIGN:
4555 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4557 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4558 if ((REFERENCE_CLASS_P (*rhs)
4559 || TREE_CODE (*rhs) == ADDR_EXPR)
4560 && maybe_canonicalize_mem_ref_addr (rhs))
4561 changed = true;
4562 tree *lhs = gimple_assign_lhs_ptr (stmt);
4563 if (REFERENCE_CLASS_P (*lhs)
4564 && maybe_canonicalize_mem_ref_addr (lhs))
4565 changed = true;
4567 else
4569 /* Canonicalize operand order. */
4570 enum tree_code code = gimple_assign_rhs_code (stmt);
4571 if (TREE_CODE_CLASS (code) == tcc_comparison
4572 || commutative_tree_code (code)
4573 || commutative_ternary_tree_code (code))
4575 tree rhs1 = gimple_assign_rhs1 (stmt);
4576 tree rhs2 = gimple_assign_rhs2 (stmt);
4577 if (tree_swap_operands_p (rhs1, rhs2))
4579 gimple_assign_set_rhs1 (stmt, rhs2);
4580 gimple_assign_set_rhs2 (stmt, rhs1);
4581 if (TREE_CODE_CLASS (code) == tcc_comparison)
4582 gimple_assign_set_rhs_code (stmt,
4583 swap_tree_comparison (code));
4584 changed = true;
4588 break;
4589 case GIMPLE_CALL:
4591 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4593 tree *arg = gimple_call_arg_ptr (stmt, i);
4594 if (REFERENCE_CLASS_P (*arg)
4595 && maybe_canonicalize_mem_ref_addr (arg))
4596 changed = true;
4598 tree *lhs = gimple_call_lhs_ptr (stmt);
4599 if (*lhs
4600 && REFERENCE_CLASS_P (*lhs)
4601 && maybe_canonicalize_mem_ref_addr (lhs))
4602 changed = true;
4603 break;
4605 case GIMPLE_ASM:
4607 gasm *asm_stmt = as_a <gasm *> (stmt);
4608 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4610 tree link = gimple_asm_output_op (asm_stmt, i);
4611 tree op = TREE_VALUE (link);
4612 if (REFERENCE_CLASS_P (op)
4613 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4614 changed = true;
4616 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4618 tree link = gimple_asm_input_op (asm_stmt, i);
4619 tree op = TREE_VALUE (link);
4620 if ((REFERENCE_CLASS_P (op)
4621 || TREE_CODE (op) == ADDR_EXPR)
4622 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4623 changed = true;
4626 break;
4627 case GIMPLE_DEBUG:
4628 if (gimple_debug_bind_p (stmt))
4630 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4631 if (*val
4632 && (REFERENCE_CLASS_P (*val)
4633 || TREE_CODE (*val) == ADDR_EXPR)
4634 && maybe_canonicalize_mem_ref_addr (val))
4635 changed = true;
4637 break;
4638 case GIMPLE_COND:
4640 /* Canonicalize operand order. */
4641 tree lhs = gimple_cond_lhs (stmt);
4642 tree rhs = gimple_cond_rhs (stmt);
4643 if (tree_swap_operands_p (lhs, rhs))
4645 gcond *gc = as_a <gcond *> (stmt);
4646 gimple_cond_set_lhs (gc, rhs);
4647 gimple_cond_set_rhs (gc, lhs);
4648 gimple_cond_set_code (gc,
4649 swap_tree_comparison (gimple_cond_code (gc)));
4650 changed = true;
4653 default:;
4656 /* Dispatch to pattern-based folding. */
4657 if (!inplace
4658 || is_gimple_assign (stmt)
4659 || gimple_code (stmt) == GIMPLE_COND)
4661 gimple_seq seq = NULL;
4662 code_helper rcode;
4663 tree ops[3] = {};
4664 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4665 valueize, valueize))
4667 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4668 changed = true;
4669 else
4670 gimple_seq_discard (seq);
4674 stmt = gsi_stmt (*gsi);
4676 /* Fold the main computation performed by the statement. */
4677 switch (gimple_code (stmt))
4679 case GIMPLE_ASSIGN:
4681 /* Try to canonicalize for boolean-typed X the comparisons
4682 X == 0, X == 1, X != 0, and X != 1. */
4683 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4684 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4686 tree lhs = gimple_assign_lhs (stmt);
4687 tree op1 = gimple_assign_rhs1 (stmt);
4688 tree op2 = gimple_assign_rhs2 (stmt);
4689 tree type = TREE_TYPE (op1);
4691 /* Check whether the comparison operands are of the same boolean
4692 type as the result type is.
4693 Check that second operand is an integer-constant with value
4694 one or zero. */
4695 if (TREE_CODE (op2) == INTEGER_CST
4696 && (integer_zerop (op2) || integer_onep (op2))
4697 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4699 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4700 bool is_logical_not = false;
4702 /* X == 0 and X != 1 is a logical-not.of X
4703 X == 1 and X != 0 is X */
4704 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4705 || (cmp_code == NE_EXPR && integer_onep (op2)))
4706 is_logical_not = true;
4708 if (is_logical_not == false)
4709 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4710 /* Only for one-bit precision typed X the transformation
4711 !X -> ~X is valied. */
4712 else if (TYPE_PRECISION (type) == 1)
4713 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4714 /* Otherwise we use !X -> X ^ 1. */
4715 else
4716 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4717 build_int_cst (type, 1));
4718 changed = true;
4719 break;
4723 unsigned old_num_ops = gimple_num_ops (stmt);
4724 tree lhs = gimple_assign_lhs (stmt);
4725 tree new_rhs = fold_gimple_assign (gsi);
4726 if (new_rhs
4727 && !useless_type_conversion_p (TREE_TYPE (lhs),
4728 TREE_TYPE (new_rhs)))
4729 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4730 if (new_rhs
4731 && (!inplace
4732 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4734 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4735 changed = true;
4737 break;
4740 case GIMPLE_CALL:
4741 changed |= gimple_fold_call (gsi, inplace);
4742 break;
4744 case GIMPLE_ASM:
4745 /* Fold *& in asm operands. */
4747 gasm *asm_stmt = as_a <gasm *> (stmt);
4748 size_t noutputs;
4749 const char **oconstraints;
4750 const char *constraint;
4751 bool allows_mem, allows_reg;
4753 noutputs = gimple_asm_noutputs (asm_stmt);
4754 oconstraints = XALLOCAVEC (const char *, noutputs);
4756 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4758 tree link = gimple_asm_output_op (asm_stmt, i);
4759 tree op = TREE_VALUE (link);
4760 oconstraints[i]
4761 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4762 if (REFERENCE_CLASS_P (op)
4763 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4765 TREE_VALUE (link) = op;
4766 changed = true;
4769 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4771 tree link = gimple_asm_input_op (asm_stmt, i);
4772 tree op = TREE_VALUE (link);
4773 constraint
4774 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4775 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4776 oconstraints, &allows_mem, &allows_reg);
4777 if (REFERENCE_CLASS_P (op)
4778 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4779 != NULL_TREE)
4781 TREE_VALUE (link) = op;
4782 changed = true;
4786 break;
4788 case GIMPLE_DEBUG:
4789 if (gimple_debug_bind_p (stmt))
4791 tree val = gimple_debug_bind_get_value (stmt);
4792 if (val
4793 && REFERENCE_CLASS_P (val))
4795 tree tem = maybe_fold_reference (val, false);
4796 if (tem)
4798 gimple_debug_bind_set_value (stmt, tem);
4799 changed = true;
4802 else if (val
4803 && TREE_CODE (val) == ADDR_EXPR)
4805 tree ref = TREE_OPERAND (val, 0);
4806 tree tem = maybe_fold_reference (ref, false);
4807 if (tem)
4809 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4810 gimple_debug_bind_set_value (stmt, tem);
4811 changed = true;
4815 break;
4817 case GIMPLE_RETURN:
4819 greturn *ret_stmt = as_a<greturn *> (stmt);
4820 tree ret = gimple_return_retval(ret_stmt);
4822 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4824 tree val = valueize (ret);
4825 if (val && val != ret
4826 && may_propagate_copy (ret, val))
4828 gimple_return_set_retval (ret_stmt, val);
4829 changed = true;
4833 break;
4835 default:;
4838 stmt = gsi_stmt (*gsi);
4840 /* Fold *& on the lhs. */
4841 if (gimple_has_lhs (stmt))
4843 tree lhs = gimple_get_lhs (stmt);
4844 if (lhs && REFERENCE_CLASS_P (lhs))
4846 tree new_lhs = maybe_fold_reference (lhs, true);
4847 if (new_lhs)
4849 gimple_set_lhs (stmt, new_lhs);
4850 changed = true;
4855 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4856 return changed;
4859 /* Valueziation callback that ends up not following SSA edges. */
4861 tree
4862 no_follow_ssa_edges (tree)
4864 return NULL_TREE;
4867 /* Valueization callback that ends up following single-use SSA edges only. */
4869 tree
4870 follow_single_use_edges (tree val)
4872 if (TREE_CODE (val) == SSA_NAME
4873 && !has_single_use (val))
4874 return NULL_TREE;
4875 return val;
4878 /* Fold the statement pointed to by GSI. In some cases, this function may
4879 replace the whole statement with a new one. Returns true iff folding
4880 makes any changes.
4881 The statement pointed to by GSI should be in valid gimple form but may
4882 be in unfolded state as resulting from for example constant propagation
4883 which can produce *&x = 0. */
4885 bool
4886 fold_stmt (gimple_stmt_iterator *gsi)
4888 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4891 bool
4892 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4894 return fold_stmt_1 (gsi, false, valueize);
4897 /* Perform the minimal folding on statement *GSI. Only operations like
4898 *&x created by constant propagation are handled. The statement cannot
4899 be replaced with a new one. Return true if the statement was
4900 changed, false otherwise.
4901 The statement *GSI should be in valid gimple form but may
4902 be in unfolded state as resulting from for example constant propagation
4903 which can produce *&x = 0. */
4905 bool
4906 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4908 gimple *stmt = gsi_stmt (*gsi);
4909 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4910 gcc_assert (gsi_stmt (*gsi) == stmt);
4911 return changed;
4914 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4915 if EXPR is null or we don't know how.
4916 If non-null, the result always has boolean type. */
4918 static tree
4919 canonicalize_bool (tree expr, bool invert)
4921 if (!expr)
4922 return NULL_TREE;
4923 else if (invert)
4925 if (integer_nonzerop (expr))
4926 return boolean_false_node;
4927 else if (integer_zerop (expr))
4928 return boolean_true_node;
4929 else if (TREE_CODE (expr) == SSA_NAME)
4930 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4931 build_int_cst (TREE_TYPE (expr), 0));
4932 else if (COMPARISON_CLASS_P (expr))
4933 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4934 boolean_type_node,
4935 TREE_OPERAND (expr, 0),
4936 TREE_OPERAND (expr, 1));
4937 else
4938 return NULL_TREE;
4940 else
4942 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4943 return expr;
4944 if (integer_nonzerop (expr))
4945 return boolean_true_node;
4946 else if (integer_zerop (expr))
4947 return boolean_false_node;
4948 else if (TREE_CODE (expr) == SSA_NAME)
4949 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4950 build_int_cst (TREE_TYPE (expr), 0));
4951 else if (COMPARISON_CLASS_P (expr))
4952 return fold_build2 (TREE_CODE (expr),
4953 boolean_type_node,
4954 TREE_OPERAND (expr, 0),
4955 TREE_OPERAND (expr, 1));
4956 else
4957 return NULL_TREE;
4961 /* Check to see if a boolean expression EXPR is logically equivalent to the
4962 comparison (OP1 CODE OP2). Check for various identities involving
4963 SSA_NAMEs. */
4965 static bool
4966 same_bool_comparison_p (const_tree expr, enum tree_code code,
4967 const_tree op1, const_tree op2)
4969 gimple *s;
4971 /* The obvious case. */
4972 if (TREE_CODE (expr) == code
4973 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4974 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4975 return true;
4977 /* Check for comparing (name, name != 0) and the case where expr
4978 is an SSA_NAME with a definition matching the comparison. */
4979 if (TREE_CODE (expr) == SSA_NAME
4980 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4982 if (operand_equal_p (expr, op1, 0))
4983 return ((code == NE_EXPR && integer_zerop (op2))
4984 || (code == EQ_EXPR && integer_nonzerop (op2)));
4985 s = SSA_NAME_DEF_STMT (expr);
4986 if (is_gimple_assign (s)
4987 && gimple_assign_rhs_code (s) == code
4988 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4989 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4990 return true;
4993 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4994 of name is a comparison, recurse. */
4995 if (TREE_CODE (op1) == SSA_NAME
4996 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4998 s = SSA_NAME_DEF_STMT (op1);
4999 if (is_gimple_assign (s)
5000 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5002 enum tree_code c = gimple_assign_rhs_code (s);
5003 if ((c == NE_EXPR && integer_zerop (op2))
5004 || (c == EQ_EXPR && integer_nonzerop (op2)))
5005 return same_bool_comparison_p (expr, c,
5006 gimple_assign_rhs1 (s),
5007 gimple_assign_rhs2 (s));
5008 if ((c == EQ_EXPR && integer_zerop (op2))
5009 || (c == NE_EXPR && integer_nonzerop (op2)))
5010 return same_bool_comparison_p (expr,
5011 invert_tree_comparison (c, false),
5012 gimple_assign_rhs1 (s),
5013 gimple_assign_rhs2 (s));
5016 return false;
5019 /* Check to see if two boolean expressions OP1 and OP2 are logically
5020 equivalent. */
5022 static bool
5023 same_bool_result_p (const_tree op1, const_tree op2)
5025 /* Simple cases first. */
5026 if (operand_equal_p (op1, op2, 0))
5027 return true;
5029 /* Check the cases where at least one of the operands is a comparison.
5030 These are a bit smarter than operand_equal_p in that they apply some
5031 identifies on SSA_NAMEs. */
5032 if (COMPARISON_CLASS_P (op2)
5033 && same_bool_comparison_p (op1, TREE_CODE (op2),
5034 TREE_OPERAND (op2, 0),
5035 TREE_OPERAND (op2, 1)))
5036 return true;
5037 if (COMPARISON_CLASS_P (op1)
5038 && same_bool_comparison_p (op2, TREE_CODE (op1),
5039 TREE_OPERAND (op1, 0),
5040 TREE_OPERAND (op1, 1)))
5041 return true;
5043 /* Default case. */
5044 return false;
5047 /* Forward declarations for some mutually recursive functions. */
5049 static tree
5050 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5051 enum tree_code code2, tree op2a, tree op2b);
5052 static tree
5053 and_var_with_comparison (tree var, bool invert,
5054 enum tree_code code2, tree op2a, tree op2b);
5055 static tree
5056 and_var_with_comparison_1 (gimple *stmt,
5057 enum tree_code code2, tree op2a, tree op2b);
5058 static tree
5059 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5060 enum tree_code code2, tree op2a, tree op2b);
5061 static tree
5062 or_var_with_comparison (tree var, bool invert,
5063 enum tree_code code2, tree op2a, tree op2b);
5064 static tree
5065 or_var_with_comparison_1 (gimple *stmt,
5066 enum tree_code code2, tree op2a, tree op2b);
5068 /* Helper function for and_comparisons_1: try to simplify the AND of the
5069 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5070 If INVERT is true, invert the value of the VAR before doing the AND.
5071 Return NULL_EXPR if we can't simplify this to a single expression. */
5073 static tree
5074 and_var_with_comparison (tree var, bool invert,
5075 enum tree_code code2, tree op2a, tree op2b)
5077 tree t;
5078 gimple *stmt = SSA_NAME_DEF_STMT (var);
5080 /* We can only deal with variables whose definitions are assignments. */
5081 if (!is_gimple_assign (stmt))
5082 return NULL_TREE;
5084 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5085 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5086 Then we only have to consider the simpler non-inverted cases. */
5087 if (invert)
5088 t = or_var_with_comparison_1 (stmt,
5089 invert_tree_comparison (code2, false),
5090 op2a, op2b);
5091 else
5092 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5093 return canonicalize_bool (t, invert);
5096 /* Try to simplify the AND of the ssa variable defined by the assignment
5097 STMT with the comparison specified by (OP2A CODE2 OP2B).
5098 Return NULL_EXPR if we can't simplify this to a single expression. */
5100 static tree
5101 and_var_with_comparison_1 (gimple *stmt,
5102 enum tree_code code2, tree op2a, tree op2b)
5104 tree var = gimple_assign_lhs (stmt);
5105 tree true_test_var = NULL_TREE;
5106 tree false_test_var = NULL_TREE;
5107 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5109 /* Check for identities like (var AND (var == 0)) => false. */
5110 if (TREE_CODE (op2a) == SSA_NAME
5111 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5113 if ((code2 == NE_EXPR && integer_zerop (op2b))
5114 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5116 true_test_var = op2a;
5117 if (var == true_test_var)
5118 return var;
5120 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5121 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5123 false_test_var = op2a;
5124 if (var == false_test_var)
5125 return boolean_false_node;
5129 /* If the definition is a comparison, recurse on it. */
5130 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5132 tree t = and_comparisons_1 (innercode,
5133 gimple_assign_rhs1 (stmt),
5134 gimple_assign_rhs2 (stmt),
5135 code2,
5136 op2a,
5137 op2b);
5138 if (t)
5139 return t;
5142 /* If the definition is an AND or OR expression, we may be able to
5143 simplify by reassociating. */
5144 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5145 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5147 tree inner1 = gimple_assign_rhs1 (stmt);
5148 tree inner2 = gimple_assign_rhs2 (stmt);
5149 gimple *s;
5150 tree t;
5151 tree partial = NULL_TREE;
5152 bool is_and = (innercode == BIT_AND_EXPR);
5154 /* Check for boolean identities that don't require recursive examination
5155 of inner1/inner2:
5156 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5157 inner1 AND (inner1 OR inner2) => inner1
5158 !inner1 AND (inner1 AND inner2) => false
5159 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5160 Likewise for similar cases involving inner2. */
5161 if (inner1 == true_test_var)
5162 return (is_and ? var : inner1);
5163 else if (inner2 == true_test_var)
5164 return (is_and ? var : inner2);
5165 else if (inner1 == false_test_var)
5166 return (is_and
5167 ? boolean_false_node
5168 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5169 else if (inner2 == false_test_var)
5170 return (is_and
5171 ? boolean_false_node
5172 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5174 /* Next, redistribute/reassociate the AND across the inner tests.
5175 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5176 if (TREE_CODE (inner1) == SSA_NAME
5177 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5178 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5179 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5180 gimple_assign_rhs1 (s),
5181 gimple_assign_rhs2 (s),
5182 code2, op2a, op2b)))
5184 /* Handle the AND case, where we are reassociating:
5185 (inner1 AND inner2) AND (op2a code2 op2b)
5186 => (t AND inner2)
5187 If the partial result t is a constant, we win. Otherwise
5188 continue on to try reassociating with the other inner test. */
5189 if (is_and)
5191 if (integer_onep (t))
5192 return inner2;
5193 else if (integer_zerop (t))
5194 return boolean_false_node;
5197 /* Handle the OR case, where we are redistributing:
5198 (inner1 OR inner2) AND (op2a code2 op2b)
5199 => (t OR (inner2 AND (op2a code2 op2b))) */
5200 else if (integer_onep (t))
5201 return boolean_true_node;
5203 /* Save partial result for later. */
5204 partial = t;
5207 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5208 if (TREE_CODE (inner2) == SSA_NAME
5209 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5210 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5211 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5212 gimple_assign_rhs1 (s),
5213 gimple_assign_rhs2 (s),
5214 code2, op2a, op2b)))
5216 /* Handle the AND case, where we are reassociating:
5217 (inner1 AND inner2) AND (op2a code2 op2b)
5218 => (inner1 AND t) */
5219 if (is_and)
5221 if (integer_onep (t))
5222 return inner1;
5223 else if (integer_zerop (t))
5224 return boolean_false_node;
5225 /* If both are the same, we can apply the identity
5226 (x AND x) == x. */
5227 else if (partial && same_bool_result_p (t, partial))
5228 return t;
5231 /* Handle the OR case. where we are redistributing:
5232 (inner1 OR inner2) AND (op2a code2 op2b)
5233 => (t OR (inner1 AND (op2a code2 op2b)))
5234 => (t OR partial) */
5235 else
5237 if (integer_onep (t))
5238 return boolean_true_node;
5239 else if (partial)
5241 /* We already got a simplification for the other
5242 operand to the redistributed OR expression. The
5243 interesting case is when at least one is false.
5244 Or, if both are the same, we can apply the identity
5245 (x OR x) == x. */
5246 if (integer_zerop (partial))
5247 return t;
5248 else if (integer_zerop (t))
5249 return partial;
5250 else if (same_bool_result_p (t, partial))
5251 return t;
5256 return NULL_TREE;
5259 /* Try to simplify the AND of two comparisons defined by
5260 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5261 If this can be done without constructing an intermediate value,
5262 return the resulting tree; otherwise NULL_TREE is returned.
5263 This function is deliberately asymmetric as it recurses on SSA_DEFs
5264 in the first comparison but not the second. */
5266 static tree
5267 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5268 enum tree_code code2, tree op2a, tree op2b)
5270 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5272 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5273 if (operand_equal_p (op1a, op2a, 0)
5274 && operand_equal_p (op1b, op2b, 0))
5276 /* Result will be either NULL_TREE, or a combined comparison. */
5277 tree t = combine_comparisons (UNKNOWN_LOCATION,
5278 TRUTH_ANDIF_EXPR, code1, code2,
5279 truth_type, op1a, op1b);
5280 if (t)
5281 return t;
5284 /* Likewise the swapped case of the above. */
5285 if (operand_equal_p (op1a, op2b, 0)
5286 && operand_equal_p (op1b, op2a, 0))
5288 /* Result will be either NULL_TREE, or a combined comparison. */
5289 tree t = combine_comparisons (UNKNOWN_LOCATION,
5290 TRUTH_ANDIF_EXPR, code1,
5291 swap_tree_comparison (code2),
5292 truth_type, op1a, op1b);
5293 if (t)
5294 return t;
5297 /* If both comparisons are of the same value against constants, we might
5298 be able to merge them. */
5299 if (operand_equal_p (op1a, op2a, 0)
5300 && TREE_CODE (op1b) == INTEGER_CST
5301 && TREE_CODE (op2b) == INTEGER_CST)
5303 int cmp = tree_int_cst_compare (op1b, op2b);
5305 /* If we have (op1a == op1b), we should either be able to
5306 return that or FALSE, depending on whether the constant op1b
5307 also satisfies the other comparison against op2b. */
5308 if (code1 == EQ_EXPR)
5310 bool done = true;
5311 bool val;
5312 switch (code2)
5314 case EQ_EXPR: val = (cmp == 0); break;
5315 case NE_EXPR: val = (cmp != 0); break;
5316 case LT_EXPR: val = (cmp < 0); break;
5317 case GT_EXPR: val = (cmp > 0); break;
5318 case LE_EXPR: val = (cmp <= 0); break;
5319 case GE_EXPR: val = (cmp >= 0); break;
5320 default: done = false;
5322 if (done)
5324 if (val)
5325 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5326 else
5327 return boolean_false_node;
5330 /* Likewise if the second comparison is an == comparison. */
5331 else if (code2 == EQ_EXPR)
5333 bool done = true;
5334 bool val;
5335 switch (code1)
5337 case EQ_EXPR: val = (cmp == 0); break;
5338 case NE_EXPR: val = (cmp != 0); break;
5339 case LT_EXPR: val = (cmp > 0); break;
5340 case GT_EXPR: val = (cmp < 0); break;
5341 case LE_EXPR: val = (cmp >= 0); break;
5342 case GE_EXPR: val = (cmp <= 0); break;
5343 default: done = false;
5345 if (done)
5347 if (val)
5348 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5349 else
5350 return boolean_false_node;
5354 /* Same business with inequality tests. */
5355 else if (code1 == NE_EXPR)
5357 bool val;
5358 switch (code2)
5360 case EQ_EXPR: val = (cmp != 0); break;
5361 case NE_EXPR: val = (cmp == 0); break;
5362 case LT_EXPR: val = (cmp >= 0); break;
5363 case GT_EXPR: val = (cmp <= 0); break;
5364 case LE_EXPR: val = (cmp > 0); break;
5365 case GE_EXPR: val = (cmp < 0); break;
5366 default:
5367 val = false;
5369 if (val)
5370 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5372 else if (code2 == NE_EXPR)
5374 bool val;
5375 switch (code1)
5377 case EQ_EXPR: val = (cmp == 0); break;
5378 case NE_EXPR: val = (cmp != 0); break;
5379 case LT_EXPR: val = (cmp <= 0); break;
5380 case GT_EXPR: val = (cmp >= 0); break;
5381 case LE_EXPR: val = (cmp < 0); break;
5382 case GE_EXPR: val = (cmp > 0); break;
5383 default:
5384 val = false;
5386 if (val)
5387 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5390 /* Chose the more restrictive of two < or <= comparisons. */
5391 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5392 && (code2 == LT_EXPR || code2 == LE_EXPR))
5394 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5395 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5396 else
5397 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5400 /* Likewise chose the more restrictive of two > or >= comparisons. */
5401 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5402 && (code2 == GT_EXPR || code2 == GE_EXPR))
5404 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5405 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5406 else
5407 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5410 /* Check for singleton ranges. */
5411 else if (cmp == 0
5412 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5413 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5414 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5416 /* Check for disjoint ranges. */
5417 else if (cmp <= 0
5418 && (code1 == LT_EXPR || code1 == LE_EXPR)
5419 && (code2 == GT_EXPR || code2 == GE_EXPR))
5420 return boolean_false_node;
5421 else if (cmp >= 0
5422 && (code1 == GT_EXPR || code1 == GE_EXPR)
5423 && (code2 == LT_EXPR || code2 == LE_EXPR))
5424 return boolean_false_node;
5427 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5428 NAME's definition is a truth value. See if there are any simplifications
5429 that can be done against the NAME's definition. */
5430 if (TREE_CODE (op1a) == SSA_NAME
5431 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5432 && (integer_zerop (op1b) || integer_onep (op1b)))
5434 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5435 || (code1 == NE_EXPR && integer_onep (op1b)));
5436 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5437 switch (gimple_code (stmt))
5439 case GIMPLE_ASSIGN:
5440 /* Try to simplify by copy-propagating the definition. */
5441 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5443 case GIMPLE_PHI:
5444 /* If every argument to the PHI produces the same result when
5445 ANDed with the second comparison, we win.
5446 Do not do this unless the type is bool since we need a bool
5447 result here anyway. */
5448 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5450 tree result = NULL_TREE;
5451 unsigned i;
5452 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5454 tree arg = gimple_phi_arg_def (stmt, i);
5456 /* If this PHI has itself as an argument, ignore it.
5457 If all the other args produce the same result,
5458 we're still OK. */
5459 if (arg == gimple_phi_result (stmt))
5460 continue;
5461 else if (TREE_CODE (arg) == INTEGER_CST)
5463 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5465 if (!result)
5466 result = boolean_false_node;
5467 else if (!integer_zerop (result))
5468 return NULL_TREE;
5470 else if (!result)
5471 result = fold_build2 (code2, boolean_type_node,
5472 op2a, op2b);
5473 else if (!same_bool_comparison_p (result,
5474 code2, op2a, op2b))
5475 return NULL_TREE;
5477 else if (TREE_CODE (arg) == SSA_NAME
5478 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5480 tree temp;
5481 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5482 /* In simple cases we can look through PHI nodes,
5483 but we have to be careful with loops.
5484 See PR49073. */
5485 if (! dom_info_available_p (CDI_DOMINATORS)
5486 || gimple_bb (def_stmt) == gimple_bb (stmt)
5487 || dominated_by_p (CDI_DOMINATORS,
5488 gimple_bb (def_stmt),
5489 gimple_bb (stmt)))
5490 return NULL_TREE;
5491 temp = and_var_with_comparison (arg, invert, code2,
5492 op2a, op2b);
5493 if (!temp)
5494 return NULL_TREE;
5495 else if (!result)
5496 result = temp;
5497 else if (!same_bool_result_p (result, temp))
5498 return NULL_TREE;
5500 else
5501 return NULL_TREE;
5503 return result;
5506 default:
5507 break;
5510 return NULL_TREE;
5513 /* Try to simplify the AND of two comparisons, specified by
5514 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5515 If this can be simplified to a single expression (without requiring
5516 introducing more SSA variables to hold intermediate values),
5517 return the resulting tree. Otherwise return NULL_TREE.
5518 If the result expression is non-null, it has boolean type. */
5520 tree
5521 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5522 enum tree_code code2, tree op2a, tree op2b)
5524 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5525 if (t)
5526 return t;
5527 else
5528 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5531 /* Helper function for or_comparisons_1: try to simplify the OR of the
5532 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5533 If INVERT is true, invert the value of VAR before doing the OR.
5534 Return NULL_EXPR if we can't simplify this to a single expression. */
5536 static tree
5537 or_var_with_comparison (tree var, bool invert,
5538 enum tree_code code2, tree op2a, tree op2b)
5540 tree t;
5541 gimple *stmt = SSA_NAME_DEF_STMT (var);
5543 /* We can only deal with variables whose definitions are assignments. */
5544 if (!is_gimple_assign (stmt))
5545 return NULL_TREE;
5547 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5548 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5549 Then we only have to consider the simpler non-inverted cases. */
5550 if (invert)
5551 t = and_var_with_comparison_1 (stmt,
5552 invert_tree_comparison (code2, false),
5553 op2a, op2b);
5554 else
5555 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5556 return canonicalize_bool (t, invert);
5559 /* Try to simplify the OR of the ssa variable defined by the assignment
5560 STMT with the comparison specified by (OP2A CODE2 OP2B).
5561 Return NULL_EXPR if we can't simplify this to a single expression. */
5563 static tree
5564 or_var_with_comparison_1 (gimple *stmt,
5565 enum tree_code code2, tree op2a, tree op2b)
5567 tree var = gimple_assign_lhs (stmt);
5568 tree true_test_var = NULL_TREE;
5569 tree false_test_var = NULL_TREE;
5570 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5572 /* Check for identities like (var OR (var != 0)) => true . */
5573 if (TREE_CODE (op2a) == SSA_NAME
5574 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5576 if ((code2 == NE_EXPR && integer_zerop (op2b))
5577 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5579 true_test_var = op2a;
5580 if (var == true_test_var)
5581 return var;
5583 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5584 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5586 false_test_var = op2a;
5587 if (var == false_test_var)
5588 return boolean_true_node;
5592 /* If the definition is a comparison, recurse on it. */
5593 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5595 tree t = or_comparisons_1 (innercode,
5596 gimple_assign_rhs1 (stmt),
5597 gimple_assign_rhs2 (stmt),
5598 code2,
5599 op2a,
5600 op2b);
5601 if (t)
5602 return t;
5605 /* If the definition is an AND or OR expression, we may be able to
5606 simplify by reassociating. */
5607 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5608 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5610 tree inner1 = gimple_assign_rhs1 (stmt);
5611 tree inner2 = gimple_assign_rhs2 (stmt);
5612 gimple *s;
5613 tree t;
5614 tree partial = NULL_TREE;
5615 bool is_or = (innercode == BIT_IOR_EXPR);
5617 /* Check for boolean identities that don't require recursive examination
5618 of inner1/inner2:
5619 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5620 inner1 OR (inner1 AND inner2) => inner1
5621 !inner1 OR (inner1 OR inner2) => true
5622 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5624 if (inner1 == true_test_var)
5625 return (is_or ? var : inner1);
5626 else if (inner2 == true_test_var)
5627 return (is_or ? var : inner2);
5628 else if (inner1 == false_test_var)
5629 return (is_or
5630 ? boolean_true_node
5631 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5632 else if (inner2 == false_test_var)
5633 return (is_or
5634 ? boolean_true_node
5635 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5637 /* Next, redistribute/reassociate the OR across the inner tests.
5638 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5639 if (TREE_CODE (inner1) == SSA_NAME
5640 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5641 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5642 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5643 gimple_assign_rhs1 (s),
5644 gimple_assign_rhs2 (s),
5645 code2, op2a, op2b)))
5647 /* Handle the OR case, where we are reassociating:
5648 (inner1 OR inner2) OR (op2a code2 op2b)
5649 => (t OR inner2)
5650 If the partial result t is a constant, we win. Otherwise
5651 continue on to try reassociating with the other inner test. */
5652 if (is_or)
5654 if (integer_onep (t))
5655 return boolean_true_node;
5656 else if (integer_zerop (t))
5657 return inner2;
5660 /* Handle the AND case, where we are redistributing:
5661 (inner1 AND inner2) OR (op2a code2 op2b)
5662 => (t AND (inner2 OR (op2a code op2b))) */
5663 else if (integer_zerop (t))
5664 return boolean_false_node;
5666 /* Save partial result for later. */
5667 partial = t;
5670 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5671 if (TREE_CODE (inner2) == SSA_NAME
5672 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5673 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5674 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5675 gimple_assign_rhs1 (s),
5676 gimple_assign_rhs2 (s),
5677 code2, op2a, op2b)))
5679 /* Handle the OR case, where we are reassociating:
5680 (inner1 OR inner2) OR (op2a code2 op2b)
5681 => (inner1 OR t)
5682 => (t OR partial) */
5683 if (is_or)
5685 if (integer_zerop (t))
5686 return inner1;
5687 else if (integer_onep (t))
5688 return boolean_true_node;
5689 /* If both are the same, we can apply the identity
5690 (x OR x) == x. */
5691 else if (partial && same_bool_result_p (t, partial))
5692 return t;
5695 /* Handle the AND case, where we are redistributing:
5696 (inner1 AND inner2) OR (op2a code2 op2b)
5697 => (t AND (inner1 OR (op2a code2 op2b)))
5698 => (t AND partial) */
5699 else
5701 if (integer_zerop (t))
5702 return boolean_false_node;
5703 else if (partial)
5705 /* We already got a simplification for the other
5706 operand to the redistributed AND expression. The
5707 interesting case is when at least one is true.
5708 Or, if both are the same, we can apply the identity
5709 (x AND x) == x. */
5710 if (integer_onep (partial))
5711 return t;
5712 else if (integer_onep (t))
5713 return partial;
5714 else if (same_bool_result_p (t, partial))
5715 return t;
5720 return NULL_TREE;
5723 /* Try to simplify the OR of two comparisons defined by
5724 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5725 If this can be done without constructing an intermediate value,
5726 return the resulting tree; otherwise NULL_TREE is returned.
5727 This function is deliberately asymmetric as it recurses on SSA_DEFs
5728 in the first comparison but not the second. */
5730 static tree
5731 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5732 enum tree_code code2, tree op2a, tree op2b)
5734 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5736 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5737 if (operand_equal_p (op1a, op2a, 0)
5738 && operand_equal_p (op1b, op2b, 0))
5740 /* Result will be either NULL_TREE, or a combined comparison. */
5741 tree t = combine_comparisons (UNKNOWN_LOCATION,
5742 TRUTH_ORIF_EXPR, code1, code2,
5743 truth_type, op1a, op1b);
5744 if (t)
5745 return t;
5748 /* Likewise the swapped case of the above. */
5749 if (operand_equal_p (op1a, op2b, 0)
5750 && operand_equal_p (op1b, op2a, 0))
5752 /* Result will be either NULL_TREE, or a combined comparison. */
5753 tree t = combine_comparisons (UNKNOWN_LOCATION,
5754 TRUTH_ORIF_EXPR, code1,
5755 swap_tree_comparison (code2),
5756 truth_type, op1a, op1b);
5757 if (t)
5758 return t;
5761 /* If both comparisons are of the same value against constants, we might
5762 be able to merge them. */
5763 if (operand_equal_p (op1a, op2a, 0)
5764 && TREE_CODE (op1b) == INTEGER_CST
5765 && TREE_CODE (op2b) == INTEGER_CST)
5767 int cmp = tree_int_cst_compare (op1b, op2b);
5769 /* If we have (op1a != op1b), we should either be able to
5770 return that or TRUE, depending on whether the constant op1b
5771 also satisfies the other comparison against op2b. */
5772 if (code1 == NE_EXPR)
5774 bool done = true;
5775 bool val;
5776 switch (code2)
5778 case EQ_EXPR: val = (cmp == 0); break;
5779 case NE_EXPR: val = (cmp != 0); break;
5780 case LT_EXPR: val = (cmp < 0); break;
5781 case GT_EXPR: val = (cmp > 0); break;
5782 case LE_EXPR: val = (cmp <= 0); break;
5783 case GE_EXPR: val = (cmp >= 0); break;
5784 default: done = false;
5786 if (done)
5788 if (val)
5789 return boolean_true_node;
5790 else
5791 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5794 /* Likewise if the second comparison is a != comparison. */
5795 else if (code2 == NE_EXPR)
5797 bool done = true;
5798 bool val;
5799 switch (code1)
5801 case EQ_EXPR: val = (cmp == 0); break;
5802 case NE_EXPR: val = (cmp != 0); break;
5803 case LT_EXPR: val = (cmp > 0); break;
5804 case GT_EXPR: val = (cmp < 0); break;
5805 case LE_EXPR: val = (cmp >= 0); break;
5806 case GE_EXPR: val = (cmp <= 0); break;
5807 default: done = false;
5809 if (done)
5811 if (val)
5812 return boolean_true_node;
5813 else
5814 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5818 /* See if an equality test is redundant with the other comparison. */
5819 else if (code1 == EQ_EXPR)
5821 bool val;
5822 switch (code2)
5824 case EQ_EXPR: val = (cmp == 0); break;
5825 case NE_EXPR: val = (cmp != 0); break;
5826 case LT_EXPR: val = (cmp < 0); break;
5827 case GT_EXPR: val = (cmp > 0); break;
5828 case LE_EXPR: val = (cmp <= 0); break;
5829 case GE_EXPR: val = (cmp >= 0); break;
5830 default:
5831 val = false;
5833 if (val)
5834 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5836 else if (code2 == EQ_EXPR)
5838 bool val;
5839 switch (code1)
5841 case EQ_EXPR: val = (cmp == 0); break;
5842 case NE_EXPR: val = (cmp != 0); break;
5843 case LT_EXPR: val = (cmp > 0); break;
5844 case GT_EXPR: val = (cmp < 0); break;
5845 case LE_EXPR: val = (cmp >= 0); break;
5846 case GE_EXPR: val = (cmp <= 0); break;
5847 default:
5848 val = false;
5850 if (val)
5851 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5854 /* Chose the less restrictive of two < or <= comparisons. */
5855 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5856 && (code2 == LT_EXPR || code2 == LE_EXPR))
5858 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5859 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5860 else
5861 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5864 /* Likewise chose the less restrictive of two > or >= comparisons. */
5865 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5866 && (code2 == GT_EXPR || code2 == GE_EXPR))
5868 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5869 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5870 else
5871 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5874 /* Check for singleton ranges. */
5875 else if (cmp == 0
5876 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5877 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5878 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5880 /* Check for less/greater pairs that don't restrict the range at all. */
5881 else if (cmp >= 0
5882 && (code1 == LT_EXPR || code1 == LE_EXPR)
5883 && (code2 == GT_EXPR || code2 == GE_EXPR))
5884 return boolean_true_node;
5885 else if (cmp <= 0
5886 && (code1 == GT_EXPR || code1 == GE_EXPR)
5887 && (code2 == LT_EXPR || code2 == LE_EXPR))
5888 return boolean_true_node;
5891 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5892 NAME's definition is a truth value. See if there are any simplifications
5893 that can be done against the NAME's definition. */
5894 if (TREE_CODE (op1a) == SSA_NAME
5895 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5896 && (integer_zerop (op1b) || integer_onep (op1b)))
5898 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5899 || (code1 == NE_EXPR && integer_onep (op1b)));
5900 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5901 switch (gimple_code (stmt))
5903 case GIMPLE_ASSIGN:
5904 /* Try to simplify by copy-propagating the definition. */
5905 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5907 case GIMPLE_PHI:
5908 /* If every argument to the PHI produces the same result when
5909 ORed with the second comparison, we win.
5910 Do not do this unless the type is bool since we need a bool
5911 result here anyway. */
5912 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5914 tree result = NULL_TREE;
5915 unsigned i;
5916 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5918 tree arg = gimple_phi_arg_def (stmt, i);
5920 /* If this PHI has itself as an argument, ignore it.
5921 If all the other args produce the same result,
5922 we're still OK. */
5923 if (arg == gimple_phi_result (stmt))
5924 continue;
5925 else if (TREE_CODE (arg) == INTEGER_CST)
5927 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5929 if (!result)
5930 result = boolean_true_node;
5931 else if (!integer_onep (result))
5932 return NULL_TREE;
5934 else if (!result)
5935 result = fold_build2 (code2, boolean_type_node,
5936 op2a, op2b);
5937 else if (!same_bool_comparison_p (result,
5938 code2, op2a, op2b))
5939 return NULL_TREE;
5941 else if (TREE_CODE (arg) == SSA_NAME
5942 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5944 tree temp;
5945 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5946 /* In simple cases we can look through PHI nodes,
5947 but we have to be careful with loops.
5948 See PR49073. */
5949 if (! dom_info_available_p (CDI_DOMINATORS)
5950 || gimple_bb (def_stmt) == gimple_bb (stmt)
5951 || dominated_by_p (CDI_DOMINATORS,
5952 gimple_bb (def_stmt),
5953 gimple_bb (stmt)))
5954 return NULL_TREE;
5955 temp = or_var_with_comparison (arg, invert, code2,
5956 op2a, op2b);
5957 if (!temp)
5958 return NULL_TREE;
5959 else if (!result)
5960 result = temp;
5961 else if (!same_bool_result_p (result, temp))
5962 return NULL_TREE;
5964 else
5965 return NULL_TREE;
5967 return result;
5970 default:
5971 break;
5974 return NULL_TREE;
5977 /* Try to simplify the OR of two comparisons, specified by
5978 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5979 If this can be simplified to a single expression (without requiring
5980 introducing more SSA variables to hold intermediate values),
5981 return the resulting tree. Otherwise return NULL_TREE.
5982 If the result expression is non-null, it has boolean type. */
5984 tree
5985 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5986 enum tree_code code2, tree op2a, tree op2b)
5988 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5989 if (t)
5990 return t;
5991 else
5992 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5996 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5998 Either NULL_TREE, a simplified but non-constant or a constant
5999 is returned.
6001 ??? This should go into a gimple-fold-inline.h file to be eventually
6002 privatized with the single valueize function used in the various TUs
6003 to avoid the indirect function call overhead. */
6005 tree
6006 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6007 tree (*gvalueize) (tree))
6009 code_helper rcode;
6010 tree ops[3] = {};
6011 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6012 edges if there are intermediate VARYING defs. For this reason
6013 do not follow SSA edges here even though SCCVN can technically
6014 just deal fine with that. */
6015 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6017 tree res = NULL_TREE;
6018 if (gimple_simplified_result_is_gimple_val (rcode, ops))
6019 res = ops[0];
6020 else if (mprts_hook)
6021 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6022 if (res)
6024 if (dump_file && dump_flags & TDF_DETAILS)
6026 fprintf (dump_file, "Match-and-simplified ");
6027 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6028 fprintf (dump_file, " to ");
6029 print_generic_expr (dump_file, res);
6030 fprintf (dump_file, "\n");
6032 return res;
6036 location_t loc = gimple_location (stmt);
6037 switch (gimple_code (stmt))
6039 case GIMPLE_ASSIGN:
6041 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6043 switch (get_gimple_rhs_class (subcode))
6045 case GIMPLE_SINGLE_RHS:
6047 tree rhs = gimple_assign_rhs1 (stmt);
6048 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6050 if (TREE_CODE (rhs) == SSA_NAME)
6052 /* If the RHS is an SSA_NAME, return its known constant value,
6053 if any. */
6054 return (*valueize) (rhs);
6056 /* Handle propagating invariant addresses into address
6057 operations. */
6058 else if (TREE_CODE (rhs) == ADDR_EXPR
6059 && !is_gimple_min_invariant (rhs))
6061 poly_int64 offset = 0;
6062 tree base;
6063 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6064 &offset,
6065 valueize);
6066 if (base
6067 && (CONSTANT_CLASS_P (base)
6068 || decl_address_invariant_p (base)))
6069 return build_invariant_address (TREE_TYPE (rhs),
6070 base, offset);
6072 else if (TREE_CODE (rhs) == CONSTRUCTOR
6073 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6074 && (CONSTRUCTOR_NELTS (rhs)
6075 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6077 unsigned i, nelts;
6078 tree val;
6080 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6081 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6082 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6084 val = (*valueize) (val);
6085 if (TREE_CODE (val) == INTEGER_CST
6086 || TREE_CODE (val) == REAL_CST
6087 || TREE_CODE (val) == FIXED_CST)
6088 vec.quick_push (val);
6089 else
6090 return NULL_TREE;
6093 return vec.build ();
6095 if (subcode == OBJ_TYPE_REF)
6097 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6098 /* If callee is constant, we can fold away the wrapper. */
6099 if (is_gimple_min_invariant (val))
6100 return val;
6103 if (kind == tcc_reference)
6105 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6106 || TREE_CODE (rhs) == REALPART_EXPR
6107 || TREE_CODE (rhs) == IMAGPART_EXPR)
6108 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6110 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6111 return fold_unary_loc (EXPR_LOCATION (rhs),
6112 TREE_CODE (rhs),
6113 TREE_TYPE (rhs), val);
6115 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6116 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6118 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6119 return fold_ternary_loc (EXPR_LOCATION (rhs),
6120 TREE_CODE (rhs),
6121 TREE_TYPE (rhs), val,
6122 TREE_OPERAND (rhs, 1),
6123 TREE_OPERAND (rhs, 2));
6125 else if (TREE_CODE (rhs) == MEM_REF
6126 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6128 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6129 if (TREE_CODE (val) == ADDR_EXPR
6130 && is_gimple_min_invariant (val))
6132 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6133 unshare_expr (val),
6134 TREE_OPERAND (rhs, 1));
6135 if (tem)
6136 rhs = tem;
6139 return fold_const_aggregate_ref_1 (rhs, valueize);
6141 else if (kind == tcc_declaration)
6142 return get_symbol_constant_value (rhs);
6143 return rhs;
6146 case GIMPLE_UNARY_RHS:
6147 return NULL_TREE;
6149 case GIMPLE_BINARY_RHS:
6150 /* Translate &x + CST into an invariant form suitable for
6151 further propagation. */
6152 if (subcode == POINTER_PLUS_EXPR)
6154 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6155 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6156 if (TREE_CODE (op0) == ADDR_EXPR
6157 && TREE_CODE (op1) == INTEGER_CST)
6159 tree off = fold_convert (ptr_type_node, op1);
6160 return build_fold_addr_expr_loc
6161 (loc,
6162 fold_build2 (MEM_REF,
6163 TREE_TYPE (TREE_TYPE (op0)),
6164 unshare_expr (op0), off));
6167 /* Canonicalize bool != 0 and bool == 0 appearing after
6168 valueization. While gimple_simplify handles this
6169 it can get confused by the ~X == 1 -> X == 0 transform
6170 which we cant reduce to a SSA name or a constant
6171 (and we have no way to tell gimple_simplify to not
6172 consider those transforms in the first place). */
6173 else if (subcode == EQ_EXPR
6174 || subcode == NE_EXPR)
6176 tree lhs = gimple_assign_lhs (stmt);
6177 tree op0 = gimple_assign_rhs1 (stmt);
6178 if (useless_type_conversion_p (TREE_TYPE (lhs),
6179 TREE_TYPE (op0)))
6181 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6182 op0 = (*valueize) (op0);
6183 if (TREE_CODE (op0) == INTEGER_CST)
6184 std::swap (op0, op1);
6185 if (TREE_CODE (op1) == INTEGER_CST
6186 && ((subcode == NE_EXPR && integer_zerop (op1))
6187 || (subcode == EQ_EXPR && integer_onep (op1))))
6188 return op0;
6191 return NULL_TREE;
6193 case GIMPLE_TERNARY_RHS:
6195 /* Handle ternary operators that can appear in GIMPLE form. */
6196 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6197 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6198 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6199 return fold_ternary_loc (loc, subcode,
6200 gimple_expr_type (stmt), op0, op1, op2);
6203 default:
6204 gcc_unreachable ();
6208 case GIMPLE_CALL:
6210 tree fn;
6211 gcall *call_stmt = as_a <gcall *> (stmt);
6213 if (gimple_call_internal_p (stmt))
6215 enum tree_code subcode = ERROR_MARK;
6216 switch (gimple_call_internal_fn (stmt))
6218 case IFN_UBSAN_CHECK_ADD:
6219 subcode = PLUS_EXPR;
6220 break;
6221 case IFN_UBSAN_CHECK_SUB:
6222 subcode = MINUS_EXPR;
6223 break;
6224 case IFN_UBSAN_CHECK_MUL:
6225 subcode = MULT_EXPR;
6226 break;
6227 case IFN_BUILTIN_EXPECT:
6229 tree arg0 = gimple_call_arg (stmt, 0);
6230 tree op0 = (*valueize) (arg0);
6231 if (TREE_CODE (op0) == INTEGER_CST)
6232 return op0;
6233 return NULL_TREE;
6235 default:
6236 return NULL_TREE;
6238 tree arg0 = gimple_call_arg (stmt, 0);
6239 tree arg1 = gimple_call_arg (stmt, 1);
6240 tree op0 = (*valueize) (arg0);
6241 tree op1 = (*valueize) (arg1);
6243 if (TREE_CODE (op0) != INTEGER_CST
6244 || TREE_CODE (op1) != INTEGER_CST)
6246 switch (subcode)
6248 case MULT_EXPR:
6249 /* x * 0 = 0 * x = 0 without overflow. */
6250 if (integer_zerop (op0) || integer_zerop (op1))
6251 return build_zero_cst (TREE_TYPE (arg0));
6252 break;
6253 case MINUS_EXPR:
6254 /* y - y = 0 without overflow. */
6255 if (operand_equal_p (op0, op1, 0))
6256 return build_zero_cst (TREE_TYPE (arg0));
6257 break;
6258 default:
6259 break;
6262 tree res
6263 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6264 if (res
6265 && TREE_CODE (res) == INTEGER_CST
6266 && !TREE_OVERFLOW (res))
6267 return res;
6268 return NULL_TREE;
6271 fn = (*valueize) (gimple_call_fn (stmt));
6272 if (TREE_CODE (fn) == ADDR_EXPR
6273 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6274 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6275 && gimple_builtin_call_types_compatible_p (stmt,
6276 TREE_OPERAND (fn, 0)))
6278 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6279 tree retval;
6280 unsigned i;
6281 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6282 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6283 retval = fold_builtin_call_array (loc,
6284 gimple_call_return_type (call_stmt),
6285 fn, gimple_call_num_args (stmt), args);
6286 if (retval)
6288 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6289 STRIP_NOPS (retval);
6290 retval = fold_convert (gimple_call_return_type (call_stmt),
6291 retval);
6293 return retval;
6295 return NULL_TREE;
6298 default:
6299 return NULL_TREE;
6303 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6304 Returns NULL_TREE if folding to a constant is not possible, otherwise
6305 returns a constant according to is_gimple_min_invariant. */
6307 tree
6308 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6310 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6311 if (res && is_gimple_min_invariant (res))
6312 return res;
6313 return NULL_TREE;
6317 /* The following set of functions are supposed to fold references using
6318 their constant initializers. */
6320 /* See if we can find constructor defining value of BASE.
6321 When we know the consructor with constant offset (such as
6322 base is array[40] and we do know constructor of array), then
6323 BIT_OFFSET is adjusted accordingly.
6325 As a special case, return error_mark_node when constructor
6326 is not explicitly available, but it is known to be zero
6327 such as 'static const int a;'. */
6328 static tree
6329 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6330 tree (*valueize)(tree))
6332 poly_int64 bit_offset2, size, max_size;
6333 bool reverse;
6335 if (TREE_CODE (base) == MEM_REF)
6337 if (!integer_zerop (TREE_OPERAND (base, 1)))
6339 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6340 return NULL_TREE;
6341 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6342 * BITS_PER_UNIT);
6345 if (valueize
6346 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6347 base = valueize (TREE_OPERAND (base, 0));
6348 if (!base || TREE_CODE (base) != ADDR_EXPR)
6349 return NULL_TREE;
6350 base = TREE_OPERAND (base, 0);
6352 else if (valueize
6353 && TREE_CODE (base) == SSA_NAME)
6354 base = valueize (base);
6356 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6357 DECL_INITIAL. If BASE is a nested reference into another
6358 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6359 the inner reference. */
6360 switch (TREE_CODE (base))
6362 case VAR_DECL:
6363 case CONST_DECL:
6365 tree init = ctor_for_folding (base);
6367 /* Our semantic is exact opposite of ctor_for_folding;
6368 NULL means unknown, while error_mark_node is 0. */
6369 if (init == error_mark_node)
6370 return NULL_TREE;
6371 if (!init)
6372 return error_mark_node;
6373 return init;
6376 case VIEW_CONVERT_EXPR:
6377 return get_base_constructor (TREE_OPERAND (base, 0),
6378 bit_offset, valueize);
6380 case ARRAY_REF:
6381 case COMPONENT_REF:
6382 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6383 &reverse);
6384 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6385 return NULL_TREE;
6386 *bit_offset += bit_offset2;
6387 return get_base_constructor (base, bit_offset, valueize);
6389 case CONSTRUCTOR:
6390 return base;
6392 default:
6393 if (CONSTANT_CLASS_P (base))
6394 return base;
6396 return NULL_TREE;
6400 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6401 SIZE to the memory at bit OFFSET. */
6403 static tree
6404 fold_array_ctor_reference (tree type, tree ctor,
6405 unsigned HOST_WIDE_INT offset,
6406 unsigned HOST_WIDE_INT size,
6407 tree from_decl)
6409 offset_int low_bound;
6410 offset_int elt_size;
6411 offset_int access_index;
6412 tree domain_type = NULL_TREE;
6413 HOST_WIDE_INT inner_offset;
6415 /* Compute low bound and elt size. */
6416 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6417 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6418 if (domain_type && TYPE_MIN_VALUE (domain_type))
6420 /* Static constructors for variably sized objects makes no sense. */
6421 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6422 return NULL_TREE;
6423 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6425 else
6426 low_bound = 0;
6427 /* Static constructors for variably sized objects makes no sense. */
6428 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6429 return NULL_TREE;
6430 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6432 /* We can handle only constantly sized accesses that are known to not
6433 be larger than size of array element. */
6434 if (!TYPE_SIZE_UNIT (type)
6435 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6436 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6437 || elt_size == 0)
6438 return NULL_TREE;
6440 /* Compute the array index we look for. */
6441 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6442 elt_size);
6443 access_index += low_bound;
6445 /* And offset within the access. */
6446 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6448 /* See if the array field is large enough to span whole access. We do not
6449 care to fold accesses spanning multiple array indexes. */
6450 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6451 return NULL_TREE;
6452 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6453 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6455 /* When memory is not explicitely mentioned in constructor,
6456 it is 0 (or out of range). */
6457 return build_zero_cst (type);
6460 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6461 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6463 static tree
6464 fold_nonarray_ctor_reference (tree type, tree ctor,
6465 unsigned HOST_WIDE_INT offset,
6466 unsigned HOST_WIDE_INT size,
6467 tree from_decl)
6469 unsigned HOST_WIDE_INT cnt;
6470 tree cfield, cval;
6472 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6473 cval)
6475 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6476 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6477 tree field_size = DECL_SIZE (cfield);
6478 offset_int bitoffset;
6479 offset_int bitoffset_end, access_end;
6481 /* Variable sized objects in static constructors makes no sense,
6482 but field_size can be NULL for flexible array members. */
6483 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6484 && TREE_CODE (byte_offset) == INTEGER_CST
6485 && (field_size != NULL_TREE
6486 ? TREE_CODE (field_size) == INTEGER_CST
6487 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6489 /* Compute bit offset of the field. */
6490 bitoffset = (wi::to_offset (field_offset)
6491 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6492 /* Compute bit offset where the field ends. */
6493 if (field_size != NULL_TREE)
6494 bitoffset_end = bitoffset + wi::to_offset (field_size);
6495 else
6496 bitoffset_end = 0;
6498 access_end = offset_int (offset) + size;
6500 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6501 [BITOFFSET, BITOFFSET_END)? */
6502 if (wi::cmps (access_end, bitoffset) > 0
6503 && (field_size == NULL_TREE
6504 || wi::lts_p (offset, bitoffset_end)))
6506 offset_int inner_offset = offset_int (offset) - bitoffset;
6507 /* We do have overlap. Now see if field is large enough to
6508 cover the access. Give up for accesses spanning multiple
6509 fields. */
6510 if (wi::cmps (access_end, bitoffset_end) > 0)
6511 return NULL_TREE;
6512 if (offset < bitoffset)
6513 return NULL_TREE;
6514 return fold_ctor_reference (type, cval,
6515 inner_offset.to_uhwi (), size,
6516 from_decl);
6519 /* When memory is not explicitely mentioned in constructor, it is 0. */
6520 return build_zero_cst (type);
6523 /* CTOR is value initializing memory, fold reference of type TYPE and
6524 size POLY_SIZE to the memory at bit POLY_OFFSET. */
6526 tree
6527 fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
6528 poly_uint64 poly_size, tree from_decl)
6530 tree ret;
6532 /* We found the field with exact match. */
6533 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6534 && known_eq (poly_offset, 0U))
6535 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6537 /* The remaining optimizations need a constant size and offset. */
6538 unsigned HOST_WIDE_INT size, offset;
6539 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6540 return NULL_TREE;
6542 /* We are at the end of walk, see if we can view convert the
6543 result. */
6544 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6545 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6546 && !compare_tree_int (TYPE_SIZE (type), size)
6547 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6549 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6550 if (ret)
6552 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6553 if (ret)
6554 STRIP_USELESS_TYPE_CONVERSION (ret);
6556 return ret;
6558 /* For constants and byte-aligned/sized reads try to go through
6559 native_encode/interpret. */
6560 if (CONSTANT_CLASS_P (ctor)
6561 && BITS_PER_UNIT == 8
6562 && offset % BITS_PER_UNIT == 0
6563 && size % BITS_PER_UNIT == 0
6564 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6566 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6567 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6568 offset / BITS_PER_UNIT);
6569 if (len > 0)
6570 return native_interpret_expr (type, buf, len);
6572 if (TREE_CODE (ctor) == CONSTRUCTOR)
6575 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6576 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6577 return fold_array_ctor_reference (type, ctor, offset, size,
6578 from_decl);
6579 else
6580 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6581 from_decl);
6584 return NULL_TREE;
6587 /* Return the tree representing the element referenced by T if T is an
6588 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6589 names using VALUEIZE. Return NULL_TREE otherwise. */
6591 tree
6592 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6594 tree ctor, idx, base;
6595 poly_int64 offset, size, max_size;
6596 tree tem;
6597 bool reverse;
6599 if (TREE_THIS_VOLATILE (t))
6600 return NULL_TREE;
6602 if (DECL_P (t))
6603 return get_symbol_constant_value (t);
6605 tem = fold_read_from_constant_string (t);
6606 if (tem)
6607 return tem;
6609 switch (TREE_CODE (t))
6611 case ARRAY_REF:
6612 case ARRAY_RANGE_REF:
6613 /* Constant indexes are handled well by get_base_constructor.
6614 Only special case variable offsets.
6615 FIXME: This code can't handle nested references with variable indexes
6616 (they will be handled only by iteration of ccp). Perhaps we can bring
6617 get_ref_base_and_extent here and make it use a valueize callback. */
6618 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6619 && valueize
6620 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6621 && poly_int_tree_p (idx))
6623 tree low_bound, unit_size;
6625 /* If the resulting bit-offset is constant, track it. */
6626 if ((low_bound = array_ref_low_bound (t),
6627 poly_int_tree_p (low_bound))
6628 && (unit_size = array_ref_element_size (t),
6629 tree_fits_uhwi_p (unit_size)))
6631 poly_offset_int woffset
6632 = wi::sext (wi::to_poly_offset (idx)
6633 - wi::to_poly_offset (low_bound),
6634 TYPE_PRECISION (TREE_TYPE (idx)));
6636 if (woffset.to_shwi (&offset))
6638 /* TODO: This code seems wrong, multiply then check
6639 to see if it fits. */
6640 offset *= tree_to_uhwi (unit_size);
6641 offset *= BITS_PER_UNIT;
6643 base = TREE_OPERAND (t, 0);
6644 ctor = get_base_constructor (base, &offset, valueize);
6645 /* Empty constructor. Always fold to 0. */
6646 if (ctor == error_mark_node)
6647 return build_zero_cst (TREE_TYPE (t));
6648 /* Out of bound array access. Value is undefined,
6649 but don't fold. */
6650 if (maybe_lt (offset, 0))
6651 return NULL_TREE;
6652 /* We can not determine ctor. */
6653 if (!ctor)
6654 return NULL_TREE;
6655 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6656 tree_to_uhwi (unit_size)
6657 * BITS_PER_UNIT,
6658 base);
6662 /* Fallthru. */
6664 case COMPONENT_REF:
6665 case BIT_FIELD_REF:
6666 case TARGET_MEM_REF:
6667 case MEM_REF:
6668 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6669 ctor = get_base_constructor (base, &offset, valueize);
6671 /* Empty constructor. Always fold to 0. */
6672 if (ctor == error_mark_node)
6673 return build_zero_cst (TREE_TYPE (t));
6674 /* We do not know precise address. */
6675 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6676 return NULL_TREE;
6677 /* We can not determine ctor. */
6678 if (!ctor)
6679 return NULL_TREE;
6681 /* Out of bound array access. Value is undefined, but don't fold. */
6682 if (maybe_lt (offset, 0))
6683 return NULL_TREE;
6685 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6686 base);
6688 case REALPART_EXPR:
6689 case IMAGPART_EXPR:
6691 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6692 if (c && TREE_CODE (c) == COMPLEX_CST)
6693 return fold_build1_loc (EXPR_LOCATION (t),
6694 TREE_CODE (t), TREE_TYPE (t), c);
6695 break;
6698 default:
6699 break;
6702 return NULL_TREE;
6705 tree
6706 fold_const_aggregate_ref (tree t)
6708 return fold_const_aggregate_ref_1 (t, NULL);
6711 /* Lookup virtual method with index TOKEN in a virtual table V
6712 at OFFSET.
6713 Set CAN_REFER if non-NULL to false if method
6714 is not referable or if the virtual table is ill-formed (such as rewriten
6715 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6717 tree
6718 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6719 tree v,
6720 unsigned HOST_WIDE_INT offset,
6721 bool *can_refer)
6723 tree vtable = v, init, fn;
6724 unsigned HOST_WIDE_INT size;
6725 unsigned HOST_WIDE_INT elt_size, access_index;
6726 tree domain_type;
6728 if (can_refer)
6729 *can_refer = true;
6731 /* First of all double check we have virtual table. */
6732 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6734 /* Pass down that we lost track of the target. */
6735 if (can_refer)
6736 *can_refer = false;
6737 return NULL_TREE;
6740 init = ctor_for_folding (v);
6742 /* The virtual tables should always be born with constructors
6743 and we always should assume that they are avaialble for
6744 folding. At the moment we do not stream them in all cases,
6745 but it should never happen that ctor seem unreachable. */
6746 gcc_assert (init);
6747 if (init == error_mark_node)
6749 /* Pass down that we lost track of the target. */
6750 if (can_refer)
6751 *can_refer = false;
6752 return NULL_TREE;
6754 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6755 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6756 offset *= BITS_PER_UNIT;
6757 offset += token * size;
6759 /* Lookup the value in the constructor that is assumed to be array.
6760 This is equivalent to
6761 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6762 offset, size, NULL);
6763 but in a constant time. We expect that frontend produced a simple
6764 array without indexed initializers. */
6766 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6767 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6768 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6769 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6771 access_index = offset / BITS_PER_UNIT / elt_size;
6772 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6774 /* This code makes an assumption that there are no
6775 indexed fileds produced by C++ FE, so we can directly index the array. */
6776 if (access_index < CONSTRUCTOR_NELTS (init))
6778 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6779 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6780 STRIP_NOPS (fn);
6782 else
6783 fn = NULL;
6785 /* For type inconsistent program we may end up looking up virtual method
6786 in virtual table that does not contain TOKEN entries. We may overrun
6787 the virtual table and pick up a constant or RTTI info pointer.
6788 In any case the call is undefined. */
6789 if (!fn
6790 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6791 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6792 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6793 else
6795 fn = TREE_OPERAND (fn, 0);
6797 /* When cgraph node is missing and function is not public, we cannot
6798 devirtualize. This can happen in WHOPR when the actual method
6799 ends up in other partition, because we found devirtualization
6800 possibility too late. */
6801 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6803 if (can_refer)
6805 *can_refer = false;
6806 return fn;
6808 return NULL_TREE;
6812 /* Make sure we create a cgraph node for functions we'll reference.
6813 They can be non-existent if the reference comes from an entry
6814 of an external vtable for example. */
6815 cgraph_node::get_create (fn);
6817 return fn;
6820 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6821 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6822 KNOWN_BINFO carries the binfo describing the true type of
6823 OBJ_TYPE_REF_OBJECT(REF).
6824 Set CAN_REFER if non-NULL to false if method
6825 is not referable or if the virtual table is ill-formed (such as rewriten
6826 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6828 tree
6829 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6830 bool *can_refer)
6832 unsigned HOST_WIDE_INT offset;
6833 tree v;
6835 v = BINFO_VTABLE (known_binfo);
6836 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6837 if (!v)
6838 return NULL_TREE;
6840 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6842 if (can_refer)
6843 *can_refer = false;
6844 return NULL_TREE;
6846 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6849 /* Given a pointer value T, return a simplified version of an
6850 indirection through T, or NULL_TREE if no simplification is
6851 possible. Note that the resulting type may be different from
6852 the type pointed to in the sense that it is still compatible
6853 from the langhooks point of view. */
6855 tree
6856 gimple_fold_indirect_ref (tree t)
6858 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6859 tree sub = t;
6860 tree subtype;
6862 STRIP_NOPS (sub);
6863 subtype = TREE_TYPE (sub);
6864 if (!POINTER_TYPE_P (subtype)
6865 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6866 return NULL_TREE;
6868 if (TREE_CODE (sub) == ADDR_EXPR)
6870 tree op = TREE_OPERAND (sub, 0);
6871 tree optype = TREE_TYPE (op);
6872 /* *&p => p */
6873 if (useless_type_conversion_p (type, optype))
6874 return op;
6876 /* *(foo *)&fooarray => fooarray[0] */
6877 if (TREE_CODE (optype) == ARRAY_TYPE
6878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6879 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6881 tree type_domain = TYPE_DOMAIN (optype);
6882 tree min_val = size_zero_node;
6883 if (type_domain && TYPE_MIN_VALUE (type_domain))
6884 min_val = TYPE_MIN_VALUE (type_domain);
6885 if (TREE_CODE (min_val) == INTEGER_CST)
6886 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6888 /* *(foo *)&complexfoo => __real__ complexfoo */
6889 else if (TREE_CODE (optype) == COMPLEX_TYPE
6890 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6891 return fold_build1 (REALPART_EXPR, type, op);
6892 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6893 else if (TREE_CODE (optype) == VECTOR_TYPE
6894 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6896 tree part_width = TYPE_SIZE (type);
6897 tree index = bitsize_int (0);
6898 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6902 /* *(p + CST) -> ... */
6903 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6904 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6906 tree addr = TREE_OPERAND (sub, 0);
6907 tree off = TREE_OPERAND (sub, 1);
6908 tree addrtype;
6910 STRIP_NOPS (addr);
6911 addrtype = TREE_TYPE (addr);
6913 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6914 if (TREE_CODE (addr) == ADDR_EXPR
6915 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6916 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6917 && tree_fits_uhwi_p (off))
6919 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6920 tree part_width = TYPE_SIZE (type);
6921 unsigned HOST_WIDE_INT part_widthi
6922 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6923 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6924 tree index = bitsize_int (indexi);
6925 if (offset / part_widthi
6926 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6927 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6928 part_width, index);
6931 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6932 if (TREE_CODE (addr) == ADDR_EXPR
6933 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6934 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6936 tree size = TYPE_SIZE_UNIT (type);
6937 if (tree_int_cst_equal (size, off))
6938 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6941 /* *(p + CST) -> MEM_REF <p, CST>. */
6942 if (TREE_CODE (addr) != ADDR_EXPR
6943 || DECL_P (TREE_OPERAND (addr, 0)))
6944 return fold_build2 (MEM_REF, type,
6945 addr,
6946 wide_int_to_tree (ptype, wi::to_wide (off)));
6949 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6950 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6951 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6952 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6954 tree type_domain;
6955 tree min_val = size_zero_node;
6956 tree osub = sub;
6957 sub = gimple_fold_indirect_ref (sub);
6958 if (! sub)
6959 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6960 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6961 if (type_domain && TYPE_MIN_VALUE (type_domain))
6962 min_val = TYPE_MIN_VALUE (type_domain);
6963 if (TREE_CODE (min_val) == INTEGER_CST)
6964 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6967 return NULL_TREE;
6970 /* Return true if CODE is an operation that when operating on signed
6971 integer types involves undefined behavior on overflow and the
6972 operation can be expressed with unsigned arithmetic. */
6974 bool
6975 arith_code_with_undefined_signed_overflow (tree_code code)
6977 switch (code)
6979 case PLUS_EXPR:
6980 case MINUS_EXPR:
6981 case MULT_EXPR:
6982 case NEGATE_EXPR:
6983 case POINTER_PLUS_EXPR:
6984 return true;
6985 default:
6986 return false;
6990 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6991 operation that can be transformed to unsigned arithmetic by converting
6992 its operand, carrying out the operation in the corresponding unsigned
6993 type and converting the result back to the original type.
6995 Returns a sequence of statements that replace STMT and also contain
6996 a modified form of STMT itself. */
6998 gimple_seq
6999 rewrite_to_defined_overflow (gimple *stmt)
7001 if (dump_file && (dump_flags & TDF_DETAILS))
7003 fprintf (dump_file, "rewriting stmt with undefined signed "
7004 "overflow ");
7005 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7008 tree lhs = gimple_assign_lhs (stmt);
7009 tree type = unsigned_type_for (TREE_TYPE (lhs));
7010 gimple_seq stmts = NULL;
7011 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7013 tree op = gimple_op (stmt, i);
7014 op = gimple_convert (&stmts, type, op);
7015 gimple_set_op (stmt, i, op);
7017 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7018 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7019 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7020 gimple_seq_add_stmt (&stmts, stmt);
7021 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7022 gimple_seq_add_stmt (&stmts, cvt);
7024 return stmts;
7028 /* The valueization hook we use for the gimple_build API simplification.
7029 This makes us match fold_buildN behavior by only combining with
7030 statements in the sequence(s) we are currently building. */
7032 static tree
7033 gimple_build_valueize (tree op)
7035 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7036 return op;
7037 return NULL_TREE;
7040 /* Build the expression CODE OP0 of type TYPE with location LOC,
7041 simplifying it first if possible. Returns the built
7042 expression value and appends statements possibly defining it
7043 to SEQ. */
7045 tree
7046 gimple_build (gimple_seq *seq, location_t loc,
7047 enum tree_code code, tree type, tree op0)
7049 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7050 if (!res)
7052 res = create_tmp_reg_or_ssa_name (type);
7053 gimple *stmt;
7054 if (code == REALPART_EXPR
7055 || code == IMAGPART_EXPR
7056 || code == VIEW_CONVERT_EXPR)
7057 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7058 else
7059 stmt = gimple_build_assign (res, code, op0);
7060 gimple_set_location (stmt, loc);
7061 gimple_seq_add_stmt_without_update (seq, stmt);
7063 return res;
7066 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7067 simplifying it first if possible. Returns the built
7068 expression value and appends statements possibly defining it
7069 to SEQ. */
7071 tree
7072 gimple_build (gimple_seq *seq, location_t loc,
7073 enum tree_code code, tree type, tree op0, tree op1)
7075 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7076 if (!res)
7078 res = create_tmp_reg_or_ssa_name (type);
7079 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7080 gimple_set_location (stmt, loc);
7081 gimple_seq_add_stmt_without_update (seq, stmt);
7083 return res;
7086 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7087 simplifying it first if possible. Returns the built
7088 expression value and appends statements possibly defining it
7089 to SEQ. */
7091 tree
7092 gimple_build (gimple_seq *seq, location_t loc,
7093 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7095 tree res = gimple_simplify (code, type, op0, op1, op2,
7096 seq, gimple_build_valueize);
7097 if (!res)
7099 res = create_tmp_reg_or_ssa_name (type);
7100 gimple *stmt;
7101 if (code == BIT_FIELD_REF)
7102 stmt = gimple_build_assign (res, code,
7103 build3 (code, type, op0, op1, op2));
7104 else
7105 stmt = gimple_build_assign (res, code, op0, op1, op2);
7106 gimple_set_location (stmt, loc);
7107 gimple_seq_add_stmt_without_update (seq, stmt);
7109 return res;
7112 /* Build the call FN (ARG0) with a result of type TYPE
7113 (or no result if TYPE is void) with location LOC,
7114 simplifying it first if possible. Returns the built
7115 expression value (or NULL_TREE if TYPE is void) and appends
7116 statements possibly defining it to SEQ. */
7118 tree
7119 gimple_build (gimple_seq *seq, location_t loc,
7120 enum built_in_function fn, tree type, tree arg0)
7122 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7123 if (!res)
7125 tree decl = builtin_decl_implicit (fn);
7126 gimple *stmt = gimple_build_call (decl, 1, arg0);
7127 if (!VOID_TYPE_P (type))
7129 res = create_tmp_reg_or_ssa_name (type);
7130 gimple_call_set_lhs (stmt, res);
7132 gimple_set_location (stmt, loc);
7133 gimple_seq_add_stmt_without_update (seq, stmt);
7135 return res;
7138 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7139 (or no result if TYPE is void) with location LOC,
7140 simplifying it first if possible. Returns the built
7141 expression value (or NULL_TREE if TYPE is void) and appends
7142 statements possibly defining it to SEQ. */
7144 tree
7145 gimple_build (gimple_seq *seq, location_t loc,
7146 enum built_in_function fn, tree type, tree arg0, tree arg1)
7148 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7149 if (!res)
7151 tree decl = builtin_decl_implicit (fn);
7152 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7153 if (!VOID_TYPE_P (type))
7155 res = create_tmp_reg_or_ssa_name (type);
7156 gimple_call_set_lhs (stmt, res);
7158 gimple_set_location (stmt, loc);
7159 gimple_seq_add_stmt_without_update (seq, stmt);
7161 return res;
7164 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7165 (or no result if TYPE is void) with location LOC,
7166 simplifying it first if possible. Returns the built
7167 expression value (or NULL_TREE if TYPE is void) and appends
7168 statements possibly defining it to SEQ. */
7170 tree
7171 gimple_build (gimple_seq *seq, location_t loc,
7172 enum built_in_function fn, tree type,
7173 tree arg0, tree arg1, tree arg2)
7175 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7176 seq, gimple_build_valueize);
7177 if (!res)
7179 tree decl = builtin_decl_implicit (fn);
7180 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7181 if (!VOID_TYPE_P (type))
7183 res = create_tmp_reg_or_ssa_name (type);
7184 gimple_call_set_lhs (stmt, res);
7186 gimple_set_location (stmt, loc);
7187 gimple_seq_add_stmt_without_update (seq, stmt);
7189 return res;
7192 /* Build the conversion (TYPE) OP with a result of type TYPE
7193 with location LOC if such conversion is neccesary in GIMPLE,
7194 simplifying it first.
7195 Returns the built expression value and appends
7196 statements possibly defining it to SEQ. */
7198 tree
7199 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7201 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7202 return op;
7203 return gimple_build (seq, loc, NOP_EXPR, type, op);
7206 /* Build the conversion (ptrofftype) OP with a result of a type
7207 compatible with ptrofftype with location LOC if such conversion
7208 is neccesary in GIMPLE, simplifying it first.
7209 Returns the built expression value and appends
7210 statements possibly defining it to SEQ. */
7212 tree
7213 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7215 if (ptrofftype_p (TREE_TYPE (op)))
7216 return op;
7217 return gimple_convert (seq, loc, sizetype, op);
7220 /* Build a vector of type TYPE in which each element has the value OP.
7221 Return a gimple value for the result, appending any new statements
7222 to SEQ. */
7224 tree
7225 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7226 tree op)
7228 tree res, vec = build_vector_from_val (type, op);
7229 if (is_gimple_val (vec))
7230 return vec;
7231 if (gimple_in_ssa_p (cfun))
7232 res = make_ssa_name (type);
7233 else
7234 res = create_tmp_reg (type);
7235 gimple *stmt = gimple_build_assign (res, vec);
7236 gimple_set_location (stmt, loc);
7237 gimple_seq_add_stmt_without_update (seq, stmt);
7238 return res;
7241 /* Build a vector from BUILDER, handling the case in which some elements
7242 are non-constant. Return a gimple value for the result, appending any
7243 new instructions to SEQ.
7245 BUILDER must not have a stepped encoding on entry. This is because
7246 the function is not geared up to handle the arithmetic that would
7247 be needed in the variable case, and any code building a vector that
7248 is known to be constant should use BUILDER->build () directly. */
7250 tree
7251 gimple_build_vector (gimple_seq *seq, location_t loc,
7252 tree_vector_builder *builder)
7254 gcc_assert (builder->nelts_per_pattern () <= 2);
7255 unsigned int encoded_nelts = builder->encoded_nelts ();
7256 for (unsigned int i = 0; i < encoded_nelts; ++i)
7257 if (!TREE_CONSTANT ((*builder)[i]))
7259 tree type = builder->type ();
7260 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
7261 vec<constructor_elt, va_gc> *v;
7262 vec_alloc (v, nelts);
7263 for (i = 0; i < nelts; ++i)
7264 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7266 tree res;
7267 if (gimple_in_ssa_p (cfun))
7268 res = make_ssa_name (type);
7269 else
7270 res = create_tmp_reg (type);
7271 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7272 gimple_set_location (stmt, loc);
7273 gimple_seq_add_stmt_without_update (seq, stmt);
7274 return res;
7276 return builder->build ();
7279 /* Return true if the result of assignment STMT is known to be non-negative.
7280 If the return value is based on the assumption that signed overflow is
7281 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7282 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7284 static bool
7285 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7286 int depth)
7288 enum tree_code code = gimple_assign_rhs_code (stmt);
7289 switch (get_gimple_rhs_class (code))
7291 case GIMPLE_UNARY_RHS:
7292 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7293 gimple_expr_type (stmt),
7294 gimple_assign_rhs1 (stmt),
7295 strict_overflow_p, depth);
7296 case GIMPLE_BINARY_RHS:
7297 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7298 gimple_expr_type (stmt),
7299 gimple_assign_rhs1 (stmt),
7300 gimple_assign_rhs2 (stmt),
7301 strict_overflow_p, depth);
7302 case GIMPLE_TERNARY_RHS:
7303 return false;
7304 case GIMPLE_SINGLE_RHS:
7305 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7306 strict_overflow_p, depth);
7307 case GIMPLE_INVALID_RHS:
7308 break;
7310 gcc_unreachable ();
7313 /* Return true if return value of call STMT is known to be non-negative.
7314 If the return value is based on the assumption that signed overflow is
7315 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7316 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7318 static bool
7319 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7320 int depth)
7322 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7323 gimple_call_arg (stmt, 0) : NULL_TREE;
7324 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7325 gimple_call_arg (stmt, 1) : NULL_TREE;
7327 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7328 gimple_call_combined_fn (stmt),
7329 arg0,
7330 arg1,
7331 strict_overflow_p, depth);
7334 /* Return true if return value of call STMT is known to be non-negative.
7335 If the return value is based on the assumption that signed overflow is
7336 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7337 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7339 static bool
7340 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7341 int depth)
7343 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7345 tree arg = gimple_phi_arg_def (stmt, i);
7346 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7347 return false;
7349 return true;
7352 /* Return true if STMT is known to compute a non-negative value.
7353 If the return value is based on the assumption that signed overflow is
7354 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7355 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7357 bool
7358 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7359 int depth)
7361 switch (gimple_code (stmt))
7363 case GIMPLE_ASSIGN:
7364 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7365 depth);
7366 case GIMPLE_CALL:
7367 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7368 depth);
7369 case GIMPLE_PHI:
7370 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7371 depth);
7372 default:
7373 return false;
7377 /* Return true if the floating-point value computed by assignment STMT
7378 is known to have an integer value. We also allow +Inf, -Inf and NaN
7379 to be considered integer values. Return false for signaling NaN.
7381 DEPTH is the current nesting depth of the query. */
7383 static bool
7384 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7386 enum tree_code code = gimple_assign_rhs_code (stmt);
7387 switch (get_gimple_rhs_class (code))
7389 case GIMPLE_UNARY_RHS:
7390 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7391 gimple_assign_rhs1 (stmt), depth);
7392 case GIMPLE_BINARY_RHS:
7393 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7394 gimple_assign_rhs1 (stmt),
7395 gimple_assign_rhs2 (stmt), depth);
7396 case GIMPLE_TERNARY_RHS:
7397 return false;
7398 case GIMPLE_SINGLE_RHS:
7399 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7400 case GIMPLE_INVALID_RHS:
7401 break;
7403 gcc_unreachable ();
7406 /* Return true if the floating-point value computed by call STMT is known
7407 to have an integer value. We also allow +Inf, -Inf and NaN to be
7408 considered integer values. Return false for signaling NaN.
7410 DEPTH is the current nesting depth of the query. */
7412 static bool
7413 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7415 tree arg0 = (gimple_call_num_args (stmt) > 0
7416 ? gimple_call_arg (stmt, 0)
7417 : NULL_TREE);
7418 tree arg1 = (gimple_call_num_args (stmt) > 1
7419 ? gimple_call_arg (stmt, 1)
7420 : NULL_TREE);
7421 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7422 arg0, arg1, depth);
7425 /* Return true if the floating-point result of phi STMT is known to have
7426 an integer value. We also allow +Inf, -Inf and NaN to be considered
7427 integer values. Return false for signaling NaN.
7429 DEPTH is the current nesting depth of the query. */
7431 static bool
7432 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7434 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7436 tree arg = gimple_phi_arg_def (stmt, i);
7437 if (!integer_valued_real_single_p (arg, depth + 1))
7438 return false;
7440 return true;
7443 /* Return true if the floating-point value computed by STMT is known
7444 to have an integer value. We also allow +Inf, -Inf and NaN to be
7445 considered integer values. Return false for signaling NaN.
7447 DEPTH is the current nesting depth of the query. */
7449 bool
7450 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7452 switch (gimple_code (stmt))
7454 case GIMPLE_ASSIGN:
7455 return gimple_assign_integer_valued_real_p (stmt, depth);
7456 case GIMPLE_CALL:
7457 return gimple_call_integer_valued_real_p (stmt, depth);
7458 case GIMPLE_PHI:
7459 return gimple_phi_integer_valued_real_p (stmt, depth);
7460 default:
7461 return false;