PR tree-optimization/80631
[official-gcc.git] / gcc / gimple-fold.c
blob403fbb8c940872edc4c5ccbad526cf540ad5190e
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-object-size.h"
44 #include "tree-ssa.h"
45 #include "tree-ssa-propagate.h"
46 #include "ipa-utils.h"
47 #include "tree-ssa-address.h"
48 #include "langhooks.h"
49 #include "gimplify-me.h"
50 #include "dbgcnt.h"
51 #include "builtins.h"
52 #include "tree-eh.h"
53 #include "gimple-match.h"
54 #include "gomp-constants.h"
55 #include "optabs-query.h"
56 #include "omp-general.h"
57 #include "ipa-chkp.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
68 /* Return true when DECL can be referenced from current unit.
69 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
70 We can get declarations that are not possible to reference for various
71 reasons:
73 1) When analyzing C++ virtual tables.
74 C++ virtual tables do have known constructors even
75 when they are keyed to other compilation unit.
76 Those tables can contain pointers to methods and vars
77 in other units. Those methods have both STATIC and EXTERNAL
78 set.
79 2) In WHOPR mode devirtualization might lead to reference
80 to method that was partitioned elsehwere.
81 In this case we have static VAR_DECL or FUNCTION_DECL
82 that has no corresponding callgraph/varpool node
83 declaring the body.
84 3) COMDAT functions referred by external vtables that
85 we devirtualize only during final compilation stage.
86 At this time we already decided that we will not output
87 the function body and thus we can't reference the symbol
88 directly. */
90 static bool
91 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
93 varpool_node *vnode;
94 struct cgraph_node *node;
95 symtab_node *snode;
97 if (DECL_ABSTRACT_P (decl))
98 return false;
100 /* We are concerned only about static/external vars and functions. */
101 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
102 || !VAR_OR_FUNCTION_DECL_P (decl))
103 return true;
105 /* Static objects can be referred only if they was not optimized out yet. */
106 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
108 /* Before we start optimizing unreachable code we can be sure all
109 static objects are defined. */
110 if (symtab->function_flags_ready)
111 return true;
112 snode = symtab_node::get (decl);
113 if (!snode || !snode->definition)
114 return false;
115 node = dyn_cast <cgraph_node *> (snode);
116 return !node || !node->global.inlined_to;
119 /* We will later output the initializer, so we can refer to it.
120 So we are concerned only when DECL comes from initializer of
121 external var or var that has been optimized out. */
122 if (!from_decl
123 || !VAR_P (from_decl)
124 || (!DECL_EXTERNAL (from_decl)
125 && (vnode = varpool_node::get (from_decl)) != NULL
126 && vnode->definition)
127 || (flag_ltrans
128 && (vnode = varpool_node::get (from_decl)) != NULL
129 && vnode->in_other_partition))
130 return true;
131 /* We are folding reference from external vtable. The vtable may reffer
132 to a symbol keyed to other compilation unit. The other compilation
133 unit may be in separate DSO and the symbol may be hidden. */
134 if (DECL_VISIBILITY_SPECIFIED (decl)
135 && DECL_EXTERNAL (decl)
136 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
137 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
138 return false;
139 /* When function is public, we always can introduce new reference.
140 Exception are the COMDAT functions where introducing a direct
141 reference imply need to include function body in the curren tunit. */
142 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
143 return true;
144 /* We have COMDAT. We are going to check if we still have definition
145 or if the definition is going to be output in other partition.
146 Bypass this when gimplifying; all needed functions will be produced.
148 As observed in PR20991 for already optimized out comdat virtual functions
149 it may be tempting to not necessarily give up because the copy will be
150 output elsewhere when corresponding vtable is output.
151 This is however not possible - ABI specify that COMDATs are output in
152 units where they are used and when the other unit was compiled with LTO
153 it is possible that vtable was kept public while the function itself
154 was privatized. */
155 if (!symtab->function_flags_ready)
156 return true;
158 snode = symtab_node::get (decl);
159 if (!snode
160 || ((!snode->definition || DECL_EXTERNAL (decl))
161 && (!snode->in_other_partition
162 || (!snode->forced_by_abi && !snode->force_output))))
163 return false;
164 node = dyn_cast <cgraph_node *> (snode);
165 return !node || !node->global.inlined_to;
168 /* Create a temporary for TYPE for a statement STMT. If the current function
169 is in SSA form, a SSA name is created. Otherwise a temporary register
170 is made. */
172 tree
173 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
175 if (gimple_in_ssa_p (cfun))
176 return make_ssa_name (type, stmt);
177 else
178 return create_tmp_reg (type);
181 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
182 acceptable form for is_gimple_min_invariant.
183 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
185 tree
186 canonicalize_constructor_val (tree cval, tree from_decl)
188 tree orig_cval = cval;
189 STRIP_NOPS (cval);
190 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
191 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
193 tree ptr = TREE_OPERAND (cval, 0);
194 if (is_gimple_min_invariant (ptr))
195 cval = build1_loc (EXPR_LOCATION (cval),
196 ADDR_EXPR, TREE_TYPE (ptr),
197 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
198 ptr,
199 fold_convert (ptr_type_node,
200 TREE_OPERAND (cval, 1))));
202 if (TREE_CODE (cval) == ADDR_EXPR)
204 tree base = NULL_TREE;
205 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
207 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
208 if (base)
209 TREE_OPERAND (cval, 0) = base;
211 else
212 base = get_base_address (TREE_OPERAND (cval, 0));
213 if (!base)
214 return NULL_TREE;
216 if (VAR_OR_FUNCTION_DECL_P (base)
217 && !can_refer_decl_in_current_unit_p (base, from_decl))
218 return NULL_TREE;
219 if (TREE_TYPE (base) == error_mark_node)
220 return NULL_TREE;
221 if (VAR_P (base))
222 TREE_ADDRESSABLE (base) = 1;
223 else if (TREE_CODE (base) == FUNCTION_DECL)
225 /* Make sure we create a cgraph node for functions we'll reference.
226 They can be non-existent if the reference comes from an entry
227 of an external vtable for example. */
228 cgraph_node::get_create (base);
230 /* Fixup types in global initializers. */
231 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
232 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
234 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
235 cval = fold_convert (TREE_TYPE (orig_cval), cval);
236 return cval;
238 if (TREE_OVERFLOW_P (cval))
239 return drop_tree_overflow (cval);
240 return orig_cval;
243 /* If SYM is a constant variable with known value, return the value.
244 NULL_TREE is returned otherwise. */
246 tree
247 get_symbol_constant_value (tree sym)
249 tree val = ctor_for_folding (sym);
250 if (val != error_mark_node)
252 if (val)
254 val = canonicalize_constructor_val (unshare_expr (val), sym);
255 if (val && is_gimple_min_invariant (val))
256 return val;
257 else
258 return NULL_TREE;
260 /* Variables declared 'const' without an initializer
261 have zero as the initializer if they may not be
262 overridden at link or run time. */
263 if (!val
264 && is_gimple_reg_type (TREE_TYPE (sym)))
265 return build_zero_cst (TREE_TYPE (sym));
268 return NULL_TREE;
273 /* Subroutine of fold_stmt. We perform several simplifications of the
274 memory reference tree EXPR and make sure to re-gimplify them properly
275 after propagation of constant addresses. IS_LHS is true if the
276 reference is supposed to be an lvalue. */
278 static tree
279 maybe_fold_reference (tree expr, bool is_lhs)
281 tree result;
283 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
284 || TREE_CODE (expr) == REALPART_EXPR
285 || TREE_CODE (expr) == IMAGPART_EXPR)
286 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
287 return fold_unary_loc (EXPR_LOCATION (expr),
288 TREE_CODE (expr),
289 TREE_TYPE (expr),
290 TREE_OPERAND (expr, 0));
291 else if (TREE_CODE (expr) == BIT_FIELD_REF
292 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
293 return fold_ternary_loc (EXPR_LOCATION (expr),
294 TREE_CODE (expr),
295 TREE_TYPE (expr),
296 TREE_OPERAND (expr, 0),
297 TREE_OPERAND (expr, 1),
298 TREE_OPERAND (expr, 2));
300 if (!is_lhs
301 && (result = fold_const_aggregate_ref (expr))
302 && is_gimple_min_invariant (result))
303 return result;
305 return NULL_TREE;
309 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
310 replacement rhs for the statement or NULL_TREE if no simplification
311 could be made. It is assumed that the operands have been previously
312 folded. */
314 static tree
315 fold_gimple_assign (gimple_stmt_iterator *si)
317 gimple *stmt = gsi_stmt (*si);
318 enum tree_code subcode = gimple_assign_rhs_code (stmt);
319 location_t loc = gimple_location (stmt);
321 tree result = NULL_TREE;
323 switch (get_gimple_rhs_class (subcode))
325 case GIMPLE_SINGLE_RHS:
327 tree rhs = gimple_assign_rhs1 (stmt);
329 if (TREE_CLOBBER_P (rhs))
330 return NULL_TREE;
332 if (REFERENCE_CLASS_P (rhs))
333 return maybe_fold_reference (rhs, false);
335 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
337 tree val = OBJ_TYPE_REF_EXPR (rhs);
338 if (is_gimple_min_invariant (val))
339 return val;
340 else if (flag_devirtualize && virtual_method_call_p (rhs))
342 bool final;
343 vec <cgraph_node *>targets
344 = possible_polymorphic_call_targets (rhs, stmt, &final);
345 if (final && targets.length () <= 1 && dbg_cnt (devirt))
347 if (dump_enabled_p ())
349 location_t loc = gimple_location_safe (stmt);
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return SSA_VAR_P (inner);
636 /* If the SIZE argument representing the size of an object is in a range
637 of values of which exactly one is valid (and that is zero), return
638 true, otherwise false. */
640 static bool
641 size_must_be_zero_p (tree size)
643 if (integer_zerop (size))
644 return true;
646 if (TREE_CODE (size) != SSA_NAME)
647 return false;
649 wide_int min, max;
650 enum value_range_type rtype = get_range_info (size, &min, &max);
651 if (rtype != VR_ANTI_RANGE)
652 return false;
654 tree type = TREE_TYPE (size);
655 int prec = TYPE_PRECISION (type);
657 wide_int wone = wi::one (prec);
659 /* Compute the value of SSIZE_MAX, the largest positive value that
660 can be stored in ssize_t, the signed counterpart of size_t. */
661 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
663 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
666 /* Fold function call to builtin mem{{,p}cpy,move}. Return
667 false if no simplification can be made.
668 If ENDP is 0, return DEST (like memcpy).
669 If ENDP is 1, return DEST+LEN (like mempcpy).
670 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
671 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
672 (memmove). */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 /* If the LEN parameter is a constant zero or in range where
685 the only valid value is zero, return DEST. */
686 if (size_must_be_zero_p (len))
688 gimple *repl;
689 if (gimple_call_lhs (stmt))
690 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
691 else
692 repl = gimple_build_nop ();
693 tree vdef = gimple_vdef (stmt);
694 if (vdef && TREE_CODE (vdef) == SSA_NAME)
696 unlink_stmt_vdef (stmt);
697 release_ssa_name (vdef);
699 gsi_replace (gsi, repl, false);
700 return true;
703 /* If SRC and DEST are the same (and not volatile), return
704 DEST{,+LEN,+LEN-1}. */
705 if (operand_equal_p (src, dest, 0))
707 unlink_stmt_vdef (stmt);
708 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
709 release_ssa_name (gimple_vdef (stmt));
710 if (!lhs)
712 gsi_replace (gsi, gimple_build_nop (), false);
713 return true;
715 goto done;
717 else
719 tree srctype, desttype;
720 unsigned int src_align, dest_align;
721 tree off0;
723 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
724 pointers as wide integer) and also may result in huge function
725 size because of inlined bounds copy. Thus don't inline for
726 functions we want to instrument. */
727 if (flag_check_pointer_bounds
728 && chkp_instrumentable_p (cfun->decl)
729 /* Even if data may contain pointers we can inline if copy
730 less than a pointer size. */
731 && (!tree_fits_uhwi_p (len)
732 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
733 return false;
735 /* Build accesses at offset zero with a ref-all character type. */
736 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
737 ptr_mode, true), 0);
739 /* If we can perform the copy efficiently with first doing all loads
740 and then all stores inline it that way. Currently efficiently
741 means that we can load all the memory into a single integer
742 register which is what MOVE_MAX gives us. */
743 src_align = get_pointer_alignment (src);
744 dest_align = get_pointer_alignment (dest);
745 if (tree_fits_uhwi_p (len)
746 && compare_tree_int (len, MOVE_MAX) <= 0
747 /* ??? Don't transform copies from strings with known length this
748 confuses the tree-ssa-strlen.c. This doesn't handle
749 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
750 reason. */
751 && !c_strlen (src, 2))
753 unsigned ilen = tree_to_uhwi (len);
754 if (pow2p_hwi (ilen))
756 scalar_int_mode mode;
757 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
758 if (type
759 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
760 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
761 /* If the destination pointer is not aligned we must be able
762 to emit an unaligned store. */
763 && (dest_align >= GET_MODE_ALIGNMENT (mode)
764 || !targetm.slow_unaligned_access (mode, dest_align)
765 || (optab_handler (movmisalign_optab, mode)
766 != CODE_FOR_nothing)))
768 tree srctype = type;
769 tree desttype = type;
770 if (src_align < GET_MODE_ALIGNMENT (mode))
771 srctype = build_aligned_type (type, src_align);
772 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
773 tree tem = fold_const_aggregate_ref (srcmem);
774 if (tem)
775 srcmem = tem;
776 else if (src_align < GET_MODE_ALIGNMENT (mode)
777 && targetm.slow_unaligned_access (mode, src_align)
778 && (optab_handler (movmisalign_optab, mode)
779 == CODE_FOR_nothing))
780 srcmem = NULL_TREE;
781 if (srcmem)
783 gimple *new_stmt;
784 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
786 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
787 srcmem
788 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
789 new_stmt);
790 gimple_assign_set_lhs (new_stmt, srcmem);
791 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
792 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
794 if (dest_align < GET_MODE_ALIGNMENT (mode))
795 desttype = build_aligned_type (type, dest_align);
796 new_stmt
797 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
798 dest, off0),
799 srcmem);
800 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
801 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
802 if (gimple_vdef (new_stmt)
803 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
804 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
805 if (!lhs)
807 gsi_replace (gsi, new_stmt, false);
808 return true;
810 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
811 goto done;
817 if (endp == 3)
819 /* Both DEST and SRC must be pointer types.
820 ??? This is what old code did. Is the testing for pointer types
821 really mandatory?
823 If either SRC is readonly or length is 1, we can use memcpy. */
824 if (!dest_align || !src_align)
825 return false;
826 if (readonly_data_expr (src)
827 || (tree_fits_uhwi_p (len)
828 && (MIN (src_align, dest_align) / BITS_PER_UNIT
829 >= tree_to_uhwi (len))))
831 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
832 if (!fn)
833 return false;
834 gimple_call_set_fndecl (stmt, fn);
835 gimple_call_set_arg (stmt, 0, dest);
836 gimple_call_set_arg (stmt, 1, src);
837 fold_stmt (gsi);
838 return true;
841 /* If *src and *dest can't overlap, optimize into memcpy as well. */
842 if (TREE_CODE (src) == ADDR_EXPR
843 && TREE_CODE (dest) == ADDR_EXPR)
845 tree src_base, dest_base, fn;
846 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
847 HOST_WIDE_INT maxsize;
849 srcvar = TREE_OPERAND (src, 0);
850 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
851 if (src_base == NULL)
852 src_base = srcvar;
853 destvar = TREE_OPERAND (dest, 0);
854 dest_base = get_addr_base_and_unit_offset (destvar,
855 &dest_offset);
856 if (dest_base == NULL)
857 dest_base = destvar;
858 if (tree_fits_uhwi_p (len))
859 maxsize = tree_to_uhwi (len);
860 else
861 maxsize = -1;
862 if (SSA_VAR_P (src_base)
863 && SSA_VAR_P (dest_base))
865 if (operand_equal_p (src_base, dest_base, 0)
866 && ranges_overlap_p (src_offset, maxsize,
867 dest_offset, maxsize))
868 return false;
870 else if (TREE_CODE (src_base) == MEM_REF
871 && TREE_CODE (dest_base) == MEM_REF)
873 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
874 TREE_OPERAND (dest_base, 0), 0))
875 return false;
876 offset_int off = mem_ref_offset (src_base) + src_offset;
877 if (!wi::fits_shwi_p (off))
878 return false;
879 src_offset = off.to_shwi ();
881 off = mem_ref_offset (dest_base) + dest_offset;
882 if (!wi::fits_shwi_p (off))
883 return false;
884 dest_offset = off.to_shwi ();
885 if (ranges_overlap_p (src_offset, maxsize,
886 dest_offset, maxsize))
887 return false;
889 else
890 return false;
892 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
893 if (!fn)
894 return false;
895 gimple_call_set_fndecl (stmt, fn);
896 gimple_call_set_arg (stmt, 0, dest);
897 gimple_call_set_arg (stmt, 1, src);
898 fold_stmt (gsi);
899 return true;
902 /* If the destination and source do not alias optimize into
903 memcpy as well. */
904 if ((is_gimple_min_invariant (dest)
905 || TREE_CODE (dest) == SSA_NAME)
906 && (is_gimple_min_invariant (src)
907 || TREE_CODE (src) == SSA_NAME))
909 ao_ref destr, srcr;
910 ao_ref_init_from_ptr_and_size (&destr, dest, len);
911 ao_ref_init_from_ptr_and_size (&srcr, src, len);
912 if (!refs_may_alias_p_1 (&destr, &srcr, false))
914 tree fn;
915 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
916 if (!fn)
917 return false;
918 gimple_call_set_fndecl (stmt, fn);
919 gimple_call_set_arg (stmt, 0, dest);
920 gimple_call_set_arg (stmt, 1, src);
921 fold_stmt (gsi);
922 return true;
926 return false;
929 if (!tree_fits_shwi_p (len))
930 return false;
931 if (!POINTER_TYPE_P (TREE_TYPE (src))
932 || !POINTER_TYPE_P (TREE_TYPE (dest)))
933 return false;
934 /* In the following try to find a type that is most natural to be
935 used for the memcpy source and destination and that allows
936 the most optimization when memcpy is turned into a plain assignment
937 using that type. In theory we could always use a char[len] type
938 but that only gains us that the destination and source possibly
939 no longer will have their address taken. */
940 srctype = TREE_TYPE (TREE_TYPE (src));
941 if (TREE_CODE (srctype) == ARRAY_TYPE
942 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
943 srctype = TREE_TYPE (srctype);
944 desttype = TREE_TYPE (TREE_TYPE (dest));
945 if (TREE_CODE (desttype) == ARRAY_TYPE
946 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
947 desttype = TREE_TYPE (desttype);
948 if (TREE_ADDRESSABLE (srctype)
949 || TREE_ADDRESSABLE (desttype))
950 return false;
952 /* Make sure we are not copying using a floating-point mode or
953 a type whose size possibly does not match its precision. */
954 if (FLOAT_MODE_P (TYPE_MODE (desttype))
955 || TREE_CODE (desttype) == BOOLEAN_TYPE
956 || TREE_CODE (desttype) == ENUMERAL_TYPE)
957 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
958 if (FLOAT_MODE_P (TYPE_MODE (srctype))
959 || TREE_CODE (srctype) == BOOLEAN_TYPE
960 || TREE_CODE (srctype) == ENUMERAL_TYPE)
961 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
962 if (!srctype)
963 srctype = desttype;
964 if (!desttype)
965 desttype = srctype;
966 if (!srctype)
967 return false;
969 src_align = get_pointer_alignment (src);
970 dest_align = get_pointer_alignment (dest);
971 if (dest_align < TYPE_ALIGN (desttype)
972 || src_align < TYPE_ALIGN (srctype))
973 return false;
975 destvar = NULL_TREE;
976 if (TREE_CODE (dest) == ADDR_EXPR
977 && var_decl_component_p (TREE_OPERAND (dest, 0))
978 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
979 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
981 srcvar = NULL_TREE;
982 if (TREE_CODE (src) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (src, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
986 if (!destvar
987 || src_align >= TYPE_ALIGN (desttype))
988 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
989 src, off0);
990 else if (!STRICT_ALIGNMENT)
992 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
993 src_align);
994 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
998 if (srcvar == NULL_TREE && destvar == NULL_TREE)
999 return false;
1001 if (srcvar == NULL_TREE)
1003 if (src_align >= TYPE_ALIGN (desttype))
1004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1005 else
1007 if (STRICT_ALIGNMENT)
1008 return false;
1009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1010 src_align);
1011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1014 else if (destvar == NULL_TREE)
1016 if (dest_align >= TYPE_ALIGN (srctype))
1017 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1018 else
1020 if (STRICT_ALIGNMENT)
1021 return false;
1022 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1023 dest_align);
1024 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1028 gimple *new_stmt;
1029 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 tree tem = fold_const_aggregate_ref (srcvar);
1032 if (tem)
1033 srcvar = tem;
1034 if (! is_gimple_min_invariant (srcvar))
1036 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1037 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1038 new_stmt);
1039 gimple_assign_set_lhs (new_stmt, srcvar);
1040 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1041 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 new_stmt = gimple_build_assign (destvar, srcvar);
1044 goto set_vop_and_replace;
1047 /* We get an aggregate copy. Use an unsigned char[] type to
1048 perform the copying to preserve padding and to avoid any issues
1049 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1050 desttype = build_array_type_nelts (unsigned_char_type_node,
1051 tree_to_uhwi (len));
1052 srctype = desttype;
1053 if (src_align > TYPE_ALIGN (srctype))
1054 srctype = build_aligned_type (srctype, src_align);
1055 if (dest_align > TYPE_ALIGN (desttype))
1056 desttype = build_aligned_type (desttype, dest_align);
1057 new_stmt
1058 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1059 fold_build2 (MEM_REF, srctype, src, off0));
1060 set_vop_and_replace:
1061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1062 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1063 if (gimple_vdef (new_stmt)
1064 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1065 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1066 if (!lhs)
1068 gsi_replace (gsi, new_stmt, false);
1069 return true;
1071 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1074 done:
1075 gimple_seq stmts = NULL;
1076 if (endp == 0 || endp == 3)
1077 len = NULL_TREE;
1078 else if (endp == 2)
1079 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1080 ssize_int (1));
1081 if (endp == 2 || endp == 1)
1083 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1084 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1085 TREE_TYPE (dest), dest, len);
1088 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1089 gimple *repl = gimple_build_assign (lhs, dest);
1090 gsi_replace (gsi, repl, false);
1091 return true;
1094 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1095 to built-in memcmp (a, b, len). */
1097 static bool
1098 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1100 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1102 if (!fn)
1103 return false;
1105 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1107 gimple *stmt = gsi_stmt (*gsi);
1108 tree a = gimple_call_arg (stmt, 0);
1109 tree b = gimple_call_arg (stmt, 1);
1110 tree len = gimple_call_arg (stmt, 2);
1112 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1113 replace_call_with_call_and_fold (gsi, repl);
1115 return true;
1118 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1119 to built-in memmove (dest, src, len). */
1121 static bool
1122 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1124 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1126 if (!fn)
1127 return false;
1129 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1130 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1131 len) into memmove (dest, src, len). */
1133 gimple *stmt = gsi_stmt (*gsi);
1134 tree src = gimple_call_arg (stmt, 0);
1135 tree dest = gimple_call_arg (stmt, 1);
1136 tree len = gimple_call_arg (stmt, 2);
1138 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1139 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1140 replace_call_with_call_and_fold (gsi, repl);
1142 return true;
1145 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1146 to built-in memset (dest, 0, len). */
1148 static bool
1149 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1151 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1153 if (!fn)
1154 return false;
1156 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree dest = gimple_call_arg (stmt, 0);
1160 tree len = gimple_call_arg (stmt, 1);
1162 gimple_seq seq = NULL;
1163 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1164 gimple_seq_add_stmt_without_update (&seq, repl);
1165 gsi_replace_with_seq_vops (gsi, seq);
1166 fold_stmt (gsi);
1168 return true;
1171 /* Fold function call to builtin memset or bzero at *GSI setting the
1172 memory of size LEN to VAL. Return whether a simplification was made. */
1174 static bool
1175 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1177 gimple *stmt = gsi_stmt (*gsi);
1178 tree etype;
1179 unsigned HOST_WIDE_INT length, cval;
1181 /* If the LEN parameter is zero, return DEST. */
1182 if (integer_zerop (len))
1184 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1185 return true;
1188 if (! tree_fits_uhwi_p (len))
1189 return false;
1191 if (TREE_CODE (c) != INTEGER_CST)
1192 return false;
1194 tree dest = gimple_call_arg (stmt, 0);
1195 tree var = dest;
1196 if (TREE_CODE (var) != ADDR_EXPR)
1197 return false;
1199 var = TREE_OPERAND (var, 0);
1200 if (TREE_THIS_VOLATILE (var))
1201 return false;
1203 etype = TREE_TYPE (var);
1204 if (TREE_CODE (etype) == ARRAY_TYPE)
1205 etype = TREE_TYPE (etype);
1207 if (!INTEGRAL_TYPE_P (etype)
1208 && !POINTER_TYPE_P (etype))
1209 return NULL_TREE;
1211 if (! var_decl_component_p (var))
1212 return NULL_TREE;
1214 length = tree_to_uhwi (len);
1215 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1216 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1217 return NULL_TREE;
1219 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1220 return NULL_TREE;
1222 if (integer_zerop (c))
1223 cval = 0;
1224 else
1226 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1227 return NULL_TREE;
1229 cval = TREE_INT_CST_LOW (c);
1230 cval &= 0xff;
1231 cval |= cval << 8;
1232 cval |= cval << 16;
1233 cval |= (cval << 31) << 1;
1236 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1237 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1238 gimple_set_vuse (store, gimple_vuse (stmt));
1239 tree vdef = gimple_vdef (stmt);
1240 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1242 gimple_set_vdef (store, gimple_vdef (stmt));
1243 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1245 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1246 if (gimple_call_lhs (stmt))
1248 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1249 gsi_replace (gsi, asgn, false);
1251 else
1253 gimple_stmt_iterator gsi2 = *gsi;
1254 gsi_prev (gsi);
1255 gsi_remove (&gsi2, true);
1258 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return False.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    /* Strip the dereference and recurse on the SSA pointer.  */
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; only a non-negative
	     integer constant is acceptable.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    /* Strip the address-of and try the pointed-to object.  */
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      /* Max length = array size in bytes minus 1 for the NUL.  */
	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg)
	      && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
	    {
	      /* Same idea for a plain character-array variable: its
		 size minus 1 bounds the length of any string in it.  */
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || TREE_CODE (val) != INTEGER_CST || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      /* Lower the recorded minimum if VAL is provably smaller.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* Maximum mode: both bounds must be constants; keep the
		 larger of the two.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* Exact mode (TYPE == 0): the new length must equal the one
	     already recorded.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR yields either arm; both must have a known
	     length for the result to be known.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In fuzzy mode record "unbounded" instead of failing.  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1459 /* Determine the minimum and maximum value or string length that ARG
1460 refers to and store each in the first two elements of MINMAXLEN.
1461 For expressions that point to strings of unknown lengths that are
1462 character arrays, use the upper bound of the array as the maximum
1463 length. For example, given an expression like 'x ? array : "xyz"'
1464 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1465 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1466 stored in array.
1467 Return true if the range of the string lengths has been obtained
1468 from the upper bound of an array at the end of a struct. Such
1469 an array may hold a string that's longer than its upper bound
1470 due to it being used as a poor-man's flexible array member. */
1472 bool
1473 get_range_strlen (tree arg, tree minmaxlen[2])
1475 bitmap visited = NULL;
1477 minmaxlen[0] = NULL_TREE;
1478 minmaxlen[1] = NULL_TREE;
1480 bool flexarray = false;
1481 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1483 if (visited)
1484 BITMAP_FREE (visited);
1486 return flexarray;
1489 tree
1490 get_maxval_strlen (tree arg, int type)
1492 bitmap visited = NULL;
1493 tree len[2] = { NULL_TREE, NULL_TREE };
1495 bool dummy;
1496 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1497 len[1] = NULL_TREE;
1498 if (visited)
1499 BITMAP_FREE (visited);
1501 return len[1];
1505 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1506 If LEN is not NULL, it represents the length of the string to be
1507 copied. Return NULL_TREE if no simplification can be made. */
1509 static bool
1510 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1511 tree dest, tree src)
1513 location_t loc = gimple_location (gsi_stmt (*gsi));
1514 tree fn;
1516 /* If SRC and DEST are the same (and not volatile), return DEST. */
1517 if (operand_equal_p (src, dest, 0))
1519 replace_call_with_value (gsi, dest);
1520 return true;
1523 if (optimize_function_for_size_p (cfun))
1524 return false;
1526 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1527 if (!fn)
1528 return false;
1530 tree len = get_maxval_strlen (src, 0);
1531 if (!len)
1532 return false;
1534 len = fold_convert_loc (loc, size_type_node, len);
1535 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1536 len = force_gimple_operand_gsi (gsi, len, true,
1537 NULL_TREE, true, GSI_SAME_STMT);
1538 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1539 replace_call_with_call_and_fold (gsi, repl);
1540 return true;
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST refers to an array/pointer declared with attribute
     nonstring, which suppresses truncation warnings.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 declared with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation unless DEST is a nonstring decl: either the
     copy stops short of the source length, or LEN == SLEN and the
     terminating nul is dropped.  */
  if (!nonstring)
    {
      if (tree_int_cst_lt (len, slen))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated copying %E byte "
			    "from a string of length %E")
		       : G_("%G%qD output truncated copying %E bytes "
			    "from a string of length %E")),
		      call, fndecl, len, slen);
	}
      else if (tree_int_cst_equal (len, slen))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated before terminating nul "
			    "copying %E byte from a string of the same "
			    "length")
		       : G_("%G%qD output truncated before terminating nul "
			    "copying %E bytes from a string of the same "
			    "length")),
		      call, fndecl, len);
	}
    }

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The call is only interesting if its result is used.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both the string and the character are known: compute the result
     at compile time with the host strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found at offset p1 - p: result is str + offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transforms only apply to str(r)chr (str, 0).  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1732 /* Fold function call to builtin strstr.
1733 If both arguments are constant, evaluate and fold the result,
1734 additionally fold strstr (x, "") into x and strstr (x, "c")
1735 into strchr (x, 'c'). */
1736 static bool
1737 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1739 gimple *stmt = gsi_stmt (*gsi);
1740 tree haystack = gimple_call_arg (stmt, 0);
1741 tree needle = gimple_call_arg (stmt, 1);
1742 const char *p, *q;
1744 if (!gimple_call_lhs (stmt))
1745 return false;
1747 q = c_getstr (needle);
1748 if (q == NULL)
1749 return false;
1751 if ((p = c_getstr (haystack)))
1753 const char *r = strstr (p, q);
1755 if (r == NULL)
1757 replace_call_with_value (gsi, integer_zero_node);
1758 return true;
1761 tree len = build_int_cst (size_type_node, r - p);
1762 gimple_seq stmts = NULL;
1763 gimple *new_stmt
1764 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1765 haystack, len);
1766 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1767 gsi_replace_with_seq_vops (gsi, stmts);
1768 return true;
1771 /* For strstr (x, "") return x. */
1772 if (q[0] == '\0')
1774 replace_call_with_value (gsi, haystack);
1775 return true;
1778 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1779 if (q[1] == '\0')
1781 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1782 if (strchr_fn)
1784 tree c = build_int_cst (integer_type_node, q[0]);
1785 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1786 replace_call_with_call_and_fold (gsi, repl);
1787 return true;
1791 return false;
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion trades size for speed; skip it when
     this block is optimized for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; forward DST to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1886 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1887 are the arguments to the call. */
1889 static bool
1890 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1892 gimple *stmt = gsi_stmt (*gsi);
1893 tree dest = gimple_call_arg (stmt, 0);
1894 tree src = gimple_call_arg (stmt, 1);
1895 tree size = gimple_call_arg (stmt, 2);
1896 tree fn;
1897 const char *p;
1900 p = c_getstr (src);
1901 /* If the SRC parameter is "", return DEST. */
1902 if (p && *p == '\0')
1904 replace_call_with_value (gsi, dest);
1905 return true;
1908 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1909 return false;
1911 /* If __builtin_strcat_chk is used, assume strcat is available. */
1912 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1913 if (!fn)
1914 return false;
1916 gimple *repl = gimple_build_call (fn, 2, dest, src);
1917 replace_call_with_call_and_fold (gsi, repl);
1918 return true;
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The remaining transform requires both a constant bound and a
     constant source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  /* LEN >= strlen (SRC), so the bound cannot truncate: plain strcat
     behaves identically.  */
  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2008 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2009 LEN, and SIZE. */
2011 static bool
2012 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2014 gimple *stmt = gsi_stmt (*gsi);
2015 tree dest = gimple_call_arg (stmt, 0);
2016 tree src = gimple_call_arg (stmt, 1);
2017 tree len = gimple_call_arg (stmt, 2);
2018 tree size = gimple_call_arg (stmt, 3);
2019 tree fn;
2020 const char *p;
2022 p = c_getstr (src);
2023 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2024 if ((p && *p == '\0')
2025 || integer_zerop (len))
2027 replace_call_with_value (gsi, dest);
2028 return true;
2031 if (! tree_fits_uhwi_p (size))
2032 return false;
2034 if (! integer_all_onesp (size))
2036 tree src_len = c_strlen (src, 1);
2037 if (src_len
2038 && tree_fits_uhwi_p (src_len)
2039 && tree_fits_uhwi_p (len)
2040 && ! tree_int_cst_lt (len, src_len))
2042 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2043 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2044 if (!fn)
2045 return false;
2047 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2048 replace_call_with_call_and_fold (gsi, repl);
2049 return true;
2051 return false;
2054 /* If __builtin_strncat_chk is used, assume strncat is available. */
2055 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2056 if (!fn)
2057 return false;
2059 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2060 replace_call_with_call_and_fold (gsi, repl);
2061 return true;
2064 /* Build and append gimple statements to STMTS that would load a first
2065 character of a memory location identified by STR. LOC is location
2066 of the statement. */
2068 static tree
2069 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2071 tree var;
2073 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2074 tree cst_uchar_ptr_node
2075 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2076 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2078 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2079 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2080 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2082 gimple_assign_set_lhs (stmt, var);
2083 gimple_seq_add_stmt_without_update (stmts, stmt);
2085 return var;
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   Handles BUILT_IN_STRCMP, BUILT_IN_STRNCMP, BUILT_IN_STRCASECMP and
   BUILT_IN_STRNCASECMP (the code is read from the call's fndecl).
   Returns true and replaces the call if a simplification was made,
   false otherwise.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* -1 means the length bound (third argument) is unknown.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* NULL when the argument is not a constant string.  */
  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    /* NOTE(review): host strcmp/strncmp are used here; this
	       presumes host and target agree on plain char comparison
	       of the constant strings.  */
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* Case-sensitive equality implies case-insensitive equality;
	       any other outcome is unknown at compile time.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared, which makes
     the ""-argument folds below valid for the strn* variants too.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen to int before negating so the negation cannot wrap in
	     the unsigned char type.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Convert both chars to int first so the subtraction is done
	     in int, matching strncmp's return type.  */
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  Folds a call with
   a zero length to a null pointer, and a call on a constant string with
   constant arguments to either null or ARG1 + offset of the match.
   Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Need a constant searched-for character (in target representation)
     and a constant length to fold further.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Search only within the known constant string.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* Only fold to NULL when the whole searched range is covered by
	     the known string; if LENGTH exceeds it, the tail contents are
	     unknown and we must punt.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* Result unused: replace the call with a no-op while keeping
	       the vops bookkeeping of gsi_replace_with_seq_vops.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  The transformation is only done when the call's
   return value is unused.  Returns true if the call was replaced,
   false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH: length 1 but the character itself is unknown, so
	 treat it like the general case and use fwrite.  */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call (SIZE being
   the object-size bound computed by __builtin_object_size).  FCODE is
   the BUILT_IN_* code of the builtin.  The call is replaced by the
   corresponding unchecked function when the copy provably fits, or
   when SIZE is unknown (all-ones).  Returns true if folded.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* Whether the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  /* All-ones SIZE means "object size unknown"; the check can never
     fail, so the conversion to the unchecked variant is always OK.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* If the copy could exceed the object size, keep the checking
	 variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call (SIZE being the
   destination object-size bound).  FCODE is the BUILT_IN_* code of the
   builtin (BUILT_IN_STRCPY_CHK or BUILT_IN_STPCPY_CHK).  Returns true
   if the call was replaced by an unchecked or cheaper variant.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* Whether the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  /* All-ones SIZE means the object size is unknown, so the check can
     never fail and the unchecked function may be used directly.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy strlen (SRC) + 1 bytes to include the terminator.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The copy fits only when SIZE > MAXLEN (room for the NUL);
	 otherwise keep the checking variant.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call (SIZE being the destination object-size
   bound).  FCODE is the BUILT_IN_* code of the builtin
   (BUILT_IN_STRNCPY_CHK or BUILT_IN_STPNCPY_CHK).  Returns true if the
   call was replaced.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Whether the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  /* All-ones SIZE means the object size is unknown, so the check can
     never fail.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Keep the checking variant when the copy could overflow.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused, fold to strcpy; if the source length is a
   known constant, fold to memcpy plus DEST + LEN.  Returns true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Need a compile-time constant source length for the memcpy fold.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes to include the NUL terminator.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call to the new
     memcpy by hand, since the call is inserted rather than replacing
     the statement at GSI.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk pointed to by GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  When the object-size
   check provably cannot fail, the call is rewritten in place to the
   corresponding unchecked {,v}snprintf.  Returns true if folded, false
   if a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* All-ones SIZE means the object size is unknown; the check can never
     fail, so only the FLAG/format tests below remain.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the variadic arguments down by two (FLAG and SIZE dropped).  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk pointed to by GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  When the output
   length can be bounded and fits in the destination (or the size is
   unknown), the call is rewritten in place to the unchecked
   {,v}sprintf.  Returns true if folded.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For vsprintf_chk with extra args the format would still be
	     fully literal, but be conservative and require the exact
	     argument count for sprintf_chk.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* All-ones SIZE means the object size is unknown; otherwise require
     the known output length to fit strictly below SIZE (leaving room
     for the NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the variadic arguments down by two (FLAG and SIZE dropped).  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which for
	     a literal format with no % is just strlen (fmt).  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* The return value (strlen of ORIG) is only needed when the lhs
	 is used.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so the
     "output fits" precondition of the strcpy fold can be checked.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the would-be output length, here simply
	     strlen (fmt) since the whole format fits.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* A constant upper bound on strlen (ORIG) is required both for
	 the fits-check and for materializing the return value.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible; otherwise replace the
   call (with fputs/fputc or nothing) and return true.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format contains no % at all, emit fputs directly.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with a %-free format would be dropped
	 silently for the non-va_list variants — punt in that case.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3253 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3254 FMT and ARG are the arguments to the call; we don't fold cases with
3255 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3257 Return false if no simplification was possible, otherwise return true
3258 after replacing the call at *GSI with the simplified form. FCODE is
3259 the BUILT_IN_* code of the function to be simplified. */
3261 static bool
3262 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3263 tree arg, enum built_in_function fcode)
3265 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3266 tree fn_putchar, fn_puts, newarg;
3267 const char *fmt_str = NULL;
3269 /* If the return value is used, don't do the transformation. */
3270 if (gimple_call_lhs (stmt) != NULL_TREE)
3271 return false;
3273 /* Check whether the format is a literal string constant. */
3274 fmt_str = c_getstr (fmt);
3275 if (fmt_str == NULL)
3276 return false;
3278 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3280 /* If we're using an unlocked function, assume the other
3281 unlocked functions exist explicitly. */
3282 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3283 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3285 else
3287 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3288 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3291 if (!init_target_chars ())
3292 return false;
/* Handle the "%s" format and the no-% format together: in both cases the
   output is a known string STR (the %s argument or the format itself).  */
3294 if (strcmp (fmt_str, target_percent_s) == 0
3295 || strchr (fmt_str, target_percent) == NULL)
3297 const char *str;
3299 if (strcmp (fmt_str, target_percent_s) == 0)
3301 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3302 return false;
3304 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3305 return false;
3307 str = c_getstr (arg);
3308 if (str == NULL)
3309 return false;
3311 else
3313 /* The format specifier doesn't contain any '%' characters. */
3314 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3315 && arg)
3316 return false;
3317 str = fmt_str;
3320 /* If the string was "", printf does nothing. */
3321 if (str[0] == '\0')
3323 replace_call_with_value (gsi, NULL_TREE);
3324 return true;
3327 /* If the string has length of 1, call putchar. */
3328 if (str[1] == '\0')
3330 /* Given printf("c"), (where c is any one character,)
3331 convert "c"[0] to an int and pass that to the replacement
3332 function. */
3333 newarg = build_int_cst (integer_type_node, str[0]);
3334 if (fn_putchar)
3336 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3337 replace_call_with_call_and_fold (gsi, repl);
3338 return true;
3341 else
3343 /* If the string was "string\n", call puts("string"). */
3344 size_t len = strlen (str);
3345 if ((unsigned char)str[len - 1] == target_newline
3346 && (size_t) (int) len == len
3347 && (int) len > 0)
3349 char *newstr;
3350 tree offset_node, string_cst;
3352 /* Create a NUL-terminated string that's one char shorter
3353 than the original, stripping off the trailing '\n'. */
3354 newarg = build_string_literal (len, str);
3355 string_cst = string_constant (newarg, &offset_node);
3356 gcc_checking_assert (string_cst
3357 && (TREE_STRING_LENGTH (string_cst)
3358 == (int) len)
3359 && integer_zerop (offset_node)
3360 && (unsigned char)
3361 TREE_STRING_POINTER (string_cst)[len - 1]
3362 == target_newline);
3363 /* build_string_literal creates a new STRING_CST,
3364 modify it in place to avoid double copying. */
3365 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3366 newstr[len - 1] = '\0';
3367 if (fn_puts)
3369 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3370 replace_call_with_call_and_fold (gsi, repl);
3371 return true;
3374 else
3375 /* We'd like to arrange to call fputs(string,stdout) here,
3376 but we need stdout and don't have a way to get it yet. */
3377 return false;
3381 /* The other optimizations can be done only on the non-va_list variants. */
3382 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3383 return false;
3385 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3386 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3388 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3389 return false;
3390 if (fn_puts)
3392 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3393 replace_call_with_call_and_fold (gsi, repl);
3394 return true;
3398 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3399 else if (strcmp (fmt_str, target_percent_c) == 0)
3401 if (!arg || ! useless_type_conversion_p (integer_type_node,
3402 TREE_TYPE (arg)))
3403 return false;
3404 if (fn_putchar)
3406 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3407 replace_call_with_call_and_fold (gsi, repl);
3408 return true;
3412 return false;
3417 /* Fold a call to __builtin_strlen with known length LEN. */
3419 static bool
3420 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3422 gimple *stmt = gsi_stmt (*gsi);
3423 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3424 if (!len)
3425 return false;
3426 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3427 replace_call_with_value (gsi, len);
3428 return true;
3431 /* Fold a call to __builtin_acc_on_device. */
3433 static bool
3434 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3436 /* Defer folding until we know which compiler we're in. */
3437 if (symtab->state != EXPANSION)
3438 return false;
3440 unsigned val_host = GOMP_DEVICE_HOST;
3441 unsigned val_dev = GOMP_DEVICE_NONE;
3443 #ifdef ACCEL_COMPILER
3444 val_host = GOMP_DEVICE_NOT_HOST;
3445 val_dev = ACCEL_COMPILER_acc_device;
3446 #endif
3448 location_t loc = gimple_location (gsi_stmt (*gsi));
3450 tree host_eq = make_ssa_name (boolean_type_node);
3451 gimple *host_ass = gimple_build_assign
3452 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3453 gimple_set_location (host_ass, loc);
3454 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3456 tree dev_eq = make_ssa_name (boolean_type_node);
3457 gimple *dev_ass = gimple_build_assign
3458 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3459 gimple_set_location (dev_ass, loc);
3460 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3462 tree result = make_ssa_name (boolean_type_node);
3463 gimple *result_ass = gimple_build_assign
3464 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3465 gimple_set_location (result_ass, loc);
3466 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3468 replace_call_with_value (gsi, result);
3470 return true;
3473 /* Fold realloc (0, n) -> malloc (n). */
3475 static bool
3476 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3478 gimple *stmt = gsi_stmt (*gsi);
3479 tree arg = gimple_call_arg (stmt, 0);
3480 tree size = gimple_call_arg (stmt, 1);
3482 if (operand_equal_p (arg, null_pointer_node, 0))
3484 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3485 if (fn_malloc)
3487 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3488 replace_call_with_call_and_fold (gsi, repl);
3489 return true;
3492 return false;
3495 /* Fold the non-target builtin at *GSI and return whether any simplification
3496 was made. */
3498 static bool
3499 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3501 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3502 tree callee = gimple_call_fndecl (stmt);
3504 /* Give up for always_inline inline builtins until they are
3505 inlined. */
3506 if (avoid_folding_inline_builtin (callee))
3507 return false;
3509 unsigned n = gimple_call_num_args (stmt);
3510 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
/* Dispatch on the builtin's function code; N guards the expected argument
   counts for the printf/fprintf family below.  */
3511 switch (fcode)
3513 case BUILT_IN_BCMP:
3514 return gimple_fold_builtin_bcmp (gsi);
3515 case BUILT_IN_BCOPY:
3516 return gimple_fold_builtin_bcopy (gsi);
3517 case BUILT_IN_BZERO:
3518 return gimple_fold_builtin_bzero (gsi);
3520 case BUILT_IN_MEMSET:
3521 return gimple_fold_builtin_memset (gsi,
3522 gimple_call_arg (stmt, 1),
3523 gimple_call_arg (stmt, 2));
3524 case BUILT_IN_MEMCPY:
3525 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3526 gimple_call_arg (stmt, 1), 0);
3527 case BUILT_IN_MEMPCPY:
3528 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3529 gimple_call_arg (stmt, 1), 1);
3530 case BUILT_IN_MEMMOVE:
3531 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3532 gimple_call_arg (stmt, 1), 3);
3533 case BUILT_IN_SPRINTF_CHK:
3534 case BUILT_IN_VSPRINTF_CHK:
3535 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3536 case BUILT_IN_STRCAT_CHK:
3537 return gimple_fold_builtin_strcat_chk (gsi);
3538 case BUILT_IN_STRNCAT_CHK:
3539 return gimple_fold_builtin_strncat_chk (gsi);
3540 case BUILT_IN_STRLEN:
3541 return gimple_fold_builtin_strlen (gsi);
3542 case BUILT_IN_STRCPY:
3543 return gimple_fold_builtin_strcpy (gsi,
3544 gimple_call_arg (stmt, 0),
3545 gimple_call_arg (stmt, 1));
3546 case BUILT_IN_STRNCPY:
3547 return gimple_fold_builtin_strncpy (gsi,
3548 gimple_call_arg (stmt, 0),
3549 gimple_call_arg (stmt, 1),
3550 gimple_call_arg (stmt, 2));
3551 case BUILT_IN_STRCAT:
3552 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3553 gimple_call_arg (stmt, 1));
3554 case BUILT_IN_STRNCAT:
3555 return gimple_fold_builtin_strncat (gsi);
3556 case BUILT_IN_INDEX:
3557 case BUILT_IN_STRCHR:
3558 return gimple_fold_builtin_strchr (gsi, false);
3559 case BUILT_IN_RINDEX:
3560 case BUILT_IN_STRRCHR:
3561 return gimple_fold_builtin_strchr (gsi, true);
3562 case BUILT_IN_STRSTR:
3563 return gimple_fold_builtin_strstr (gsi);
3564 case BUILT_IN_STRCMP:
3565 case BUILT_IN_STRCASECMP:
3566 case BUILT_IN_STRNCMP:
3567 case BUILT_IN_STRNCASECMP:
3568 return gimple_fold_builtin_string_compare (gsi);
3569 case BUILT_IN_MEMCHR:
3570 return gimple_fold_builtin_memchr (gsi);
3571 case BUILT_IN_FPUTS:
3572 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3573 gimple_call_arg (stmt, 1), false);
3574 case BUILT_IN_FPUTS_UNLOCKED:
3575 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3576 gimple_call_arg (stmt, 1), true);
3577 case BUILT_IN_MEMCPY_CHK:
3578 case BUILT_IN_MEMPCPY_CHK:
3579 case BUILT_IN_MEMMOVE_CHK:
3580 case BUILT_IN_MEMSET_CHK:
3581 return gimple_fold_builtin_memory_chk (gsi,
3582 gimple_call_arg (stmt, 0),
3583 gimple_call_arg (stmt, 1),
3584 gimple_call_arg (stmt, 2),
3585 gimple_call_arg (stmt, 3),
3586 fcode);
3587 case BUILT_IN_STPCPY:
3588 return gimple_fold_builtin_stpcpy (gsi);
3589 case BUILT_IN_STRCPY_CHK:
3590 case BUILT_IN_STPCPY_CHK:
3591 return gimple_fold_builtin_stxcpy_chk (gsi,
3592 gimple_call_arg (stmt, 0),
3593 gimple_call_arg (stmt, 1),
3594 gimple_call_arg (stmt, 2),
3595 fcode);
3596 case BUILT_IN_STRNCPY_CHK:
3597 case BUILT_IN_STPNCPY_CHK:
3598 return gimple_fold_builtin_stxncpy_chk (gsi,
3599 gimple_call_arg (stmt, 0),
3600 gimple_call_arg (stmt, 1),
3601 gimple_call_arg (stmt, 2),
3602 gimple_call_arg (stmt, 3),
3603 fcode);
3604 case BUILT_IN_SNPRINTF_CHK:
3605 case BUILT_IN_VSNPRINTF_CHK:
3606 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3608 case BUILT_IN_FPRINTF:
3609 case BUILT_IN_FPRINTF_UNLOCKED:
3610 case BUILT_IN_VFPRINTF:
3611 if (n == 2 || n == 3)
3612 return gimple_fold_builtin_fprintf (gsi,
3613 gimple_call_arg (stmt, 0),
3614 gimple_call_arg (stmt, 1),
3615 n == 3
3616 ? gimple_call_arg (stmt, 2)
3617 : NULL_TREE,
3618 fcode);
3619 break;
3620 case BUILT_IN_FPRINTF_CHK:
3621 case BUILT_IN_VFPRINTF_CHK:
3622 if (n == 3 || n == 4)
3623 return gimple_fold_builtin_fprintf (gsi,
3624 gimple_call_arg (stmt, 0),
3625 gimple_call_arg (stmt, 2),
3626 n == 4
3627 ? gimple_call_arg (stmt, 3)
3628 : NULL_TREE,
3629 fcode);
3630 break;
3631 case BUILT_IN_PRINTF:
3632 case BUILT_IN_PRINTF_UNLOCKED:
3633 case BUILT_IN_VPRINTF:
3634 if (n == 1 || n == 2)
3635 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3636 n == 2
3637 ? gimple_call_arg (stmt, 1)
3638 : NULL_TREE, fcode);
3639 break;
3640 case BUILT_IN_PRINTF_CHK:
3641 case BUILT_IN_VPRINTF_CHK:
3642 if (n == 2 || n == 3)
3643 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3644 n == 3
3645 ? gimple_call_arg (stmt, 2)
3646 : NULL_TREE, fcode);
3647 break;
3648 case BUILT_IN_ACC_ON_DEVICE:
3649 return gimple_fold_builtin_acc_on_device (gsi,
3650 gimple_call_arg (stmt, 0));
3651 case BUILT_IN_REALLOC:
3652 return gimple_fold_builtin_realloc (gsi);
3654 default:;
3657 /* Try the generic builtin folder. */
3658 bool ignore = (gimple_call_lhs (stmt) == NULL);
3659 tree result = fold_call_stmt (stmt, ignore);
3660 if (result)
3662 if (ignore)
3663 STRIP_NOPS (result);
3664 else
3665 result = fold_convert (gimple_call_return_type (stmt), result);
3666 if (!update_call_from_tree (gsi, result))
3667 gimplify_and_update_call_from_tree (gsi, result);
3668 return true;
3671 return false;
3674 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3675 function calls to constants, where possible. */
3677 static tree
3678 fold_internal_goacc_dim (const gimple *call)
3680 int axis = oacc_get_ifn_dim_arg (call);
3681 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3682 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3683 tree result = NULL_TREE;
3685 /* If the size is 1, or we only want the size and it is not dynamic,
3686 we know the answer. */
3687 if (size == 1 || (!is_pos && size))
3689 tree type = TREE_TYPE (gimple_call_lhs (call));
3690 result = build_int_cst (type, size - is_pos);
3693 return result;
3696 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3697 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3698 &var where var is only addressable because of such calls. */
3700 bool
3701 optimize_atomic_compare_exchange_p (gimple *stmt)
3703 if (gimple_call_num_args (stmt) != 6
3704 || !flag_inline_atomics
3705 || !optimize
3706 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3707 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3708 || !gimple_vdef (stmt)
3709 || !gimple_vuse (stmt))
3710 return false;
/* Only the fixed-size __atomic_compare_exchange_{1,2,4,8,16} builtins
   are handled; the generic _N variant is rejected by the default case.  */
3712 tree fndecl = gimple_call_fndecl (stmt);
3713 switch (DECL_FUNCTION_CODE (fndecl))
3715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3716 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3717 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3718 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3719 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3720 break;
3721 default:
3722 return false;
/* The expected-value argument must literally be the address of a
   function-local variable of scalar (register) type.  */
3725 tree expected = gimple_call_arg (stmt, 1);
3726 if (TREE_CODE (expected) != ADDR_EXPR
3727 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3728 return false;
3730 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3731 if (!is_gimple_reg_type (etype)
3732 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3733 || TREE_THIS_VOLATILE (etype)
3734 || VECTOR_TYPE_P (etype)
3735 || TREE_CODE (etype) == COMPLEX_TYPE
3736 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3737 might not preserve all the bits. See PR71716. */
3738 || SCALAR_FLOAT_TYPE_P (etype)
3739 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3740 return false;
3742 tree weak = gimple_call_arg (stmt, 3);
3743 if (!integer_zerop (weak) && !integer_onep (weak))
3744 return false;
/* The target must provide a compare-and-swap pattern in the mode of the
   desired-value parameter type (third parameter of the builtin).  */
3746 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3747 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3748 machine_mode mode = TYPE_MODE (itype);
3750 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3751 == CODE_FOR_nothing
3752 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3753 return false;
3755 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3756 return false;
3758 return true;
3761 /* Fold
3762 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3763 into
3764 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3765 i = IMAGPART_EXPR <t>;
3766 r = (_Bool) i;
3767 e = REALPART_EXPR <t>; */
3769 void
3770 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3772 gimple *stmt = gsi_stmt (*gsi);
3773 tree fndecl = gimple_call_fndecl (stmt);
3774 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3775 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3776 tree ctype = build_complex_type (itype);
3777 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3778 bool throws = false;
3779 edge e = NULL;
/* Load the expected variable into an SSA name so it can be passed by
   value; GSIRET remembers where the caller's iterator should end up.  */
3780 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3781 expected);
3782 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3783 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3784 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3786 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3787 build1 (VIEW_CONVERT_EXPR, itype,
3788 gimple_assign_lhs (g)));
3789 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode weak in bit 8 and the access size in the low bits, as the
   ATOMIC_COMPARE_EXCHANGE internal function expects.  */
3791 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3792 + int_size_in_bytes (itype);
3793 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3794 gimple_call_arg (stmt, 0),
3795 gimple_assign_lhs (g),
3796 gimple_call_arg (stmt, 2),
3797 build_int_cst (integer_type_node, flag),
3798 gimple_call_arg (stmt, 4),
3799 gimple_call_arg (stmt, 5));
3800 tree lhs = make_ssa_name (ctype);
3801 gimple_call_set_lhs (g, lhs);
3802 gimple_set_vdef (g, gimple_vdef (stmt));
3803 gimple_set_vuse (g, gimple_vuse (stmt));
3804 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3805 tree oldlhs = gimple_call_lhs (stmt);
/* If the original call could throw internally, statements that use its
   result must be placed on the fall-through edge after the call.  */
3806 if (stmt_can_throw_internal (stmt))
3808 throws = true;
3809 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3811 gimple_call_set_nothrow (as_a <gcall *> (g),
3812 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3813 gimple_call_set_lhs (stmt, NULL_TREE);
3814 gsi_replace (gsi, g, true);
3815 if (oldlhs)
3817 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3818 build1 (IMAGPART_EXPR, itype, lhs));
3819 if (throws)
3821 gsi_insert_on_edge_immediate (e, g);
3822 *gsi = gsi_for_stmt (g);
3824 else
3825 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3826 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3827 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3829 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3830 build1 (REALPART_EXPR, itype, lhs));
3831 if (throws && oldlhs == NULL_TREE)
3833 gsi_insert_on_edge_immediate (e, g);
3834 *gsi = gsi_for_stmt (g);
3836 else
3837 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3838 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3840 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3841 VIEW_CONVERT_EXPR,
3842 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3843 gimple_assign_lhs (g)));
3844 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3846 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3847 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3848 *gsi = gsiret;
3851 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3852 doesn't fit into TYPE. The test for overflow should be regardless of
3853 -fwrapv, and even for unsigned types. */
3855 bool
3856 arith_overflowed_p (enum tree_code code, const_tree type,
3857 const_tree arg0, const_tree arg1)
3859 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3860 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3861 widest2_int_cst;
3862 widest2_int warg0 = widest2_int_cst (arg0);
3863 widest2_int warg1 = widest2_int_cst (arg1);
3864 widest2_int wres;
3865 switch (code)
3867 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3868 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3869 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3870 default: gcc_unreachable ();
3872 signop sign = TYPE_SIGN (type);
3873 if (sign == UNSIGNED && wi::neg_p (wres))
3874 return true;
3875 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3878 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3879 The statement may be replaced by another statement, e.g., if the call
3880 simplifies to a constant value. Return true if any changes were made.
3881 It is assumed that the operands have been previously folded. */
3883 static bool
3884 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3886 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3887 tree callee;
3888 bool changed = false;
3889 unsigned i;
3891 /* Fold *& in call arguments. */
3892 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3893 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3895 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3896 if (tmp)
3898 gimple_call_set_arg (stmt, i, tmp);
3899 changed = true;
3903 /* Check for virtual calls that became direct calls. */
3904 callee = gimple_call_fn (stmt);
3905 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3907 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3909 if (dump_file && virtual_method_call_p (callee)
3910 && !possible_polymorphic_call_target_p
3911 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3912 (OBJ_TYPE_REF_EXPR (callee)))))
3914 fprintf (dump_file,
3915 "Type inheritance inconsistent devirtualization of ");
3916 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3917 fprintf (dump_file, " to ");
3918 print_generic_expr (dump_file, callee, TDF_SLIM);
3919 fprintf (dump_file, "\n");
3922 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3923 changed = true;
3925 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3927 bool final;
3928 vec <cgraph_node *>targets
3929 = possible_polymorphic_call_targets (callee, stmt, &final);
3930 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3932 tree lhs = gimple_call_lhs (stmt);
3933 if (dump_enabled_p ())
3935 location_t loc = gimple_location_safe (stmt);
3936 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3937 "folding virtual function call to %s\n",
3938 targets.length () == 1
3939 ? targets[0]->name ()
3940 : "__builtin_unreachable");
3942 if (targets.length () == 1)
3944 tree fndecl = targets[0]->decl;
3945 gimple_call_set_fndecl (stmt, fndecl);
3946 changed = true;
3947 /* If changing the call to __cxa_pure_virtual
3948 or similar noreturn function, adjust gimple_call_fntype
3949 too. */
3950 if (gimple_call_noreturn_p (stmt)
3951 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3952 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3953 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3954 == void_type_node))
3955 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3956 /* If the call becomes noreturn, remove the lhs. */
3957 if (lhs
3958 && gimple_call_noreturn_p (stmt)
3959 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3960 || should_remove_lhs_p (lhs)))
3962 if (TREE_CODE (lhs) == SSA_NAME)
3964 tree var = create_tmp_var (TREE_TYPE (lhs));
3965 tree def = get_or_create_ssa_default_def (cfun, var);
3966 gimple *new_stmt = gimple_build_assign (lhs, def);
3967 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3969 gimple_call_set_lhs (stmt, NULL_TREE);
3971 maybe_remove_unused_call_args (cfun, stmt);
3973 else
/* No possible target at all: the call is unreachable; replace it
   with a call to __builtin_unreachable.  */
3975 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3976 gimple *new_stmt = gimple_build_call (fndecl, 0);
3977 gimple_set_location (new_stmt, gimple_location (stmt));
3978 /* If the call had a SSA name as lhs morph that into
3979 an uninitialized value. */
3980 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3982 tree var = create_tmp_var (TREE_TYPE (lhs));
3983 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
3984 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
3985 set_ssa_default_def (cfun, var, lhs);
3987 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3988 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3989 gsi_replace (gsi, new_stmt, false);
3990 return true;
3996 /* Check for indirect calls that became direct calls, and then
3997 no longer require a static chain. */
3998 if (gimple_call_chain (stmt))
4000 tree fn = gimple_call_fndecl (stmt);
4001 if (fn && !DECL_STATIC_CHAIN (fn))
4003 gimple_call_set_chain (stmt, NULL);
4004 changed = true;
4006 else
4008 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4009 if (tmp)
4011 gimple_call_set_chain (stmt, tmp);
4012 changed = true;
4017 if (inplace)
4018 return changed;
4020 /* Check for builtins that CCP can handle using information not
4021 available in the generic fold routines. */
4022 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4024 if (gimple_fold_builtin (gsi))
4025 changed = true;
4027 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4029 changed |= targetm.gimple_fold_builtin (gsi);
/* Fold internal functions: __builtin_expect, UBSan checks, OpenACC
   dimensions and the arithmetic-with-overflow family.  */
4031 else if (gimple_call_internal_p (stmt))
4033 enum tree_code subcode = ERROR_MARK;
4034 tree result = NULL_TREE;
4035 bool cplx_result = false;
4036 tree overflow = NULL_TREE;
4037 switch (gimple_call_internal_fn (stmt))
4039 case IFN_BUILTIN_EXPECT:
4040 result = fold_builtin_expect (gimple_location (stmt),
4041 gimple_call_arg (stmt, 0),
4042 gimple_call_arg (stmt, 1),
4043 gimple_call_arg (stmt, 2));
4044 break;
4045 case IFN_UBSAN_OBJECT_SIZE:
4047 tree offset = gimple_call_arg (stmt, 1);
4048 tree objsize = gimple_call_arg (stmt, 2);
4049 if (integer_all_onesp (objsize)
4050 || (TREE_CODE (offset) == INTEGER_CST
4051 && TREE_CODE (objsize) == INTEGER_CST
4052 && tree_int_cst_le (offset, objsize)))
4054 replace_call_with_value (gsi, NULL_TREE);
4055 return true;
4058 break;
4059 case IFN_UBSAN_PTR:
4060 if (integer_zerop (gimple_call_arg (stmt, 1)))
4062 replace_call_with_value (gsi, NULL_TREE);
4063 return true;
4065 break;
4066 case IFN_UBSAN_BOUNDS:
4068 tree index = gimple_call_arg (stmt, 1);
4069 tree bound = gimple_call_arg (stmt, 2);
4070 if (TREE_CODE (index) == INTEGER_CST
4071 && TREE_CODE (bound) == INTEGER_CST)
4073 index = fold_convert (TREE_TYPE (bound), index);
4074 if (TREE_CODE (index) == INTEGER_CST
4075 && tree_int_cst_le (index, bound))
4077 replace_call_with_value (gsi, NULL_TREE);
4078 return true;
4082 break;
4083 case IFN_GOACC_DIM_SIZE:
4084 case IFN_GOACC_DIM_POS:
4085 result = fold_internal_goacc_dim (stmt);
4086 break;
4087 case IFN_UBSAN_CHECK_ADD:
4088 subcode = PLUS_EXPR;
4089 break;
4090 case IFN_UBSAN_CHECK_SUB:
4091 subcode = MINUS_EXPR;
4092 break;
4093 case IFN_UBSAN_CHECK_MUL:
4094 subcode = MULT_EXPR;
4095 break;
4096 case IFN_ADD_OVERFLOW:
4097 subcode = PLUS_EXPR;
4098 cplx_result = true;
4099 break;
4100 case IFN_SUB_OVERFLOW:
4101 subcode = MINUS_EXPR;
4102 cplx_result = true;
4103 break;
4104 case IFN_MUL_OVERFLOW:
4105 subcode = MULT_EXPR;
4106 cplx_result = true;
4107 break;
4108 default:
4109 break;
4111 if (subcode != ERROR_MARK)
4113 tree arg0 = gimple_call_arg (stmt, 0);
4114 tree arg1 = gimple_call_arg (stmt, 1);
4115 tree type = TREE_TYPE (arg0);
4116 if (cplx_result)
4118 tree lhs = gimple_call_lhs (stmt);
4119 if (lhs == NULL_TREE)
4120 type = NULL_TREE;
4121 else
4122 type = TREE_TYPE (TREE_TYPE (lhs));
4124 if (type == NULL_TREE)
4126 /* x = y + 0; x = y - 0; x = y * 0; */
4127 else if (integer_zerop (arg1))
4128 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4129 /* x = 0 + y; x = 0 * y; */
4130 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4131 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4132 /* x = y - y; */
4133 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4134 result = integer_zero_node;
4135 /* x = y * 1; x = 1 * y; */
4136 else if (subcode == MULT_EXPR && integer_onep (arg1))
4137 result = arg0;
4138 else if (subcode == MULT_EXPR && integer_onep (arg0))
4139 result = arg1;
4140 else if (TREE_CODE (arg0) == INTEGER_CST
4141 && TREE_CODE (arg1) == INTEGER_CST)
4143 if (cplx_result)
4144 result = int_const_binop (subcode, fold_convert (type, arg0),
4145 fold_convert (type, arg1));
4146 else
4147 result = int_const_binop (subcode, arg0, arg1);
4148 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4150 if (cplx_result)
4151 overflow = build_one_cst (type);
4152 else
4153 result = NULL_TREE;
4156 if (result)
4158 if (result == integer_zero_node)
4159 result = build_zero_cst (type);
4160 else if (cplx_result && TREE_TYPE (result) != type)
4162 if (TREE_CODE (result) == INTEGER_CST)
4164 if (arith_overflowed_p (PLUS_EXPR, type, result,
4165 integer_zero_node))
4166 overflow = build_one_cst (type);
4168 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4169 && TYPE_UNSIGNED (type))
4170 || (TYPE_PRECISION (type)
4171 < (TYPE_PRECISION (TREE_TYPE (result))
4172 + (TYPE_UNSIGNED (TREE_TYPE (result))
4173 && !TYPE_UNSIGNED (type)))))
4174 result = NULL_TREE;
4175 if (result)
4176 result = fold_convert (type, result);
4181 if (result)
4183 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4184 result = drop_tree_overflow (result);
4185 if (cplx_result)
4187 if (overflow == NULL_TREE)
4188 overflow = build_zero_cst (TREE_TYPE (result));
4189 tree ctype = build_complex_type (TREE_TYPE (result));
4190 if (TREE_CODE (result) == INTEGER_CST
4191 && TREE_CODE (overflow) == INTEGER_CST)
4192 result = build_complex (ctype, result, overflow);
4193 else
4194 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4195 ctype, result, overflow);
4197 if (!update_call_from_tree (gsi, result))
4198 gimplify_and_update_call_from_tree (gsi, result);
4199 changed = true;
4203 return changed;
4207 /* Return true whether NAME has a use on STMT. */
4209 static bool
4210 has_use_on_stmt (tree name, gimple *stmt)
4212 imm_use_iterator iter;
4213 use_operand_p use_p;
4214 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4215 if (USE_STMT (use_p) == stmt)
4216 return true;
4217 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.
   On failure nothing is changed and false is returned; the caller
   is then responsible for discarding *SEQ.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      /* ops[0] may itself be an embedded comparison tree; check its
	 operands as well.  */
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can only be replaced by a tree-code result.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: test it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: make the condition trivially
	     true or false.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result into SEQ and test it against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* Replacing an assignment RHS in place is only valid if the new
	 RHS does not need more operand slots than the old one.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same (combined) function, possibly simplified arguments:
	 rewrite the arguments in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      /* Verify no extra operands were produced beyond the call's arity.  */
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Push the result computation to SEQ, forcing the final value
	     into the original LHS, then replace the whole statement
	     while preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; the canonicalization happens
   in place.  Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check the access lies fully within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references to reach the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  /* The address is invariant so the base must be computable.  */
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used to look up SSA name values
   during simplification (it may decline by returning NULL_TREE).
   Returns true if the statement at *GSI was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* For in-place folding pass no sequence so gimple_simplify cannot
	 create new statements.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement; the simplification above may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valied.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place replacement is only possible when the new RHS does
	   not need more operand slots than the old one.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* Inputs that must stay in memory are folded as addressable
	       references, others as register values.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and rebuild
		 the address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return value if permitted.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  /* Emit any deferred overflow warnings unless folding changed the
     statement or warnings were suppressed on it.  */
  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
/* Valueization callback that ends up not following SSA edges.
   Returning NULL_TREE declines to provide a value for any name.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
4825 /* Valueization callback that ends up following single-use SSA edges only. */
4827 tree
4828 follow_single_use_edges (tree val)
4830 if (TREE_CODE (val) == SSA_NAME
4831 && !has_single_use (val))
4832 return NULL_TREE;
4833 return val;
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* No valueization: SSA edges are not followed.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
/* Like the fold_stmt overload above, but use VALUEIZE to look up
   SSA name values during simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
4872 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4873 if EXPR is null or we don't know how.
4874 If non-null, the result always has boolean type. */
4876 static tree
4877 canonicalize_bool (tree expr, bool invert)
4879 if (!expr)
4880 return NULL_TREE;
4881 else if (invert)
4883 if (integer_nonzerop (expr))
4884 return boolean_false_node;
4885 else if (integer_zerop (expr))
4886 return boolean_true_node;
4887 else if (TREE_CODE (expr) == SSA_NAME)
4888 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4889 build_int_cst (TREE_TYPE (expr), 0));
4890 else if (COMPARISON_CLASS_P (expr))
4891 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4892 boolean_type_node,
4893 TREE_OPERAND (expr, 0),
4894 TREE_OPERAND (expr, 1));
4895 else
4896 return NULL_TREE;
4898 else
4900 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4901 return expr;
4902 if (integer_nonzerop (expr))
4903 return boolean_true_node;
4904 else if (integer_zerop (expr))
4905 return boolean_false_node;
4906 else if (TREE_CODE (expr) == SSA_NAME)
4907 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4908 build_int_cst (TREE_TYPE (expr), 0));
4909 else if (COMPARISON_CLASS_P (expr))
4910 return fold_build2 (TREE_CODE (expr),
4911 boolean_type_node,
4912 TREE_OPERAND (expr, 0),
4913 TREE_OPERAND (expr, 1));
4914 else
4915 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name is equivalent to (name != 0) and to (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise compare against the name's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == nonzero) preserve the comparison;
	     (name == 0) and (name != nonzero) invert it.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
4977 /* Check to see if two boolean expressions OP1 and OP2 are logically
4978 equivalent. */
4980 static bool
4981 same_bool_result_p (const_tree op1, const_tree op2)
4983 /* Simple cases first. */
4984 if (operand_equal_p (op1, op2, 0))
4985 return true;
4987 /* Check the cases where at least one of the operands is a comparison.
4988 These are a bit smarter than operand_equal_p in that they apply some
4989 identifies on SSA_NAMEs. */
4990 if (COMPARISON_CLASS_P (op2)
4991 && same_bool_comparison_p (op1, TREE_CODE (op2),
4992 TREE_OPERAND (op2, 0),
4993 TREE_OPERAND (op2, 1)))
4994 return true;
4995 if (COMPARISON_CLASS_P (op1)
4996 && same_bool_comparison_p (op2, TREE_CODE (op1),
4997 TREE_OPERAND (op1, 0),
4998 TREE_OPERAND (op1, 1)))
4999 return true;
5001 /* Default case. */
5002 return false;
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other via DeMorgan's
   law, so all six prototypes are needed up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5026 /* Helper function for and_comparisons_1: try to simplify the AND of the
5027 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5028 If INVERT is true, invert the value of the VAR before doing the AND.
5029 Return NULL_EXPR if we can't simplify this to a single expression. */
5031 static tree
5032 and_var_with_comparison (tree var, bool invert,
5033 enum tree_code code2, tree op2a, tree op2b)
5035 tree t;
5036 gimple *stmt = SSA_NAME_DEF_STMT (var);
5038 /* We can only deal with variables whose definitions are assignments. */
5039 if (!is_gimple_assign (stmt))
5040 return NULL_TREE;
5042 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5043 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5044 Then we only have to consider the simpler non-inverted cases. */
5045 if (invert)
5046 t = or_var_with_comparison_1 (stmt,
5047 invert_tree_comparison (code2, false),
5048 op2a, op2b);
5049 else
5050 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5051 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == nonzero) test op2a being true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  /* var AND (var != 0) => var.  */
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != nonzero) test op2a being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  /* var AND (var == 0) => false.  */
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5217 /* Try to simplify the AND of two comparisons defined by
5218 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5219 If this can be done without constructing an intermediate value,
5220 return the resulting tree; otherwise NULL_TREE is returned.
5221 This function is deliberately asymmetric as it recurses on SSA_DEFs
5222 in the first comparison but not the second. */
5224 static tree
5225 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5226 enum tree_code code2, tree op2a, tree op2b)
5228 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5230 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5231 if (operand_equal_p (op1a, op2a, 0)
5232 && operand_equal_p (op1b, op2b, 0))
5234 /* Result will be either NULL_TREE, or a combined comparison. */
5235 tree t = combine_comparisons (UNKNOWN_LOCATION,
5236 TRUTH_ANDIF_EXPR, code1, code2,
5237 truth_type, op1a, op1b);
5238 if (t)
5239 return t;
5242 /* Likewise the swapped case of the above. */
5243 if (operand_equal_p (op1a, op2b, 0)
5244 && operand_equal_p (op1b, op2a, 0))
5246 /* Result will be either NULL_TREE, or a combined comparison. */
5247 tree t = combine_comparisons (UNKNOWN_LOCATION,
5248 TRUTH_ANDIF_EXPR, code1,
5249 swap_tree_comparison (code2),
5250 truth_type, op1a, op1b);
5251 if (t)
5252 return t;
5255 /* If both comparisons are of the same value against constants, we might
5256 be able to merge them. */
5257 if (operand_equal_p (op1a, op2a, 0)
5258 && TREE_CODE (op1b) == INTEGER_CST
5259 && TREE_CODE (op2b) == INTEGER_CST)
5261 int cmp = tree_int_cst_compare (op1b, op2b);
5263 /* If we have (op1a == op1b), we should either be able to
5264 return that or FALSE, depending on whether the constant op1b
5265 also satisfies the other comparison against op2b. */
5266 if (code1 == EQ_EXPR)
5268 bool done = true;
5269 bool val;
5270 switch (code2)
5272 case EQ_EXPR: val = (cmp == 0); break;
5273 case NE_EXPR: val = (cmp != 0); break;
5274 case LT_EXPR: val = (cmp < 0); break;
5275 case GT_EXPR: val = (cmp > 0); break;
5276 case LE_EXPR: val = (cmp <= 0); break;
5277 case GE_EXPR: val = (cmp >= 0); break;
5278 default: done = false;
5280 if (done)
5282 if (val)
5283 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5284 else
5285 return boolean_false_node;
5288 /* Likewise if the second comparison is an == comparison. */
5289 else if (code2 == EQ_EXPR)
5291 bool done = true;
5292 bool val;
5293 switch (code1)
5295 case EQ_EXPR: val = (cmp == 0); break;
5296 case NE_EXPR: val = (cmp != 0); break;
5297 case LT_EXPR: val = (cmp > 0); break;
5298 case GT_EXPR: val = (cmp < 0); break;
5299 case LE_EXPR: val = (cmp >= 0); break;
5300 case GE_EXPR: val = (cmp <= 0); break;
5301 default: done = false;
5303 if (done)
5305 if (val)
5306 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5307 else
5308 return boolean_false_node;
5312 /* Same business with inequality tests. */
5313 else if (code1 == NE_EXPR)
5315 bool val;
5316 switch (code2)
5318 case EQ_EXPR: val = (cmp != 0); break;
5319 case NE_EXPR: val = (cmp == 0); break;
5320 case LT_EXPR: val = (cmp >= 0); break;
5321 case GT_EXPR: val = (cmp <= 0); break;
5322 case LE_EXPR: val = (cmp > 0); break;
5323 case GE_EXPR: val = (cmp < 0); break;
5324 default:
5325 val = false;
5327 if (val)
5328 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5330 else if (code2 == NE_EXPR)
5332 bool val;
5333 switch (code1)
5335 case EQ_EXPR: val = (cmp == 0); break;
5336 case NE_EXPR: val = (cmp != 0); break;
5337 case LT_EXPR: val = (cmp <= 0); break;
5338 case GT_EXPR: val = (cmp >= 0); break;
5339 case LE_EXPR: val = (cmp < 0); break;
5340 case GE_EXPR: val = (cmp > 0); break;
5341 default:
5342 val = false;
5344 if (val)
5345 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5348 /* Chose the more restrictive of two < or <= comparisons. */
5349 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5350 && (code2 == LT_EXPR || code2 == LE_EXPR))
5352 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5353 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5354 else
5355 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5358 /* Likewise chose the more restrictive of two > or >= comparisons. */
5359 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5360 && (code2 == GT_EXPR || code2 == GE_EXPR))
5362 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5363 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5364 else
5365 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5368 /* Check for singleton ranges. */
5369 else if (cmp == 0
5370 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5371 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5372 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5374 /* Check for disjoint ranges. */
5375 else if (cmp <= 0
5376 && (code1 == LT_EXPR || code1 == LE_EXPR)
5377 && (code2 == GT_EXPR || code2 == GE_EXPR))
5378 return boolean_false_node;
5379 else if (cmp >= 0
5380 && (code1 == GT_EXPR || code1 == GE_EXPR)
5381 && (code2 == LT_EXPR || code2 == LE_EXPR))
5382 return boolean_false_node;
5385 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5386 NAME's definition is a truth value. See if there are any simplifications
5387 that can be done against the NAME's definition. */
5388 if (TREE_CODE (op1a) == SSA_NAME
5389 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5390 && (integer_zerop (op1b) || integer_onep (op1b)))
5392 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5393 || (code1 == NE_EXPR && integer_onep (op1b)));
5394 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5395 switch (gimple_code (stmt))
5397 case GIMPLE_ASSIGN:
5398 /* Try to simplify by copy-propagating the definition. */
5399 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5401 case GIMPLE_PHI:
5402 /* If every argument to the PHI produces the same result when
5403 ANDed with the second comparison, we win.
5404 Do not do this unless the type is bool since we need a bool
5405 result here anyway. */
5406 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5408 tree result = NULL_TREE;
5409 unsigned i;
5410 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5412 tree arg = gimple_phi_arg_def (stmt, i);
5414 /* If this PHI has itself as an argument, ignore it.
5415 If all the other args produce the same result,
5416 we're still OK. */
5417 if (arg == gimple_phi_result (stmt))
5418 continue;
5419 else if (TREE_CODE (arg) == INTEGER_CST)
5421 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5423 if (!result)
5424 result = boolean_false_node;
5425 else if (!integer_zerop (result))
5426 return NULL_TREE;
5428 else if (!result)
5429 result = fold_build2 (code2, boolean_type_node,
5430 op2a, op2b);
5431 else if (!same_bool_comparison_p (result,
5432 code2, op2a, op2b))
5433 return NULL_TREE;
5435 else if (TREE_CODE (arg) == SSA_NAME
5436 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5438 tree temp;
5439 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5440 /* In simple cases we can look through PHI nodes,
5441 but we have to be careful with loops.
5442 See PR49073. */
5443 if (! dom_info_available_p (CDI_DOMINATORS)
5444 || gimple_bb (def_stmt) == gimple_bb (stmt)
5445 || dominated_by_p (CDI_DOMINATORS,
5446 gimple_bb (def_stmt),
5447 gimple_bb (stmt)))
5448 return NULL_TREE;
5449 temp = and_var_with_comparison (arg, invert, code2,
5450 op2a, op2b);
5451 if (!temp)
5452 return NULL_TREE;
5453 else if (!result)
5454 result = temp;
5455 else if (!same_bool_result_p (result, temp))
5456 return NULL_TREE;
5458 else
5459 return NULL_TREE;
5461 return result;
5464 default:
5465 break;
5468 return NULL_TREE;
5471 /* Try to simplify the AND of two comparisons, specified by
5472 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5473 If this can be simplified to a single expression (without requiring
5474 introducing more SSA variables to hold intermediate values),
5475 return the resulting tree. Otherwise return NULL_TREE.
5476 If the result expression is non-null, it has boolean type. */
5478 tree
5479 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5480 enum tree_code code2, tree op2a, tree op2b)
5482 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5483 if (t)
5484 return t;
5485 else
5486 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5489 /* Helper function for or_comparisons_1: try to simplify the OR of the
5490 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5491 If INVERT is true, invert the value of VAR before doing the OR.
5492 Return NULL_EXPR if we can't simplify this to a single expression. */
5494 static tree
5495 or_var_with_comparison (tree var, bool invert,
5496 enum tree_code code2, tree op2a, tree op2b)
5498 tree t;
5499 gimple *stmt = SSA_NAME_DEF_STMT (var);
5501 /* We can only deal with variables whose definitions are assignments. */
5502 if (!is_gimple_assign (stmt))
5503 return NULL_TREE;
5505 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5506 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5507 Then we only have to consider the simpler non-inverted cases. */
5508 if (invert)
5509 t = and_var_with_comparison_1 (stmt,
5510 invert_tree_comparison (code2, false),
5511 op2a, op2b);
5512 else
5513 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5514 return canonicalize_bool (t, invert);
5517 /* Try to simplify the OR of the ssa variable defined by the assignment
5518 STMT with the comparison specified by (OP2A CODE2 OP2B).
5519 Return NULL_EXPR if we can't simplify this to a single expression. */
5521 static tree
5522 or_var_with_comparison_1 (gimple *stmt,
5523 enum tree_code code2, tree op2a, tree op2b)
5525 tree var = gimple_assign_lhs (stmt);
5526 tree true_test_var = NULL_TREE;
5527 tree false_test_var = NULL_TREE;
5528 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5530 /* Check for identities like (var OR (var != 0)) => true . */
5531 if (TREE_CODE (op2a) == SSA_NAME
5532 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5534 if ((code2 == NE_EXPR && integer_zerop (op2b))
5535 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5537 true_test_var = op2a;
5538 if (var == true_test_var)
5539 return var;
5541 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5542 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5544 false_test_var = op2a;
5545 if (var == false_test_var)
5546 return boolean_true_node;
5550 /* If the definition is a comparison, recurse on it. */
5551 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5553 tree t = or_comparisons_1 (innercode,
5554 gimple_assign_rhs1 (stmt),
5555 gimple_assign_rhs2 (stmt),
5556 code2,
5557 op2a,
5558 op2b);
5559 if (t)
5560 return t;
5563 /* If the definition is an AND or OR expression, we may be able to
5564 simplify by reassociating. */
5565 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5566 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5568 tree inner1 = gimple_assign_rhs1 (stmt);
5569 tree inner2 = gimple_assign_rhs2 (stmt);
5570 gimple *s;
5571 tree t;
5572 tree partial = NULL_TREE;
5573 bool is_or = (innercode == BIT_IOR_EXPR);
5575 /* Check for boolean identities that don't require recursive examination
5576 of inner1/inner2:
5577 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5578 inner1 OR (inner1 AND inner2) => inner1
5579 !inner1 OR (inner1 OR inner2) => true
5580 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5582 if (inner1 == true_test_var)
5583 return (is_or ? var : inner1);
5584 else if (inner2 == true_test_var)
5585 return (is_or ? var : inner2);
5586 else if (inner1 == false_test_var)
5587 return (is_or
5588 ? boolean_true_node
5589 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5590 else if (inner2 == false_test_var)
5591 return (is_or
5592 ? boolean_true_node
5593 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5595 /* Next, redistribute/reassociate the OR across the inner tests.
5596 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5597 if (TREE_CODE (inner1) == SSA_NAME
5598 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5599 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5600 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5601 gimple_assign_rhs1 (s),
5602 gimple_assign_rhs2 (s),
5603 code2, op2a, op2b)))
5605 /* Handle the OR case, where we are reassociating:
5606 (inner1 OR inner2) OR (op2a code2 op2b)
5607 => (t OR inner2)
5608 If the partial result t is a constant, we win. Otherwise
5609 continue on to try reassociating with the other inner test. */
5610 if (is_or)
5612 if (integer_onep (t))
5613 return boolean_true_node;
5614 else if (integer_zerop (t))
5615 return inner2;
5618 /* Handle the AND case, where we are redistributing:
5619 (inner1 AND inner2) OR (op2a code2 op2b)
5620 => (t AND (inner2 OR (op2a code op2b))) */
5621 else if (integer_zerop (t))
5622 return boolean_false_node;
5624 /* Save partial result for later. */
5625 partial = t;
5628 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5629 if (TREE_CODE (inner2) == SSA_NAME
5630 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5631 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5632 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5633 gimple_assign_rhs1 (s),
5634 gimple_assign_rhs2 (s),
5635 code2, op2a, op2b)))
5637 /* Handle the OR case, where we are reassociating:
5638 (inner1 OR inner2) OR (op2a code2 op2b)
5639 => (inner1 OR t)
5640 => (t OR partial) */
5641 if (is_or)
5643 if (integer_zerop (t))
5644 return inner1;
5645 else if (integer_onep (t))
5646 return boolean_true_node;
5647 /* If both are the same, we can apply the identity
5648 (x OR x) == x. */
5649 else if (partial && same_bool_result_p (t, partial))
5650 return t;
5653 /* Handle the AND case, where we are redistributing:
5654 (inner1 AND inner2) OR (op2a code2 op2b)
5655 => (t AND (inner1 OR (op2a code2 op2b)))
5656 => (t AND partial) */
5657 else
5659 if (integer_zerop (t))
5660 return boolean_false_node;
5661 else if (partial)
5663 /* We already got a simplification for the other
5664 operand to the redistributed AND expression. The
5665 interesting case is when at least one is true.
5666 Or, if both are the same, we can apply the identity
5667 (x AND x) == x. */
5668 if (integer_onep (partial))
5669 return t;
5670 else if (integer_onep (t))
5671 return partial;
5672 else if (same_bool_result_p (t, partial))
5673 return t;
5678 return NULL_TREE;
5681 /* Try to simplify the OR of two comparisons defined by
5682 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5683 If this can be done without constructing an intermediate value,
5684 return the resulting tree; otherwise NULL_TREE is returned.
5685 This function is deliberately asymmetric as it recurses on SSA_DEFs
5686 in the first comparison but not the second. */
5688 static tree
5689 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5690 enum tree_code code2, tree op2a, tree op2b)
5692 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5694 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5695 if (operand_equal_p (op1a, op2a, 0)
5696 && operand_equal_p (op1b, op2b, 0))
5698 /* Result will be either NULL_TREE, or a combined comparison. */
5699 tree t = combine_comparisons (UNKNOWN_LOCATION,
5700 TRUTH_ORIF_EXPR, code1, code2,
5701 truth_type, op1a, op1b);
5702 if (t)
5703 return t;
5706 /* Likewise the swapped case of the above. */
5707 if (operand_equal_p (op1a, op2b, 0)
5708 && operand_equal_p (op1b, op2a, 0))
5710 /* Result will be either NULL_TREE, or a combined comparison. */
5711 tree t = combine_comparisons (UNKNOWN_LOCATION,
5712 TRUTH_ORIF_EXPR, code1,
5713 swap_tree_comparison (code2),
5714 truth_type, op1a, op1b);
5715 if (t)
5716 return t;
5719 /* If both comparisons are of the same value against constants, we might
5720 be able to merge them. */
5721 if (operand_equal_p (op1a, op2a, 0)
5722 && TREE_CODE (op1b) == INTEGER_CST
5723 && TREE_CODE (op2b) == INTEGER_CST)
5725 int cmp = tree_int_cst_compare (op1b, op2b);
5727 /* If we have (op1a != op1b), we should either be able to
5728 return that or TRUE, depending on whether the constant op1b
5729 also satisfies the other comparison against op2b. */
5730 if (code1 == NE_EXPR)
5732 bool done = true;
5733 bool val;
5734 switch (code2)
5736 case EQ_EXPR: val = (cmp == 0); break;
5737 case NE_EXPR: val = (cmp != 0); break;
5738 case LT_EXPR: val = (cmp < 0); break;
5739 case GT_EXPR: val = (cmp > 0); break;
5740 case LE_EXPR: val = (cmp <= 0); break;
5741 case GE_EXPR: val = (cmp >= 0); break;
5742 default: done = false;
5744 if (done)
5746 if (val)
5747 return boolean_true_node;
5748 else
5749 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5752 /* Likewise if the second comparison is a != comparison. */
5753 else if (code2 == NE_EXPR)
5755 bool done = true;
5756 bool val;
5757 switch (code1)
5759 case EQ_EXPR: val = (cmp == 0); break;
5760 case NE_EXPR: val = (cmp != 0); break;
5761 case LT_EXPR: val = (cmp > 0); break;
5762 case GT_EXPR: val = (cmp < 0); break;
5763 case LE_EXPR: val = (cmp >= 0); break;
5764 case GE_EXPR: val = (cmp <= 0); break;
5765 default: done = false;
5767 if (done)
5769 if (val)
5770 return boolean_true_node;
5771 else
5772 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5776 /* See if an equality test is redundant with the other comparison. */
5777 else if (code1 == EQ_EXPR)
5779 bool val;
5780 switch (code2)
5782 case EQ_EXPR: val = (cmp == 0); break;
5783 case NE_EXPR: val = (cmp != 0); break;
5784 case LT_EXPR: val = (cmp < 0); break;
5785 case GT_EXPR: val = (cmp > 0); break;
5786 case LE_EXPR: val = (cmp <= 0); break;
5787 case GE_EXPR: val = (cmp >= 0); break;
5788 default:
5789 val = false;
5791 if (val)
5792 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5794 else if (code2 == EQ_EXPR)
5796 bool val;
5797 switch (code1)
5799 case EQ_EXPR: val = (cmp == 0); break;
5800 case NE_EXPR: val = (cmp != 0); break;
5801 case LT_EXPR: val = (cmp > 0); break;
5802 case GT_EXPR: val = (cmp < 0); break;
5803 case LE_EXPR: val = (cmp >= 0); break;
5804 case GE_EXPR: val = (cmp <= 0); break;
5805 default:
5806 val = false;
5808 if (val)
5809 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5812 /* Chose the less restrictive of two < or <= comparisons. */
5813 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5814 && (code2 == LT_EXPR || code2 == LE_EXPR))
5816 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5817 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5818 else
5819 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5822 /* Likewise chose the less restrictive of two > or >= comparisons. */
5823 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5824 && (code2 == GT_EXPR || code2 == GE_EXPR))
5826 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5827 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5828 else
5829 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5832 /* Check for singleton ranges. */
5833 else if (cmp == 0
5834 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5835 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5836 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5838 /* Check for less/greater pairs that don't restrict the range at all. */
5839 else if (cmp >= 0
5840 && (code1 == LT_EXPR || code1 == LE_EXPR)
5841 && (code2 == GT_EXPR || code2 == GE_EXPR))
5842 return boolean_true_node;
5843 else if (cmp <= 0
5844 && (code1 == GT_EXPR || code1 == GE_EXPR)
5845 && (code2 == LT_EXPR || code2 == LE_EXPR))
5846 return boolean_true_node;
5849 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5850 NAME's definition is a truth value. See if there are any simplifications
5851 that can be done against the NAME's definition. */
5852 if (TREE_CODE (op1a) == SSA_NAME
5853 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5854 && (integer_zerop (op1b) || integer_onep (op1b)))
5856 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5857 || (code1 == NE_EXPR && integer_onep (op1b)));
5858 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5859 switch (gimple_code (stmt))
5861 case GIMPLE_ASSIGN:
5862 /* Try to simplify by copy-propagating the definition. */
5863 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5865 case GIMPLE_PHI:
5866 /* If every argument to the PHI produces the same result when
5867 ORed with the second comparison, we win.
5868 Do not do this unless the type is bool since we need a bool
5869 result here anyway. */
5870 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5872 tree result = NULL_TREE;
5873 unsigned i;
5874 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5876 tree arg = gimple_phi_arg_def (stmt, i);
5878 /* If this PHI has itself as an argument, ignore it.
5879 If all the other args produce the same result,
5880 we're still OK. */
5881 if (arg == gimple_phi_result (stmt))
5882 continue;
5883 else if (TREE_CODE (arg) == INTEGER_CST)
5885 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5887 if (!result)
5888 result = boolean_true_node;
5889 else if (!integer_onep (result))
5890 return NULL_TREE;
5892 else if (!result)
5893 result = fold_build2 (code2, boolean_type_node,
5894 op2a, op2b);
5895 else if (!same_bool_comparison_p (result,
5896 code2, op2a, op2b))
5897 return NULL_TREE;
5899 else if (TREE_CODE (arg) == SSA_NAME
5900 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5902 tree temp;
5903 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5904 /* In simple cases we can look through PHI nodes,
5905 but we have to be careful with loops.
5906 See PR49073. */
5907 if (! dom_info_available_p (CDI_DOMINATORS)
5908 || gimple_bb (def_stmt) == gimple_bb (stmt)
5909 || dominated_by_p (CDI_DOMINATORS,
5910 gimple_bb (def_stmt),
5911 gimple_bb (stmt)))
5912 return NULL_TREE;
5913 temp = or_var_with_comparison (arg, invert, code2,
5914 op2a, op2b);
5915 if (!temp)
5916 return NULL_TREE;
5917 else if (!result)
5918 result = temp;
5919 else if (!same_bool_result_p (result, temp))
5920 return NULL_TREE;
5922 else
5923 return NULL_TREE;
5925 return result;
5928 default:
5929 break;
5932 return NULL_TREE;
5935 /* Try to simplify the OR of two comparisons, specified by
5936 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5937 If this can be simplified to a single expression (without requiring
5938 introducing more SSA variables to hold intermediate values),
5939 return the resulting tree. Otherwise return NULL_TREE.
5940 If the result expression is non-null, it has boolean type. */
5942 tree
5943 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5944 enum tree_code code2, tree op2a, tree op2b)
5946 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5947 if (t)
5948 return t;
5949 else
5950 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5954 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5956 Either NULL_TREE, a simplified but non-constant or a constant
5957 is returned.
5959 ??? This should go into a gimple-fold-inline.h file to be eventually
5960 privatized with the single valueize function used in the various TUs
5961 to avoid the indirect function call overhead. */
5963 tree
5964 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5965 tree (*gvalueize) (tree))
5967 code_helper rcode;
5968 tree ops[3] = {};
5969 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5970 edges if there are intermediate VARYING defs. For this reason
5971 do not follow SSA edges here even though SCCVN can technically
5972 just deal fine with that. */
5973 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5975 tree res = NULL_TREE;
5976 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5977 res = ops[0];
5978 else if (mprts_hook)
5979 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5980 if (res)
5982 if (dump_file && dump_flags & TDF_DETAILS)
5984 fprintf (dump_file, "Match-and-simplified ");
5985 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5986 fprintf (dump_file, " to ");
5987 print_generic_expr (dump_file, res);
5988 fprintf (dump_file, "\n");
5990 return res;
5994 location_t loc = gimple_location (stmt);
5995 switch (gimple_code (stmt))
5997 case GIMPLE_ASSIGN:
5999 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6001 switch (get_gimple_rhs_class (subcode))
6003 case GIMPLE_SINGLE_RHS:
6005 tree rhs = gimple_assign_rhs1 (stmt);
6006 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6008 if (TREE_CODE (rhs) == SSA_NAME)
6010 /* If the RHS is an SSA_NAME, return its known constant value,
6011 if any. */
6012 return (*valueize) (rhs);
6014 /* Handle propagating invariant addresses into address
6015 operations. */
6016 else if (TREE_CODE (rhs) == ADDR_EXPR
6017 && !is_gimple_min_invariant (rhs))
6019 HOST_WIDE_INT offset = 0;
6020 tree base;
6021 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6022 &offset,
6023 valueize);
6024 if (base
6025 && (CONSTANT_CLASS_P (base)
6026 || decl_address_invariant_p (base)))
6027 return build_invariant_address (TREE_TYPE (rhs),
6028 base, offset);
6030 else if (TREE_CODE (rhs) == CONSTRUCTOR
6031 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6032 && (CONSTRUCTOR_NELTS (rhs)
6033 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6035 unsigned i, nelts;
6036 tree val;
6038 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6039 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6040 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6042 val = (*valueize) (val);
6043 if (TREE_CODE (val) == INTEGER_CST
6044 || TREE_CODE (val) == REAL_CST
6045 || TREE_CODE (val) == FIXED_CST)
6046 vec.quick_push (val);
6047 else
6048 return NULL_TREE;
6051 return vec.build ();
6053 if (subcode == OBJ_TYPE_REF)
6055 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6056 /* If callee is constant, we can fold away the wrapper. */
6057 if (is_gimple_min_invariant (val))
6058 return val;
6061 if (kind == tcc_reference)
6063 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6064 || TREE_CODE (rhs) == REALPART_EXPR
6065 || TREE_CODE (rhs) == IMAGPART_EXPR)
6066 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6068 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6069 return fold_unary_loc (EXPR_LOCATION (rhs),
6070 TREE_CODE (rhs),
6071 TREE_TYPE (rhs), val);
6073 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6074 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6076 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6077 return fold_ternary_loc (EXPR_LOCATION (rhs),
6078 TREE_CODE (rhs),
6079 TREE_TYPE (rhs), val,
6080 TREE_OPERAND (rhs, 1),
6081 TREE_OPERAND (rhs, 2));
6083 else if (TREE_CODE (rhs) == MEM_REF
6084 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6086 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6087 if (TREE_CODE (val) == ADDR_EXPR
6088 && is_gimple_min_invariant (val))
6090 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6091 unshare_expr (val),
6092 TREE_OPERAND (rhs, 1));
6093 if (tem)
6094 rhs = tem;
6097 return fold_const_aggregate_ref_1 (rhs, valueize);
6099 else if (kind == tcc_declaration)
6100 return get_symbol_constant_value (rhs);
6101 return rhs;
6104 case GIMPLE_UNARY_RHS:
6105 return NULL_TREE;
6107 case GIMPLE_BINARY_RHS:
6108 /* Translate &x + CST into an invariant form suitable for
6109 further propagation. */
6110 if (subcode == POINTER_PLUS_EXPR)
6112 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6113 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6114 if (TREE_CODE (op0) == ADDR_EXPR
6115 && TREE_CODE (op1) == INTEGER_CST)
6117 tree off = fold_convert (ptr_type_node, op1);
6118 return build_fold_addr_expr_loc
6119 (loc,
6120 fold_build2 (MEM_REF,
6121 TREE_TYPE (TREE_TYPE (op0)),
6122 unshare_expr (op0), off));
6125 /* Canonicalize bool != 0 and bool == 0 appearing after
6126 valueization. While gimple_simplify handles this
6127 it can get confused by the ~X == 1 -> X == 0 transform
6128 which we cant reduce to a SSA name or a constant
6129 (and we have no way to tell gimple_simplify to not
6130 consider those transforms in the first place). */
6131 else if (subcode == EQ_EXPR
6132 || subcode == NE_EXPR)
6134 tree lhs = gimple_assign_lhs (stmt);
6135 tree op0 = gimple_assign_rhs1 (stmt);
6136 if (useless_type_conversion_p (TREE_TYPE (lhs),
6137 TREE_TYPE (op0)))
6139 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6140 op0 = (*valueize) (op0);
6141 if (TREE_CODE (op0) == INTEGER_CST)
6142 std::swap (op0, op1);
6143 if (TREE_CODE (op1) == INTEGER_CST
6144 && ((subcode == NE_EXPR && integer_zerop (op1))
6145 || (subcode == EQ_EXPR && integer_onep (op1))))
6146 return op0;
6149 return NULL_TREE;
6151 case GIMPLE_TERNARY_RHS:
6153 /* Handle ternary operators that can appear in GIMPLE form. */
6154 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6155 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6156 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6157 return fold_ternary_loc (loc, subcode,
6158 gimple_expr_type (stmt), op0, op1, op2);
6161 default:
6162 gcc_unreachable ();
6166 case GIMPLE_CALL:
6168 tree fn;
6169 gcall *call_stmt = as_a <gcall *> (stmt);
6171 if (gimple_call_internal_p (stmt))
6173 enum tree_code subcode = ERROR_MARK;
6174 switch (gimple_call_internal_fn (stmt))
6176 case IFN_UBSAN_CHECK_ADD:
6177 subcode = PLUS_EXPR;
6178 break;
6179 case IFN_UBSAN_CHECK_SUB:
6180 subcode = MINUS_EXPR;
6181 break;
6182 case IFN_UBSAN_CHECK_MUL:
6183 subcode = MULT_EXPR;
6184 break;
6185 case IFN_BUILTIN_EXPECT:
6187 tree arg0 = gimple_call_arg (stmt, 0);
6188 tree op0 = (*valueize) (arg0);
6189 if (TREE_CODE (op0) == INTEGER_CST)
6190 return op0;
6191 return NULL_TREE;
6193 default:
6194 return NULL_TREE;
6196 tree arg0 = gimple_call_arg (stmt, 0);
6197 tree arg1 = gimple_call_arg (stmt, 1);
6198 tree op0 = (*valueize) (arg0);
6199 tree op1 = (*valueize) (arg1);
6201 if (TREE_CODE (op0) != INTEGER_CST
6202 || TREE_CODE (op1) != INTEGER_CST)
6204 switch (subcode)
6206 case MULT_EXPR:
6207 /* x * 0 = 0 * x = 0 without overflow. */
6208 if (integer_zerop (op0) || integer_zerop (op1))
6209 return build_zero_cst (TREE_TYPE (arg0));
6210 break;
6211 case MINUS_EXPR:
6212 /* y - y = 0 without overflow. */
6213 if (operand_equal_p (op0, op1, 0))
6214 return build_zero_cst (TREE_TYPE (arg0));
6215 break;
6216 default:
6217 break;
6220 tree res
6221 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6222 if (res
6223 && TREE_CODE (res) == INTEGER_CST
6224 && !TREE_OVERFLOW (res))
6225 return res;
6226 return NULL_TREE;
6229 fn = (*valueize) (gimple_call_fn (stmt));
6230 if (TREE_CODE (fn) == ADDR_EXPR
6231 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6232 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6233 && gimple_builtin_call_types_compatible_p (stmt,
6234 TREE_OPERAND (fn, 0)))
6236 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6237 tree retval;
6238 unsigned i;
6239 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6240 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6241 retval = fold_builtin_call_array (loc,
6242 gimple_call_return_type (call_stmt),
6243 fn, gimple_call_num_args (stmt), args);
6244 if (retval)
6246 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6247 STRIP_NOPS (retval);
6248 retval = fold_convert (gimple_call_return_type (call_stmt),
6249 retval);
6251 return retval;
6253 return NULL_TREE;
6256 default:
6257 return NULL_TREE;
6261 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6262 Returns NULL_TREE if folding to a constant is not possible, otherwise
6263 returns a constant according to is_gimple_min_invariant. */
6265 tree
6266 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6268 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6269 if (res && is_gimple_min_invariant (res))
6270 return res;
6271 return NULL_TREE;
6275 /* The following set of functions are supposed to fold references using
6276 their constant initializers. */
6278 /* See if we can find constructor defining value of BASE.
6279 When we know the consructor with constant offset (such as
6280 base is array[40] and we do know constructor of array), then
6281 BIT_OFFSET is adjusted accordingly.
6283 As a special case, return error_mark_node when constructor
6284 is not explicitly available, but it is known to be zero
6285 such as 'static const int a;'. */
6286 static tree
6287 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6288 tree (*valueize)(tree))
6290 HOST_WIDE_INT bit_offset2, size, max_size;
6291 bool reverse;
6293 if (TREE_CODE (base) == MEM_REF)
6295 if (!integer_zerop (TREE_OPERAND (base, 1)))
6297 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6298 return NULL_TREE;
6299 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6300 * BITS_PER_UNIT);
6303 if (valueize
6304 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6305 base = valueize (TREE_OPERAND (base, 0));
6306 if (!base || TREE_CODE (base) != ADDR_EXPR)
6307 return NULL_TREE;
6308 base = TREE_OPERAND (base, 0);
6310 else if (valueize
6311 && TREE_CODE (base) == SSA_NAME)
6312 base = valueize (base);
6314 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6315 DECL_INITIAL. If BASE is a nested reference into another
6316 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6317 the inner reference. */
6318 switch (TREE_CODE (base))
6320 case VAR_DECL:
6321 case CONST_DECL:
6323 tree init = ctor_for_folding (base);
6325 /* Our semantic is exact opposite of ctor_for_folding;
6326 NULL means unknown, while error_mark_node is 0. */
6327 if (init == error_mark_node)
6328 return NULL_TREE;
6329 if (!init)
6330 return error_mark_node;
6331 return init;
6334 case VIEW_CONVERT_EXPR:
6335 return get_base_constructor (TREE_OPERAND (base, 0),
6336 bit_offset, valueize);
6338 case ARRAY_REF:
6339 case COMPONENT_REF:
6340 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6341 &reverse);
6342 if (max_size == -1 || size != max_size)
6343 return NULL_TREE;
6344 *bit_offset += bit_offset2;
6345 return get_base_constructor (base, bit_offset, valueize);
6347 case CONSTRUCTOR:
6348 return base;
6350 default:
6351 if (CONSTANT_CLASS_P (base))
6352 return base;
6354 return NULL_TREE;
6358 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6359 SIZE to the memory at bit OFFSET. */
/* FROM_DECL is the declaration the constructor ultimately originated from;
   it is only forwarded to the recursive fold_ctor_reference call.  Returns
   the folded constant value, or NULL_TREE when the access cannot be resolved
   to a single array element.  */
6361 static tree
6362 fold_array_ctor_reference (tree type, tree ctor,
6363 unsigned HOST_WIDE_INT offset,
6364 unsigned HOST_WIDE_INT size,
6365 tree from_decl)
6367 offset_int low_bound;
6368 offset_int elt_size;
6369 offset_int access_index;
6370 tree domain_type = NULL_TREE;
6371 HOST_WIDE_INT inner_offset;
6373 /* Compute low bound and elt size. */
6374 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6375 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6376 if (domain_type && TYPE_MIN_VALUE (domain_type))
6378 /* Static constructors for variably sized objects make no sense. */
6379 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6380 return NULL_TREE;
6381 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6383 else
6384 low_bound = 0;
6385 /* Static constructors for variably sized objects make no sense. */
6386 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6387 return NULL_TREE;
6388 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6390 /* We can handle only constantly sized accesses that are known to not
6391 be larger than size of array element. */
6392 if (!TYPE_SIZE_UNIT (type)
6393 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6394 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6395 || elt_size == 0)
6396 return NULL_TREE;
6398 /* Compute the array index we look for. */
6399 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6400 elt_size);
6401 access_index += low_bound;
6403 /* And offset within the access. */
6404 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6406 /* See if the array field is large enough to span whole access. We do not
6407 care to fold accesses spanning multiple array indexes. */
6408 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6409 return NULL_TREE;
/* Recurse into the selected element; it may itself be an aggregate.  */
6410 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6411 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6413 /* When memory is not explicitly mentioned in constructor,
6414 it is 0 (or out of range). */
6415 return build_zero_cst (type);
6418 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6419 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
/* FROM_DECL is forwarded to the recursive fold_ctor_reference call.
   Returns the folded constant, or NULL_TREE when the access overlaps a
   field only partially or spans multiple fields.  */
6421 static tree
6422 fold_nonarray_ctor_reference (tree type, tree ctor,
6423 unsigned HOST_WIDE_INT offset,
6424 unsigned HOST_WIDE_INT size,
6425 tree from_decl)
6427 unsigned HOST_WIDE_INT cnt;
6428 tree cfield, cval;
6430 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6431 cval)
6433 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6434 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6435 tree field_size = DECL_SIZE (cfield);
6436 offset_int bitoffset;
6437 offset_int bitoffset_end, access_end;
6439 /* Variable sized objects in static constructors makes no sense,
6440 but field_size can be NULL for flexible array members. */
6441 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6442 && TREE_CODE (byte_offset) == INTEGER_CST
6443 && (field_size != NULL_TREE
6444 ? TREE_CODE (field_size) == INTEGER_CST
6445 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6447 /* Compute bit offset of the field. */
6448 bitoffset = (wi::to_offset (field_offset)
6449 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6450 /* Compute bit offset where the field ends. */
6451 if (field_size != NULL_TREE)
6452 bitoffset_end = bitoffset + wi::to_offset (field_size);
6453 else
6454 bitoffset_end = 0;
6456 access_end = offset_int (offset) + size;
6458 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6459 [BITOFFSET, BITOFFSET_END)? */
6460 if (wi::cmps (access_end, bitoffset) > 0
6461 && (field_size == NULL_TREE
6462 || wi::lts_p (offset, bitoffset_end)))
6464 offset_int inner_offset = offset_int (offset) - bitoffset;
6465 /* We do have overlap. Now see if field is large enough to
6466 cover the access. Give up for accesses spanning multiple
6467 fields. */
6468 if (wi::cmps (access_end, bitoffset_end) > 0)
6469 return NULL_TREE;
6470 if (offset < bitoffset)
6471 return NULL_TREE;
6472 return fold_ctor_reference (type, cval,
6473 inner_offset.to_uhwi (), size,
6474 from_decl);
6477 /* When memory is not explicitly mentioned in constructor, it is 0. */
6478 return build_zero_cst (type);
6481 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6482 to the memory at bit OFFSET. */
/* FROM_DECL is the declaration CTOR came from, passed to
   canonicalize_constructor_val.  Returns the folded constant or NULL_TREE.  */
6484 tree
6485 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6486 unsigned HOST_WIDE_INT size, tree from_decl)
6488 tree ret;
6490 /* We found the field with exact match. */
6491 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6492 && !offset)
6493 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6495 /* We are at the end of walk, see if we can view convert the
6496 result. */
6497 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6498 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6499 && !compare_tree_int (TYPE_SIZE (type), size)
6500 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6502 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6503 if (ret)
6505 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6506 if (ret)
6507 STRIP_USELESS_TYPE_CONVERSION (ret);
6509 return ret;
6511 /* For constants and byte-aligned/sized reads try to go through
6512 native_encode/interpret. */
6513 if (CONSTANT_CLASS_P (ctor)
6514 && BITS_PER_UNIT == 8
6515 && offset % BITS_PER_UNIT == 0
6516 && size % BITS_PER_UNIT == 0
6517 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6519 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6520 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6521 offset / BITS_PER_UNIT);
6522 if (len > 0)
6523 return native_interpret_expr (type, buf, len);
/* Otherwise dispatch on the constructor kind; array/vector constructors
   are indexed, everything else is searched field by field.  */
6525 if (TREE_CODE (ctor) == CONSTRUCTOR)
6528 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6529 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6530 return fold_array_ctor_reference (type, ctor, offset, size,
6531 from_decl);
6532 else
6533 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6534 from_decl);
6537 return NULL_TREE;
6540 /* Return the tree representing the element referenced by T if T is an
6541 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6542 names using VALUEIZE. Return NULL_TREE otherwise. */
/* VALUEIZE may be NULL, in which case SSA names are taken as-is.  */
6544 tree
6545 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6547 tree ctor, idx, base;
6548 HOST_WIDE_INT offset, size, max_size;
6549 tree tem;
6550 bool reverse;
6552 if (TREE_THIS_VOLATILE (t))
6553 return NULL_TREE;
6555 if (DECL_P (t))
6556 return get_symbol_constant_value (t);
6558 tem = fold_read_from_constant_string (t);
6559 if (tem)
6560 return tem;
6562 switch (TREE_CODE (t))
6564 case ARRAY_REF:
6565 case ARRAY_RANGE_REF:
6566 /* Constant indexes are handled well by get_base_constructor.
6567 Only special case variable offsets.
6568 FIXME: This code can't handle nested references with variable indexes
6569 (they will be handled only by iteration of ccp). Perhaps we can bring
6570 get_ref_base_and_extent here and make it use a valueize callback. */
6571 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6572 && valueize
6573 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6574 && TREE_CODE (idx) == INTEGER_CST)
6576 tree low_bound, unit_size;
6578 /* If the resulting bit-offset is constant, track it. */
6579 if ((low_bound = array_ref_low_bound (t),
6580 TREE_CODE (low_bound) == INTEGER_CST)
6581 && (unit_size = array_ref_element_size (t),
6582 tree_fits_uhwi_p (unit_size)))
6584 offset_int woffset
6585 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6586 TYPE_PRECISION (TREE_TYPE (idx)));
6588 if (wi::fits_shwi_p (woffset))
6590 offset = woffset.to_shwi ();
6591 /* TODO: This code seems wrong, multiply then check
6592 to see if it fits. */
6593 offset *= tree_to_uhwi (unit_size);
6594 offset *= BITS_PER_UNIT;
6596 base = TREE_OPERAND (t, 0);
6597 ctor = get_base_constructor (base, &offset, valueize);
6598 /* Empty constructor. Always fold to 0. */
6599 if (ctor == error_mark_node)
6600 return build_zero_cst (TREE_TYPE (t));
6601 /* Out of bound array access. Value is undefined,
6602 but don't fold. */
6603 if (offset < 0)
6604 return NULL_TREE;
6605 /* We can not determine ctor. */
6606 if (!ctor)
6607 return NULL_TREE;
6608 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6609 tree_to_uhwi (unit_size)
6610 * BITS_PER_UNIT,
6611 base);
6615 /* Fallthru. */
6617 case COMPONENT_REF:
6618 case BIT_FIELD_REF:
6619 case TARGET_MEM_REF:
6620 case MEM_REF:
6621 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6622 ctor = get_base_constructor (base, &offset, valueize);
6624 /* Empty constructor. Always fold to 0. */
6625 if (ctor == error_mark_node)
6626 return build_zero_cst (TREE_TYPE (t));
6627 /* We do not know precise address. */
6628 if (max_size == -1 || max_size != size)
6629 return NULL_TREE;
6630 /* We can not determine ctor. */
6631 if (!ctor)
6632 return NULL_TREE;
6634 /* Out of bound array access. Value is undefined, but don't fold. */
6635 if (offset < 0)
6636 return NULL_TREE;
6638 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6639 base);
6641 case REALPART_EXPR:
6642 case IMAGPART_EXPR:
6644 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6645 if (c && TREE_CODE (c) == COMPLEX_CST)
6646 return fold_build1_loc (EXPR_LOCATION (t),
6647 TREE_CODE (t), TREE_TYPE (t), c);
6648 break;
6651 default:
6652 break;
6655 return NULL_TREE;
/* Fold a reference T into constant aggregates without SSA valueization;
   convenience wrapper around fold_const_aggregate_ref_1.  */
6658 tree
6659 fold_const_aggregate_ref (tree t)
6661 return fold_const_aggregate_ref_1 (t, NULL);
6664 /* Lookup virtual method with index TOKEN in a virtual table V
6665 at OFFSET.
6666 Set CAN_REFER if non-NULL to false if method
6667 is not referable or if the virtual table is ill-formed (such as rewritten
6668 by non-C++ produced symbol). Otherwise just return NULL in that case. */
6670 tree
6671 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6672 tree v,
6673 unsigned HOST_WIDE_INT offset,
6674 bool *can_refer)
6676 tree vtable = v, init, fn;
6677 unsigned HOST_WIDE_INT size;
6678 unsigned HOST_WIDE_INT elt_size, access_index;
6679 tree domain_type;
6681 if (can_refer)
6682 *can_refer = true;
6684 /* First of all double check we have virtual table. */
6685 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6687 /* Pass down that we lost track of the target. */
6688 if (can_refer)
6689 *can_refer = false;
6690 return NULL_TREE;
6693 init = ctor_for_folding (v);
6695 /* The virtual tables should always be born with constructors
6696 and we always should assume that they are available for
6697 folding. At the moment we do not stream them in all cases,
6698 but it should never happen that ctor seem unreachable. */
6699 gcc_assert (init);
6700 if (init == error_mark_node)
6702 /* Pass down that we lost track of the target. */
6703 if (can_refer)
6704 *can_refer = false;
6705 return NULL_TREE;
6707 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE)
6708 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6709 offset *= BITS_PER_UNIT;
6710 offset += token * size;
6712 /* Lookup the value in the constructor that is assumed to be array.
6713 This is equivalent to
6714 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6715 offset, size, NULL);
6716 but in a constant time. We expect that frontend produced a simple
6717 array without indexed initializers. */
6719 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6720 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6721 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6722 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6724 access_index = offset / BITS_PER_UNIT / elt_size;
6725 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6727 /* This code makes an assumption that there are no
6728 indexed fields produced by C++ FE, so we can directly index the array. */
6729 if (access_index < CONSTRUCTOR_NELTS (init))
6731 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6732 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6733 STRIP_NOPS (fn);
6735 else
6736 fn = NULL;
6738 /* For type inconsistent program we may end up looking up virtual method
6739 in virtual table that does not contain TOKEN entries. We may overrun
6740 the virtual table and pick up a constant or RTTI info pointer.
6741 In any case the call is undefined. */
6742 if (!fn
6743 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6744 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6745 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6746 else
6748 fn = TREE_OPERAND (fn, 0);
6750 /* When cgraph node is missing and function is not public, we cannot
6751 devirtualize. This can happen in WHOPR when the actual method
6752 ends up in other partition, because we found devirtualization
6753 possibility too late. */
6754 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6756 if (can_refer)
6758 *can_refer = false;
6759 return fn;
6761 return NULL_TREE;
6765 /* Make sure we create a cgraph node for functions we'll reference.
6766 They can be non-existent if the reference comes from an entry
6767 of an external vtable for example. */
6768 cgraph_node::get_create (fn);
6770 return fn;
6773 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6774 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6775 KNOWN_BINFO carries the binfo describing the true type of
6776 OBJ_TYPE_REF_OBJECT(REF).
6777 Set CAN_REFER if non-NULL to false if method
6778 is not referable or if the virtual table is ill-formed (such as rewritten
6779 by non-C++ produced symbol). Otherwise just return NULL in that case. */
6781 tree
6782 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6783 bool *can_refer)
6785 unsigned HOST_WIDE_INT offset;
6786 tree v;
6788 v = BINFO_VTABLE (known_binfo);
6789 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6790 if (!v)
6791 return NULL_TREE;
/* Decompose the vtable pointer value into the vtable VAR_DECL and a
   byte offset into it; give up if it has an unexpected shape.  */
6793 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6795 if (can_refer)
6796 *can_refer = false;
6797 return NULL_TREE;
6799 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6802 /* Given a pointer value T, return a simplified version of an
6803 indirection through T, or NULL_TREE if no simplification is
6804 possible. Note that the resulting type may be different from
6805 the type pointed to in the sense that it is still compatible
6806 from the langhooks point of view. */
6808 tree
6809 gimple_fold_indirect_ref (tree t)
6811 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6812 tree sub = t;
6813 tree subtype;
6815 STRIP_NOPS (sub);
6816 subtype = TREE_TYPE (sub);
6817 if (!POINTER_TYPE_P (subtype)
6818 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6819 return NULL_TREE;
6821 if (TREE_CODE (sub) == ADDR_EXPR)
6823 tree op = TREE_OPERAND (sub, 0);
6824 tree optype = TREE_TYPE (op);
6825 /* *&p => p */
6826 if (useless_type_conversion_p (type, optype))
6827 return op;
6829 /* *(foo *)&fooarray => fooarray[0] */
6830 if (TREE_CODE (optype) == ARRAY_TYPE
6831 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6832 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6834 tree type_domain = TYPE_DOMAIN (optype);
6835 tree min_val = size_zero_node;
6836 if (type_domain && TYPE_MIN_VALUE (type_domain))
6837 min_val = TYPE_MIN_VALUE (type_domain);
6838 if (TREE_CODE (min_val) == INTEGER_CST)
6839 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6841 /* *(foo *)&complexfoo => __real__ complexfoo */
6842 else if (TREE_CODE (optype) == COMPLEX_TYPE
6843 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6844 return fold_build1 (REALPART_EXPR, type, op);
6845 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6846 else if (TREE_CODE (optype) == VECTOR_TYPE
6847 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6849 tree part_width = TYPE_SIZE (type);
6850 tree index = bitsize_int (0);
6851 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6855 /* *(p + CST) -> ... */
6856 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6857 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6859 tree addr = TREE_OPERAND (sub, 0);
6860 tree off = TREE_OPERAND (sub, 1);
6861 tree addrtype;
6863 STRIP_NOPS (addr);
6864 addrtype = TREE_TYPE (addr);
6866 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6867 if (TREE_CODE (addr) == ADDR_EXPR
6868 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6869 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6870 && tree_fits_uhwi_p (off))
6872 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6873 tree part_width = TYPE_SIZE (type);
6874 unsigned HOST_WIDE_INT part_widthi
6875 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6876 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6877 tree index = bitsize_int (indexi);
/* Only fold when the offset selects an element inside the vector.  */
6878 if (offset / part_widthi
6879 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6880 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6881 part_width, index);
6884 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6885 if (TREE_CODE (addr) == ADDR_EXPR
6886 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6887 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6889 tree size = TYPE_SIZE_UNIT (type);
6890 if (tree_int_cst_equal (size, off))
6891 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6894 /* *(p + CST) -> MEM_REF <p, CST>. */
6895 if (TREE_CODE (addr) != ADDR_EXPR
6896 || DECL_P (TREE_OPERAND (addr, 0)))
6897 return fold_build2 (MEM_REF, type,
6898 addr,
6899 wide_int_to_tree (ptype, wi::to_wide (off)));
6902 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6903 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6904 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6905 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6907 tree type_domain;
6908 tree min_val = size_zero_node;
6909 tree osub = sub;
6910 sub = gimple_fold_indirect_ref (sub);
6911 if (! sub)
6912 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6913 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6914 if (type_domain && TYPE_MIN_VALUE (type_domain))
6915 min_val = TYPE_MIN_VALUE (type_domain);
6916 if (TREE_CODE (min_val) == INTEGER_CST)
6917 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6920 return NULL_TREE;
6923 /* Return true if CODE is an operation that when operating on signed
6924 integer types involves undefined behavior on overflow and the
6925 operation can be expressed with unsigned arithmetic. */
/* This is the predicate rewrite_to_defined_overflow relies on: each
   accepted code has a wrap-around unsigned counterpart.  */
6927 bool
6928 arith_code_with_undefined_signed_overflow (tree_code code)
6930 switch (code)
6932 case PLUS_EXPR:
6933 case MINUS_EXPR:
6934 case MULT_EXPR:
6935 case NEGATE_EXPR:
6936 case POINTER_PLUS_EXPR:
6937 return true;
6938 default:
6939 return false;
6943 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6944 operation that can be transformed to unsigned arithmetic by converting
6945 its operand, carrying out the operation in the corresponding unsigned
6946 type and converting the result back to the original type.
6948 Returns a sequence of statements that replace STMT and also contain
6949 a modified form of STMT itself. */
6951 gimple_seq
6952 rewrite_to_defined_overflow (gimple *stmt)
6954 if (dump_file && (dump_flags & TDF_DETAILS))
6956 fprintf (dump_file, "rewriting stmt with undefined signed "
6957 "overflow ");
6958 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6961 tree lhs = gimple_assign_lhs (stmt);
6962 tree type = unsigned_type_for (TREE_TYPE (lhs));
6963 gimple_seq stmts = NULL;
/* Convert every operand of STMT to the unsigned type, accumulating the
   conversion statements in STMTS.  */
6964 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6966 tree op = gimple_op (stmt, i);
6967 op = gimple_convert (&stmts, type, op);
6968 gimple_set_op (stmt, i, op);
6970 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
/* POINTER_PLUS_EXPR has no unsigned form; plain PLUS_EXPR on the
   unsigned type is its wrap-around equivalent.  */
6971 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6972 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6973 gimple_seq_add_stmt (&stmts, stmt);
/* Convert the unsigned result back to the original LHS type.  */
6974 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6975 gimple_seq_add_stmt (&stmts, cvt);
6977 return stmts;
6981 /* The valueization hook we use for the gimple_build API simplification.
6982 This makes us match fold_buildN behavior by only combining with
6983 statements in the sequence(s) we are currently building. */
/* OP is an SSA name; returning OP allows combining with its definition,
   returning NULL_TREE forbids it.  Definitions not yet inserted into a
   basic block are exactly those created by the current gimple_build
   sequence.  */
6985 static tree
6986 gimple_build_valueize (tree op)
6988 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6989 return op;
6990 return NULL_TREE;
6993 /* Build the expression CODE OP0 of type TYPE with location LOC,
6994 simplifying it first if possible. Returns the built
6995 expression value and appends statements possibly defining it
6996 to SEQ. */
6998 tree
6999 gimple_build (gimple_seq *seq, location_t loc,
7000 enum tree_code code, tree type, tree op0)
7002 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7003 if (!res)
7005 res = create_tmp_reg_or_ssa_name (type);
7006 gimple *stmt;
/* These codes are represented as a unary expression tree on the RHS
   rather than as a plain two-operand assignment.  */
7007 if (code == REALPART_EXPR
7008 || code == IMAGPART_EXPR
7009 || code == VIEW_CONVERT_EXPR)
7010 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7011 else
7012 stmt = gimple_build_assign (res, code, op0);
7013 gimple_set_location (stmt, loc);
7014 gimple_seq_add_stmt_without_update (seq, stmt);
7016 return res;
7019 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7020 simplifying it first if possible. Returns the built
7021 expression value and appends statements possibly defining it
7022 to SEQ. */
/* Binary-operation overload; see the unary gimple_build above.  */
7024 tree
7025 gimple_build (gimple_seq *seq, location_t loc,
7026 enum tree_code code, tree type, tree op0, tree op1)
7028 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7029 if (!res)
7031 res = create_tmp_reg_or_ssa_name (type);
7032 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7033 gimple_set_location (stmt, loc);
7034 gimple_seq_add_stmt_without_update (seq, stmt);
7036 return res;
7039 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7040 simplifying it first if possible. Returns the built
7041 expression value and appends statements possibly defining it
7042 to SEQ. */
7044 tree
7045 gimple_build (gimple_seq *seq, location_t loc,
7046 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7048 tree res = gimple_simplify (code, type, op0, op1, op2,
7049 seq, gimple_build_valueize);
7050 if (!res)
7052 res = create_tmp_reg_or_ssa_name (type);
7053 gimple *stmt;
/* BIT_FIELD_REF is represented as a reference tree on the RHS rather
   than as a three-operand assignment.  */
7054 if (code == BIT_FIELD_REF)
7055 stmt = gimple_build_assign (res, code,
7056 build3 (code, type, op0, op1, op2));
7057 else
7058 stmt = gimple_build_assign (res, code, op0, op1, op2);
7059 gimple_set_location (stmt, loc);
7060 gimple_seq_add_stmt_without_update (seq, stmt);
7062 return res;
7065 /* Build the call FN (ARG0) with a result of type TYPE
7066 (or no result if TYPE is void) with location LOC,
7067 simplifying it first if possible. Returns the built
7068 expression value (or NULL_TREE if TYPE is void) and appends
7069 statements possibly defining it to SEQ. */
7071 tree
7072 gimple_build (gimple_seq *seq, location_t loc,
7073 enum built_in_function fn, tree type, tree arg0)
7075 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7076 if (!res)
7078 tree decl = builtin_decl_implicit (fn);
7079 gimple *stmt = gimple_build_call (decl, 1, arg0);
/* A void call has no LHS; RES then stays NULL_TREE.  */
7080 if (!VOID_TYPE_P (type))
7082 res = create_tmp_reg_or_ssa_name (type);
7083 gimple_call_set_lhs (stmt, res);
7085 gimple_set_location (stmt, loc);
7086 gimple_seq_add_stmt_without_update (seq, stmt);
7088 return res;
7091 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7092 (or no result if TYPE is void) with location LOC,
7093 simplifying it first if possible. Returns the built
7094 expression value (or NULL_TREE if TYPE is void) and appends
7095 statements possibly defining it to SEQ. */
/* Two-argument builtin-call overload; see the one-argument form above.  */
7097 tree
7098 gimple_build (gimple_seq *seq, location_t loc,
7099 enum built_in_function fn, tree type, tree arg0, tree arg1)
7101 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7102 if (!res)
7104 tree decl = builtin_decl_implicit (fn);
7105 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7106 if (!VOID_TYPE_P (type))
7108 res = create_tmp_reg_or_ssa_name (type);
7109 gimple_call_set_lhs (stmt, res);
7111 gimple_set_location (stmt, loc);
7112 gimple_seq_add_stmt_without_update (seq, stmt);
7114 return res;
7117 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7118 (or no result if TYPE is void) with location LOC,
7119 simplifying it first if possible. Returns the built
7120 expression value (or NULL_TREE if TYPE is void) and appends
7121 statements possibly defining it to SEQ. */
/* Three-argument builtin-call overload; see the one-argument form above.  */
7123 tree
7124 gimple_build (gimple_seq *seq, location_t loc,
7125 enum built_in_function fn, tree type,
7126 tree arg0, tree arg1, tree arg2)
7128 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7129 seq, gimple_build_valueize);
7130 if (!res)
7132 tree decl = builtin_decl_implicit (fn);
7133 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7134 if (!VOID_TYPE_P (type))
7136 res = create_tmp_reg_or_ssa_name (type);
7137 gimple_call_set_lhs (stmt, res);
7139 gimple_set_location (stmt, loc);
7140 gimple_seq_add_stmt_without_update (seq, stmt);
7142 return res;
7145 /* Build the conversion (TYPE) OP with a result of type TYPE
7146 with location LOC if such conversion is necessary in GIMPLE,
7147 simplifying it first.
7148 Returns the built expression value and appends
7149 statements possibly defining it to SEQ. */
7151 tree
7152 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7154 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7155 return op;
7156 return gimple_build (seq, loc, NOP_EXPR, type, op);
7159 /* Build the conversion (ptrofftype) OP with a result of a type
7160 compatible with ptrofftype with location LOC if such conversion
7161 is necessary in GIMPLE, simplifying it first.
7162 Returns the built expression value and appends
7163 statements possibly defining it to SEQ. */
7165 tree
7166 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7168 if (ptrofftype_p (TREE_TYPE (op)))
7169 return op;
7170 return gimple_convert (seq, loc, sizetype, op);
7173 /* Build a vector of type TYPE in which each element has the value OP.
7174 Return a gimple value for the result, appending any new statements
7175 to SEQ. */
7177 tree
7178 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7179 tree op)
7181 tree res, vec = build_vector_from_val (type, op);
/* If OP was constant the result is already a valid gimple operand.  */
7182 if (is_gimple_val (vec))
7183 return vec;
/* Otherwise emit an assignment of the CONSTRUCTOR to a fresh temporary.  */
7184 if (gimple_in_ssa_p (cfun))
7185 res = make_ssa_name (type);
7186 else
7187 res = create_tmp_reg (type);
7188 gimple *stmt = gimple_build_assign (res, vec);
7189 gimple_set_location (stmt, loc);
7190 gimple_seq_add_stmt_without_update (seq, stmt);
7191 return res;
7194 /* Build a vector from BUILDER, handling the case in which some elements
7195 are non-constant. Return a gimple value for the result, appending any
7196 new instructions to SEQ.
7198 BUILDER must not have a stepped encoding on entry. This is because
7199 the function is not geared up to handle the arithmetic that would
7200 be needed in the variable case, and any code building a vector that
7201 is known to be constant should use BUILDER->build () directly. */
7203 tree
7204 gimple_build_vector (gimple_seq *seq, location_t loc,
7205 tree_vector_builder *builder)
7207 gcc_assert (builder->nelts_per_pattern () <= 2);
7208 unsigned int encoded_nelts = builder->encoded_nelts ();
/* Scan the encoded elements; a single non-constant one forces the
   CONSTRUCTOR-plus-assignment path below.  */
7209 for (unsigned int i = 0; i < encoded_nelts; ++i)
7210 if (!TREE_CONSTANT ((*builder)[i]))
7212 tree type = builder->type ();
7213 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
7214 vec<constructor_elt, va_gc> *v;
7215 vec_alloc (v, nelts);
7216 for (i = 0; i < nelts; ++i)
7217 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7219 tree res;
7220 if (gimple_in_ssa_p (cfun))
7221 res = make_ssa_name (type);
7222 else
7223 res = create_tmp_reg (type);
7224 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7225 gimple_set_location (stmt, loc);
7226 gimple_seq_add_stmt_without_update (seq, stmt);
7227 return res;
/* All elements constant: build the VECTOR_CST directly.  */
7229 return builder->build ();
7232 /* Return true if the result of assignment STMT is known to be non-negative.
7233 If the return value is based on the assumption that signed overflow is
7234 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7235 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* Dispatches on the RHS class to the matching fold-const predicate.  */
7237 static bool
7238 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7239 int depth)
7241 enum tree_code code = gimple_assign_rhs_code (stmt);
7242 switch (get_gimple_rhs_class (code))
7244 case GIMPLE_UNARY_RHS:
7245 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7246 gimple_expr_type (stmt),
7247 gimple_assign_rhs1 (stmt),
7248 strict_overflow_p, depth);
7249 case GIMPLE_BINARY_RHS:
7250 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7251 gimple_expr_type (stmt),
7252 gimple_assign_rhs1 (stmt),
7253 gimple_assign_rhs2 (stmt),
7254 strict_overflow_p, depth);
7255 case GIMPLE_TERNARY_RHS:
7256 return false;
7257 case GIMPLE_SINGLE_RHS:
7258 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7259 strict_overflow_p, depth);
7260 case GIMPLE_INVALID_RHS:
7261 break;
7263 gcc_unreachable ();
7266 /* Return true if return value of call STMT is known to be non-negative.
7267 If the return value is based on the assumption that signed overflow is
7268 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7269 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7271 static bool
7272 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7273 int depth)
/* Only the first two call arguments participate in the query; missing
   arguments are passed as NULL_TREE.  */
7275 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7276 gimple_call_arg (stmt, 0) : NULL_TREE;
7277 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7278 gimple_call_arg (stmt, 1) : NULL_TREE;
7280 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7281 gimple_call_combined_fn (stmt),
7282 arg0,
7283 arg1,
7284 strict_overflow_p, depth);
7287 /* Return true if return value of call STMT is known to be non-negative.
7288 If the return value is based on the assumption that signed overflow is
7289 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7290 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* For a PHI the result is non-negative only if every incoming argument
   is; DEPTH is incremented for the recursive per-argument queries.  */
7292 static bool
7293 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7294 int depth)
7296 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7298 tree arg = gimple_phi_arg_def (stmt, i);
7299 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7300 return false;
7302 return true;
7305 /* Return true if STMT is known to compute a non-negative value.
7306 If the return value is based on the assumption that signed overflow is
7307 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7308 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* Top-level dispatcher over the assign/call/phi helpers above; any other
   statement kind conservatively answers false.  */
7310 bool
7311 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7312 int depth)
7314 switch (gimple_code (stmt))
7316 case GIMPLE_ASSIGN:
7317 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7318 depth);
7319 case GIMPLE_CALL:
7320 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7321 depth);
7322 case GIMPLE_PHI:
7323 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7324 depth);
7325 default:
7326 return false;
7330 /* Return true if the floating-point value computed by assignment STMT
7331 is known to have an integer value. We also allow +Inf, -Inf and NaN
7332 to be considered integer values. Return false for signaling NaN.
7334 DEPTH is the current nesting depth of the query. */
/* Dispatches on the RHS class to the matching fold-const predicate.  */
7336 static bool
7337 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7339 enum tree_code code = gimple_assign_rhs_code (stmt);
7340 switch (get_gimple_rhs_class (code))
7342 case GIMPLE_UNARY_RHS:
7343 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7344 gimple_assign_rhs1 (stmt), depth);
7345 case GIMPLE_BINARY_RHS:
7346 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7347 gimple_assign_rhs1 (stmt),
7348 gimple_assign_rhs2 (stmt), depth);
7349 case GIMPLE_TERNARY_RHS:
7350 return false;
7351 case GIMPLE_SINGLE_RHS:
7352 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7353 case GIMPLE_INVALID_RHS:
7354 break;
7356 gcc_unreachable ();
7359 /* Return true if the floating-point value computed by call STMT is known
7360 to have an integer value. We also allow +Inf, -Inf and NaN to be
7361 considered integer values. Return false for signaling NaN.
7363 DEPTH is the current nesting depth of the query. */
7365 static bool
7366 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7368 tree arg0 = (gimple_call_num_args (stmt) > 0
7369 ? gimple_call_arg (stmt, 0)
7370 : NULL_TREE);
7371 tree arg1 = (gimple_call_num_args (stmt) > 1
7372 ? gimple_call_arg (stmt, 1)
7373 : NULL_TREE);
7374 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7375 arg0, arg1, depth);
7378 /* Return true if the floating-point result of phi STMT is known to have
7379 an integer value. We also allow +Inf, -Inf and NaN to be considered
7380 integer values. Return false for signaling NaN.
7382 DEPTH is the current nesting depth of the query. */
7384 static bool
7385 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7387 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7389 tree arg = gimple_phi_arg_def (stmt, i);
7390 if (!integer_valued_real_single_p (arg, depth + 1))
7391 return false;
7393 return true;
7396 /* Return true if the floating-point value computed by STMT is known
7397 to have an integer value. We also allow +Inf, -Inf and NaN to be
7398 considered integer values. Return false for signaling NaN.
7400 DEPTH is the current nesting depth of the query. */
7402 bool
7403 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7405 switch (gimple_code (stmt))
7407 case GIMPLE_ASSIGN:
7408 return gimple_assign_integer_valued_real_p (stmt, depth);
7409 case GIMPLE_CALL:
7410 return gimple_call_integer_valued_real_p (stmt, depth);
7411 case GIMPLE_PHI:
7412 return gimple_phi_integer_valued_real_p (stmt, depth);
7413 default:
7414 return false;