* tree-vect-loop-manip.c (vect_do_peeling): Do not use
[official-gcc.git] / gcc / gimple-fold.c
blobadb6f3baf933445d1a559e4b39014f355eb66402
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-object-size.h"
44 #include "tree-ssa.h"
45 #include "tree-ssa-propagate.h"
46 #include "ipa-utils.h"
47 #include "tree-ssa-address.h"
48 #include "langhooks.h"
49 #include "gimplify-me.h"
50 #include "dbgcnt.h"
51 #include "builtins.h"
52 #include "tree-eh.h"
53 #include "gimple-match.h"
54 #include "gomp-constants.h"
55 #include "optabs-query.h"
56 #include "omp-general.h"
57 #include "ipa-chkp.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
66 /* Return true when DECL can be referenced from current unit.
67 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
68 We can get declarations that are not possible to reference for various
69 reasons:
71 1) When analyzing C++ virtual tables.
72 C++ virtual tables do have known constructors even
73 when they are keyed to other compilation unit.
74 Those tables can contain pointers to methods and vars
75 in other units. Those methods have both STATIC and EXTERNAL
76 set.
77 2) In WHOPR mode devirtualization might lead to reference
78 to method that was partitioned elsehwere.
79 In this case we have static VAR_DECL or FUNCTION_DECL
80 that has no corresponding callgraph/varpool node
81 declaring the body.
82 3) COMDAT functions referred by external vtables that
83 we devirtualize only during final compilation stage.
84 At this time we already decided that we will not output
85 the function body and thus we can't reference the symbol
86 directly. */
88 static bool
89 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
91 varpool_node *vnode;
92 struct cgraph_node *node;
93 symtab_node *snode;
95 if (DECL_ABSTRACT_P (decl))
96 return false;
98 /* We are concerned only about static/external vars and functions. */
99 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
100 || !VAR_OR_FUNCTION_DECL_P (decl))
101 return true;
103 /* Static objects can be referred only if they was not optimized out yet. */
104 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
106 /* Before we start optimizing unreachable code we can be sure all
107 static objects are defined. */
108 if (symtab->function_flags_ready)
109 return true;
110 snode = symtab_node::get (decl);
111 if (!snode || !snode->definition)
112 return false;
113 node = dyn_cast <cgraph_node *> (snode);
114 return !node || !node->global.inlined_to;
117 /* We will later output the initializer, so we can refer to it.
118 So we are concerned only when DECL comes from initializer of
119 external var or var that has been optimized out. */
120 if (!from_decl
121 || !VAR_P (from_decl)
122 || (!DECL_EXTERNAL (from_decl)
123 && (vnode = varpool_node::get (from_decl)) != NULL
124 && vnode->definition)
125 || (flag_ltrans
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->in_other_partition))
128 return true;
129 /* We are folding reference from external vtable. The vtable may reffer
130 to a symbol keyed to other compilation unit. The other compilation
131 unit may be in separate DSO and the symbol may be hidden. */
132 if (DECL_VISIBILITY_SPECIFIED (decl)
133 && DECL_EXTERNAL (decl)
134 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
135 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
136 return false;
137 /* When function is public, we always can introduce new reference.
138 Exception are the COMDAT functions where introducing a direct
139 reference imply need to include function body in the curren tunit. */
140 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
141 return true;
142 /* We have COMDAT. We are going to check if we still have definition
143 or if the definition is going to be output in other partition.
144 Bypass this when gimplifying; all needed functions will be produced.
146 As observed in PR20991 for already optimized out comdat virtual functions
147 it may be tempting to not necessarily give up because the copy will be
148 output elsewhere when corresponding vtable is output.
149 This is however not possible - ABI specify that COMDATs are output in
150 units where they are used and when the other unit was compiled with LTO
151 it is possible that vtable was kept public while the function itself
152 was privatized. */
153 if (!symtab->function_flags_ready)
154 return true;
156 snode = symtab_node::get (decl);
157 if (!snode
158 || ((!snode->definition || DECL_EXTERNAL (decl))
159 && (!snode->in_other_partition
160 || (!snode->forced_by_abi && !snode->force_output))))
161 return false;
162 node = dyn_cast <cgraph_node *> (snode);
163 return !node || !node->global.inlined_to;
166 /* Create a temporary for TYPE for a statement STMT. If the current function
167 is in SSA form, a SSA name is created. Otherwise a temporary register
168 is made. */
170 tree
171 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
173 if (gimple_in_ssa_p (cfun))
174 return make_ssa_name (type, stmt);
175 else
176 return create_tmp_reg (type);
179 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
180 acceptable form for is_gimple_min_invariant.
181 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
183 tree
184 canonicalize_constructor_val (tree cval, tree from_decl)
186 tree orig_cval = cval;
187 STRIP_NOPS (cval);
188 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
189 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
191 tree ptr = TREE_OPERAND (cval, 0);
192 if (is_gimple_min_invariant (ptr))
193 cval = build1_loc (EXPR_LOCATION (cval),
194 ADDR_EXPR, TREE_TYPE (ptr),
195 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
196 ptr,
197 fold_convert (ptr_type_node,
198 TREE_OPERAND (cval, 1))));
200 if (TREE_CODE (cval) == ADDR_EXPR)
202 tree base = NULL_TREE;
203 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
205 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
206 if (base)
207 TREE_OPERAND (cval, 0) = base;
209 else
210 base = get_base_address (TREE_OPERAND (cval, 0));
211 if (!base)
212 return NULL_TREE;
214 if (VAR_OR_FUNCTION_DECL_P (base)
215 && !can_refer_decl_in_current_unit_p (base, from_decl))
216 return NULL_TREE;
217 if (TREE_TYPE (base) == error_mark_node)
218 return NULL_TREE;
219 if (VAR_P (base))
220 TREE_ADDRESSABLE (base) = 1;
221 else if (TREE_CODE (base) == FUNCTION_DECL)
223 /* Make sure we create a cgraph node for functions we'll reference.
224 They can be non-existent if the reference comes from an entry
225 of an external vtable for example. */
226 cgraph_node::get_create (base);
228 /* Fixup types in global initializers. */
229 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
230 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
232 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
233 cval = fold_convert (TREE_TYPE (orig_cval), cval);
234 return cval;
236 if (TREE_OVERFLOW_P (cval))
237 return drop_tree_overflow (cval);
238 return orig_cval;
241 /* If SYM is a constant variable with known value, return the value.
242 NULL_TREE is returned otherwise. */
244 tree
245 get_symbol_constant_value (tree sym)
247 tree val = ctor_for_folding (sym);
248 if (val != error_mark_node)
250 if (val)
252 val = canonicalize_constructor_val (unshare_expr (val), sym);
253 if (val && is_gimple_min_invariant (val))
254 return val;
255 else
256 return NULL_TREE;
258 /* Variables declared 'const' without an initializer
259 have zero as the initializer if they may not be
260 overridden at link or run time. */
261 if (!val
262 && is_gimple_reg_type (TREE_TYPE (sym)))
263 return build_zero_cst (TREE_TYPE (sym));
266 return NULL_TREE;
271 /* Subroutine of fold_stmt. We perform several simplifications of the
272 memory reference tree EXPR and make sure to re-gimplify them properly
273 after propagation of constant addresses. IS_LHS is true if the
274 reference is supposed to be an lvalue. */
276 static tree
277 maybe_fold_reference (tree expr, bool is_lhs)
279 tree result;
281 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
282 || TREE_CODE (expr) == REALPART_EXPR
283 || TREE_CODE (expr) == IMAGPART_EXPR)
284 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
285 return fold_unary_loc (EXPR_LOCATION (expr),
286 TREE_CODE (expr),
287 TREE_TYPE (expr),
288 TREE_OPERAND (expr, 0));
289 else if (TREE_CODE (expr) == BIT_FIELD_REF
290 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
291 return fold_ternary_loc (EXPR_LOCATION (expr),
292 TREE_CODE (expr),
293 TREE_TYPE (expr),
294 TREE_OPERAND (expr, 0),
295 TREE_OPERAND (expr, 1),
296 TREE_OPERAND (expr, 2));
298 if (!is_lhs
299 && (result = fold_const_aggregate_ref (expr))
300 && is_gimple_min_invariant (result))
301 return result;
303 return NULL_TREE;
307 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
308 replacement rhs for the statement or NULL_TREE if no simplification
309 could be made. It is assumed that the operands have been previously
310 folded. */
312 static tree
313 fold_gimple_assign (gimple_stmt_iterator *si)
315 gimple *stmt = gsi_stmt (*si);
316 enum tree_code subcode = gimple_assign_rhs_code (stmt);
317 location_t loc = gimple_location (stmt);
319 tree result = NULL_TREE;
321 switch (get_gimple_rhs_class (subcode))
323 case GIMPLE_SINGLE_RHS:
325 tree rhs = gimple_assign_rhs1 (stmt);
327 if (TREE_CLOBBER_P (rhs))
328 return NULL_TREE;
330 if (REFERENCE_CLASS_P (rhs))
331 return maybe_fold_reference (rhs, false);
333 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
335 tree val = OBJ_TYPE_REF_EXPR (rhs);
336 if (is_gimple_min_invariant (val))
337 return val;
338 else if (flag_devirtualize && virtual_method_call_p (rhs))
340 bool final;
341 vec <cgraph_node *>targets
342 = possible_polymorphic_call_targets (rhs, stmt, &final);
343 if (final && targets.length () <= 1 && dbg_cnt (devirt))
345 if (dump_enabled_p ())
347 location_t loc = gimple_location_safe (stmt);
348 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
349 "resolving virtual function address "
350 "reference to function %s\n",
351 targets.length () == 1
352 ? targets[0]->name ()
353 : "NULL");
355 if (targets.length () == 1)
357 val = fold_convert (TREE_TYPE (val),
358 build_fold_addr_expr_loc
359 (loc, targets[0]->decl));
360 STRIP_USELESS_TYPE_CONVERSION (val);
362 else
363 /* We can not use __builtin_unreachable here because it
364 can not have address taken. */
365 val = build_int_cst (TREE_TYPE (val), 0);
366 return val;
371 else if (TREE_CODE (rhs) == ADDR_EXPR)
373 tree ref = TREE_OPERAND (rhs, 0);
374 tree tem = maybe_fold_reference (ref, true);
375 if (tem
376 && TREE_CODE (tem) == MEM_REF
377 && integer_zerop (TREE_OPERAND (tem, 1)))
378 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
379 else if (tem)
380 result = fold_convert (TREE_TYPE (rhs),
381 build_fold_addr_expr_loc (loc, tem));
382 else if (TREE_CODE (ref) == MEM_REF
383 && integer_zerop (TREE_OPERAND (ref, 1)))
384 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
386 if (result)
388 /* Strip away useless type conversions. Both the
389 NON_LVALUE_EXPR that may have been added by fold, and
390 "useless" type conversions that might now be apparent
391 due to propagation. */
392 STRIP_USELESS_TYPE_CONVERSION (result);
394 if (result != rhs && valid_gimple_rhs_p (result))
395 return result;
399 else if (TREE_CODE (rhs) == CONSTRUCTOR
400 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
402 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
403 unsigned i;
404 tree val;
406 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
407 if (! CONSTANT_CLASS_P (val))
408 return NULL_TREE;
410 return build_vector_from_ctor (TREE_TYPE (rhs),
411 CONSTRUCTOR_ELTS (rhs));
414 else if (DECL_P (rhs))
415 return get_symbol_constant_value (rhs);
417 break;
419 case GIMPLE_UNARY_RHS:
420 break;
422 case GIMPLE_BINARY_RHS:
423 break;
425 case GIMPLE_TERNARY_RHS:
426 result = fold_ternary_loc (loc, subcode,
427 TREE_TYPE (gimple_assign_lhs (stmt)),
428 gimple_assign_rhs1 (stmt),
429 gimple_assign_rhs2 (stmt),
430 gimple_assign_rhs3 (stmt));
432 if (result)
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
436 return result;
438 break;
440 case GIMPLE_INVALID_RHS:
441 gcc_unreachable ();
444 return NULL_TREE;
448 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
449 adjusting the replacement stmts location and virtual operands.
450 If the statement has a lhs the last stmt in the sequence is expected
451 to assign to that lhs. */
453 static void
454 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
456 gimple *stmt = gsi_stmt (*si_p);
458 if (gimple_has_location (stmt))
459 annotate_all_with_location (stmts, gimple_location (stmt));
461 /* First iterate over the replacement statements backward, assigning
462 virtual operands to their defining statements. */
463 gimple *laststore = NULL;
464 for (gimple_stmt_iterator i = gsi_last (stmts);
465 !gsi_end_p (i); gsi_prev (&i))
467 gimple *new_stmt = gsi_stmt (i);
468 if ((gimple_assign_single_p (new_stmt)
469 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
470 || (is_gimple_call (new_stmt)
471 && (gimple_call_flags (new_stmt)
472 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
474 tree vdef;
475 if (!laststore)
476 vdef = gimple_vdef (stmt);
477 else
478 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
479 gimple_set_vdef (new_stmt, vdef);
480 if (vdef && TREE_CODE (vdef) == SSA_NAME)
481 SSA_NAME_DEF_STMT (vdef) = new_stmt;
482 laststore = new_stmt;
486 /* Second iterate over the statements forward, assigning virtual
487 operands to their uses. */
488 tree reaching_vuse = gimple_vuse (stmt);
489 for (gimple_stmt_iterator i = gsi_start (stmts);
490 !gsi_end_p (i); gsi_next (&i))
492 gimple *new_stmt = gsi_stmt (i);
493 /* If the new statement possibly has a VUSE, update it with exact SSA
494 name we know will reach this one. */
495 if (gimple_has_mem_ops (new_stmt))
496 gimple_set_vuse (new_stmt, reaching_vuse);
497 gimple_set_modified (new_stmt, true);
498 if (gimple_vdef (new_stmt))
499 reaching_vuse = gimple_vdef (new_stmt);
502 /* If the new sequence does not do a store release the virtual
503 definition of the original statement. */
504 if (reaching_vuse
505 && reaching_vuse == gimple_vuse (stmt))
507 tree vdef = gimple_vdef (stmt);
508 if (vdef
509 && TREE_CODE (vdef) == SSA_NAME)
511 unlink_stmt_vdef (stmt);
512 release_ssa_name (vdef);
516 /* Finally replace the original statement with the sequence. */
517 gsi_replace_with_seq (si_p, stmts, false);
520 /* Convert EXPR into a GIMPLE value suitable for substitution on the
521 RHS of an assignment. Insert the necessary statements before
522 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
523 is replaced. If the call is expected to produces a result, then it
524 is replaced by an assignment of the new RHS to the result variable.
525 If the result is to be ignored, then the call is replaced by a
526 GIMPLE_NOP. A proper VDEF chain is retained by making the first
527 VUSE and the last VDEF of the whole sequence be the same as the replaced
528 statement and using new SSA names for stores in between. */
530 void
531 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
533 tree lhs;
534 gimple *stmt, *new_stmt;
535 gimple_stmt_iterator i;
536 gimple_seq stmts = NULL;
538 stmt = gsi_stmt (*si_p);
540 gcc_assert (is_gimple_call (stmt));
542 push_gimplify_context (gimple_in_ssa_p (cfun));
544 lhs = gimple_call_lhs (stmt);
545 if (lhs == NULL_TREE)
547 gimplify_and_add (expr, &stmts);
548 /* We can end up with folding a memcpy of an empty class assignment
549 which gets optimized away by C++ gimplification. */
550 if (gimple_seq_empty_p (stmts))
552 pop_gimplify_context (NULL);
553 if (gimple_in_ssa_p (cfun))
555 unlink_stmt_vdef (stmt);
556 release_defs (stmt);
558 gsi_replace (si_p, gimple_build_nop (), false);
559 return;
562 else
564 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
565 new_stmt = gimple_build_assign (lhs, tmp);
566 i = gsi_last (stmts);
567 gsi_insert_after_without_update (&i, new_stmt,
568 GSI_CONTINUE_LINKING);
571 pop_gimplify_context (NULL);
573 gsi_replace_with_seq_vops (si_p, stmts);
577 /* Replace the call at *GSI with the gimple value VAL. */
579 void
580 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
582 gimple *stmt = gsi_stmt (*gsi);
583 tree lhs = gimple_call_lhs (stmt);
584 gimple *repl;
585 if (lhs)
587 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
588 val = fold_convert (TREE_TYPE (lhs), val);
589 repl = gimple_build_assign (lhs, val);
591 else
592 repl = gimple_build_nop ();
593 tree vdef = gimple_vdef (stmt);
594 if (vdef && TREE_CODE (vdef) == SSA_NAME)
596 unlink_stmt_vdef (stmt);
597 release_ssa_name (vdef);
599 gsi_replace (gsi, repl, false);
602 /* Replace the call at *GSI with the new call REPL and fold that
603 again. */
605 static void
606 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
608 gimple *stmt = gsi_stmt (*gsi);
609 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
610 gimple_set_location (repl, gimple_location (stmt));
611 if (gimple_vdef (stmt)
612 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
614 gimple_set_vdef (repl, gimple_vdef (stmt));
615 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
617 if (gimple_vuse (stmt))
618 gimple_set_vuse (repl, gimple_vuse (stmt));
619 gsi_replace (gsi, repl, false);
620 fold_stmt (gsi);
623 /* Return true if VAR is a VAR_DECL or a component thereof. */
625 static bool
626 var_decl_component_p (tree var)
628 tree inner = var;
629 while (handled_component_p (inner))
630 inner = TREE_OPERAND (inner, 0);
631 return SSA_VAR_P (inner);
634 /* If the SIZE argument representing the size of an object is in a range
635 of values of which exactly one is valid (and that is zero), return
636 true, otherwise false. */
638 static bool
639 size_must_be_zero_p (tree size)
641 if (integer_zerop (size))
642 return true;
644 if (TREE_CODE (size) != SSA_NAME)
645 return false;
647 wide_int min, max;
648 enum value_range_type rtype = get_range_info (size, &min, &max);
649 if (rtype != VR_ANTI_RANGE)
650 return false;
652 tree type = TREE_TYPE (size);
653 int prec = TYPE_PRECISION (type);
655 wide_int wone = wi::one (prec);
657 /* Compute the value of SSIZE_MAX, the largest positive value that
658 can be stored in ssize_t, the signed counterpart of size_t. */
659 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
661 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
664 /* Fold function call to builtin mem{{,p}cpy,move}. Return
665 false if no simplification can be made.
666 If ENDP is 0, return DEST (like memcpy).
667 If ENDP is 1, return DEST+LEN (like mempcpy).
668 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
669 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
670 (memmove). */
672 static bool
673 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
674 tree dest, tree src, int endp)
676 gimple *stmt = gsi_stmt (*gsi);
677 tree lhs = gimple_call_lhs (stmt);
678 tree len = gimple_call_arg (stmt, 2);
679 tree destvar, srcvar;
680 location_t loc = gimple_location (stmt);
682 /* If the LEN parameter is a constant zero or in range where
683 the only valid value is zero, return DEST. */
684 if (size_must_be_zero_p (len))
686 gimple *repl;
687 if (gimple_call_lhs (stmt))
688 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
689 else
690 repl = gimple_build_nop ();
691 tree vdef = gimple_vdef (stmt);
692 if (vdef && TREE_CODE (vdef) == SSA_NAME)
694 unlink_stmt_vdef (stmt);
695 release_ssa_name (vdef);
697 gsi_replace (gsi, repl, false);
698 return true;
701 /* If SRC and DEST are the same (and not volatile), return
702 DEST{,+LEN,+LEN-1}. */
703 if (operand_equal_p (src, dest, 0))
705 unlink_stmt_vdef (stmt);
706 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
707 release_ssa_name (gimple_vdef (stmt));
708 if (!lhs)
710 gsi_replace (gsi, gimple_build_nop (), false);
711 return true;
713 goto done;
715 else
717 tree srctype, desttype;
718 unsigned int src_align, dest_align;
719 tree off0;
721 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
722 pointers as wide integer) and also may result in huge function
723 size because of inlined bounds copy. Thus don't inline for
724 functions we want to instrument. */
725 if (flag_check_pointer_bounds
726 && chkp_instrumentable_p (cfun->decl)
727 /* Even if data may contain pointers we can inline if copy
728 less than a pointer size. */
729 && (!tree_fits_uhwi_p (len)
730 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
731 return false;
733 /* Build accesses at offset zero with a ref-all character type. */
734 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
735 ptr_mode, true), 0);
737 /* If we can perform the copy efficiently with first doing all loads
738 and then all stores inline it that way. Currently efficiently
739 means that we can load all the memory into a single integer
740 register which is what MOVE_MAX gives us. */
741 src_align = get_pointer_alignment (src);
742 dest_align = get_pointer_alignment (dest);
743 if (tree_fits_uhwi_p (len)
744 && compare_tree_int (len, MOVE_MAX) <= 0
745 /* ??? Don't transform copies from strings with known length this
746 confuses the tree-ssa-strlen.c. This doesn't handle
747 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
748 reason. */
749 && !c_strlen (src, 2))
751 unsigned ilen = tree_to_uhwi (len);
752 if (pow2p_hwi (ilen))
754 scalar_int_mode mode;
755 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
756 if (type
757 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
758 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
759 /* If the destination pointer is not aligned we must be able
760 to emit an unaligned store. */
761 && (dest_align >= GET_MODE_ALIGNMENT (mode)
762 || !targetm.slow_unaligned_access (mode, dest_align)
763 || (optab_handler (movmisalign_optab, mode)
764 != CODE_FOR_nothing)))
766 tree srctype = type;
767 tree desttype = type;
768 if (src_align < GET_MODE_ALIGNMENT (mode))
769 srctype = build_aligned_type (type, src_align);
770 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
771 tree tem = fold_const_aggregate_ref (srcmem);
772 if (tem)
773 srcmem = tem;
774 else if (src_align < GET_MODE_ALIGNMENT (mode)
775 && targetm.slow_unaligned_access (mode, src_align)
776 && (optab_handler (movmisalign_optab, mode)
777 == CODE_FOR_nothing))
778 srcmem = NULL_TREE;
779 if (srcmem)
781 gimple *new_stmt;
782 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
784 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
785 srcmem
786 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
787 new_stmt);
788 gimple_assign_set_lhs (new_stmt, srcmem);
789 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
790 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
792 if (dest_align < GET_MODE_ALIGNMENT (mode))
793 desttype = build_aligned_type (type, dest_align);
794 new_stmt
795 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
796 dest, off0),
797 srcmem);
798 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
799 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
800 if (gimple_vdef (new_stmt)
801 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
802 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
803 if (!lhs)
805 gsi_replace (gsi, new_stmt, false);
806 return true;
808 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
809 goto done;
815 if (endp == 3)
817 /* Both DEST and SRC must be pointer types.
818 ??? This is what old code did. Is the testing for pointer types
819 really mandatory?
821 If either SRC is readonly or length is 1, we can use memcpy. */
822 if (!dest_align || !src_align)
823 return false;
824 if (readonly_data_expr (src)
825 || (tree_fits_uhwi_p (len)
826 && (MIN (src_align, dest_align) / BITS_PER_UNIT
827 >= tree_to_uhwi (len))))
829 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
830 if (!fn)
831 return false;
832 gimple_call_set_fndecl (stmt, fn);
833 gimple_call_set_arg (stmt, 0, dest);
834 gimple_call_set_arg (stmt, 1, src);
835 fold_stmt (gsi);
836 return true;
839 /* If *src and *dest can't overlap, optimize into memcpy as well. */
840 if (TREE_CODE (src) == ADDR_EXPR
841 && TREE_CODE (dest) == ADDR_EXPR)
843 tree src_base, dest_base, fn;
844 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
845 HOST_WIDE_INT maxsize;
847 srcvar = TREE_OPERAND (src, 0);
848 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
849 if (src_base == NULL)
850 src_base = srcvar;
851 destvar = TREE_OPERAND (dest, 0);
852 dest_base = get_addr_base_and_unit_offset (destvar,
853 &dest_offset);
854 if (dest_base == NULL)
855 dest_base = destvar;
856 if (tree_fits_uhwi_p (len))
857 maxsize = tree_to_uhwi (len);
858 else
859 maxsize = -1;
860 if (SSA_VAR_P (src_base)
861 && SSA_VAR_P (dest_base))
863 if (operand_equal_p (src_base, dest_base, 0)
864 && ranges_overlap_p (src_offset, maxsize,
865 dest_offset, maxsize))
866 return false;
868 else if (TREE_CODE (src_base) == MEM_REF
869 && TREE_CODE (dest_base) == MEM_REF)
871 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
872 TREE_OPERAND (dest_base, 0), 0))
873 return false;
874 offset_int off = mem_ref_offset (src_base) + src_offset;
875 if (!wi::fits_shwi_p (off))
876 return false;
877 src_offset = off.to_shwi ();
879 off = mem_ref_offset (dest_base) + dest_offset;
880 if (!wi::fits_shwi_p (off))
881 return false;
882 dest_offset = off.to_shwi ();
883 if (ranges_overlap_p (src_offset, maxsize,
884 dest_offset, maxsize))
885 return false;
887 else
888 return false;
890 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
891 if (!fn)
892 return false;
893 gimple_call_set_fndecl (stmt, fn);
894 gimple_call_set_arg (stmt, 0, dest);
895 gimple_call_set_arg (stmt, 1, src);
896 fold_stmt (gsi);
897 return true;
900 /* If the destination and source do not alias optimize into
901 memcpy as well. */
902 if ((is_gimple_min_invariant (dest)
903 || TREE_CODE (dest) == SSA_NAME)
904 && (is_gimple_min_invariant (src)
905 || TREE_CODE (src) == SSA_NAME))
907 ao_ref destr, srcr;
908 ao_ref_init_from_ptr_and_size (&destr, dest, len);
909 ao_ref_init_from_ptr_and_size (&srcr, src, len);
910 if (!refs_may_alias_p_1 (&destr, &srcr, false))
912 tree fn;
913 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
914 if (!fn)
915 return false;
916 gimple_call_set_fndecl (stmt, fn);
917 gimple_call_set_arg (stmt, 0, dest);
918 gimple_call_set_arg (stmt, 1, src);
919 fold_stmt (gsi);
920 return true;
924 return false;
927 if (!tree_fits_shwi_p (len))
928 return false;
929 /* FIXME:
930 This logic lose for arguments like (type *)malloc (sizeof (type)),
931 since we strip the casts of up to VOID return value from malloc.
932 Perhaps we ought to inherit type from non-VOID argument here? */
933 STRIP_NOPS (src);
934 STRIP_NOPS (dest);
935 if (!POINTER_TYPE_P (TREE_TYPE (src))
936 || !POINTER_TYPE_P (TREE_TYPE (dest)))
937 return false;
938 /* In the following try to find a type that is most natural to be
939 used for the memcpy source and destination and that allows
940 the most optimization when memcpy is turned into a plain assignment
941 using that type. In theory we could always use a char[len] type
942 but that only gains us that the destination and source possibly
943 no longer will have their address taken. */
944 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
945 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
947 tree tem = TREE_OPERAND (src, 0);
948 STRIP_NOPS (tem);
949 if (tem != TREE_OPERAND (src, 0))
950 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
952 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
954 tree tem = TREE_OPERAND (dest, 0);
955 STRIP_NOPS (tem);
956 if (tem != TREE_OPERAND (dest, 0))
957 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
959 srctype = TREE_TYPE (TREE_TYPE (src));
960 if (TREE_CODE (srctype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
963 srctype = TREE_TYPE (srctype);
964 STRIP_NOPS (src);
965 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
967 desttype = TREE_TYPE (TREE_TYPE (dest));
968 if (TREE_CODE (desttype) == ARRAY_TYPE
969 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
971 desttype = TREE_TYPE (desttype);
972 STRIP_NOPS (dest);
973 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
975 if (TREE_ADDRESSABLE (srctype)
976 || TREE_ADDRESSABLE (desttype))
977 return false;
979 /* Make sure we are not copying using a floating-point mode or
980 a type whose size possibly does not match its precision. */
981 if (FLOAT_MODE_P (TYPE_MODE (desttype))
982 || TREE_CODE (desttype) == BOOLEAN_TYPE
983 || TREE_CODE (desttype) == ENUMERAL_TYPE)
984 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
985 if (FLOAT_MODE_P (TYPE_MODE (srctype))
986 || TREE_CODE (srctype) == BOOLEAN_TYPE
987 || TREE_CODE (srctype) == ENUMERAL_TYPE)
988 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
989 if (!srctype)
990 srctype = desttype;
991 if (!desttype)
992 desttype = srctype;
993 if (!srctype)
994 return false;
996 src_align = get_pointer_alignment (src);
997 dest_align = get_pointer_alignment (dest);
998 if (dest_align < TYPE_ALIGN (desttype)
999 || src_align < TYPE_ALIGN (srctype))
1000 return false;
1002 destvar = dest;
1003 STRIP_NOPS (destvar);
1004 if (TREE_CODE (destvar) == ADDR_EXPR
1005 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1006 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1007 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1008 else
1009 destvar = NULL_TREE;
1011 srcvar = src;
1012 STRIP_NOPS (srcvar);
1013 if (TREE_CODE (srcvar) == ADDR_EXPR
1014 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1015 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1017 if (!destvar
1018 || src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1020 srcvar, off0);
1021 else if (!STRICT_ALIGNMENT)
1023 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1024 src_align);
1025 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1027 else
1028 srcvar = NULL_TREE;
1030 else
1031 srcvar = NULL_TREE;
1033 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1034 return false;
1036 if (srcvar == NULL_TREE)
1038 STRIP_NOPS (src);
1039 if (src_align >= TYPE_ALIGN (desttype))
1040 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1041 else
1043 if (STRICT_ALIGNMENT)
1044 return false;
1045 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1046 src_align);
1047 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1050 else if (destvar == NULL_TREE)
1052 STRIP_NOPS (dest);
1053 if (dest_align >= TYPE_ALIGN (srctype))
1054 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1055 else
1057 if (STRICT_ALIGNMENT)
1058 return false;
1059 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1060 dest_align);
1061 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1065 gimple *new_stmt;
1066 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1068 tree tem = fold_const_aggregate_ref (srcvar);
1069 if (tem)
1070 srcvar = tem;
1071 if (! is_gimple_min_invariant (srcvar))
1073 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1074 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1075 new_stmt);
1076 gimple_assign_set_lhs (new_stmt, srcvar);
1077 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1078 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1081 new_stmt = gimple_build_assign (destvar, srcvar);
1082 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1083 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1084 if (gimple_vdef (new_stmt)
1085 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1086 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1087 if (!lhs)
1089 gsi_replace (gsi, new_stmt, false);
1090 return true;
1092 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1095 done:
1096 gimple_seq stmts = NULL;
1097 if (endp == 0 || endp == 3)
1098 len = NULL_TREE;
1099 else if (endp == 2)
1100 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1101 ssize_int (1));
1102 if (endp == 2 || endp == 1)
1104 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1105 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1106 TREE_TYPE (dest), dest, len);
1109 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1110 gimple *repl = gimple_build_assign (lhs, dest);
1111 gsi_replace (gsi, repl, false);
1112 return true;
1115 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1116 to built-in memcmp (a, b, len). */
1118 static bool
1119 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1121 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1123 if (!fn)
1124 return false;
1126 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1128 gimple *stmt = gsi_stmt (*gsi);
1129 tree a = gimple_call_arg (stmt, 0);
1130 tree b = gimple_call_arg (stmt, 1);
1131 tree len = gimple_call_arg (stmt, 2);
1133 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1134 replace_call_with_call_and_fold (gsi, repl);
1136 return true;
1139 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1140 to built-in memmove (dest, src, len). */
1142 static bool
1143 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1145 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1147 if (!fn)
1148 return false;
1150 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1151 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1152 len) into memmove (dest, src, len). */
1154 gimple *stmt = gsi_stmt (*gsi);
1155 tree src = gimple_call_arg (stmt, 0);
1156 tree dest = gimple_call_arg (stmt, 1);
1157 tree len = gimple_call_arg (stmt, 2);
1159 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1160 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1161 replace_call_with_call_and_fold (gsi, repl);
1163 return true;
1166 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1167 to built-in memset (dest, 0, len). */
1169 static bool
1170 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1172 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1174 if (!fn)
1175 return false;
1177 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1179 gimple *stmt = gsi_stmt (*gsi);
1180 tree dest = gimple_call_arg (stmt, 0);
1181 tree len = gimple_call_arg (stmt, 1);
1183 gimple_seq seq = NULL;
1184 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1185 gimple_seq_add_stmt_without_update (&seq, repl);
1186 gsi_replace_with_seq_vops (gsi, seq);
1187 fold_stmt (gsi);
1189 return true;
1192 /* Fold function call to builtin memset or bzero at *GSI setting the
1193 memory of size LEN to VAL. Return whether a simplification was made. */
1195 static bool
1196 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1198 gimple *stmt = gsi_stmt (*gsi);
1199 tree etype;
1200 unsigned HOST_WIDE_INT length, cval;
1202 /* If the LEN parameter is zero, return DEST. */
1203 if (integer_zerop (len))
1205 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1206 return true;
1209 if (! tree_fits_uhwi_p (len))
1210 return false;
1212 if (TREE_CODE (c) != INTEGER_CST)
1213 return false;
1215 tree dest = gimple_call_arg (stmt, 0);
1216 tree var = dest;
1217 if (TREE_CODE (var) != ADDR_EXPR)
1218 return false;
1220 var = TREE_OPERAND (var, 0);
1221 if (TREE_THIS_VOLATILE (var))
1222 return false;
1224 etype = TREE_TYPE (var);
1225 if (TREE_CODE (etype) == ARRAY_TYPE)
1226 etype = TREE_TYPE (etype);
1228 if (!INTEGRAL_TYPE_P (etype)
1229 && !POINTER_TYPE_P (etype))
1230 return NULL_TREE;
1232 if (! var_decl_component_p (var))
1233 return NULL_TREE;
1235 length = tree_to_uhwi (len);
1236 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1237 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1238 return NULL_TREE;
1240 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1241 return NULL_TREE;
1243 if (integer_zerop (c))
1244 cval = 0;
1245 else
1247 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1248 return NULL_TREE;
1250 cval = TREE_INT_CST_LOW (c);
1251 cval &= 0xff;
1252 cval |= cval << 8;
1253 cval |= cval << 16;
1254 cval |= (cval << 31) << 1;
1257 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1258 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1259 gimple_set_vuse (store, gimple_vuse (stmt));
1260 tree vdef = gimple_vdef (stmt);
1261 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1263 gimple_set_vdef (store, gimple_vdef (stmt));
1264 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1266 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1267 if (gimple_call_lhs (stmt))
1269 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1270 gsi_replace (gsi, asgn, false);
1272 else
1274 gimple_stmt_iterator gsi2 = *gsi;
1275 gsi_prev (gsi);
1276 gsi_remove (&gsi2, true);
1279 return true;
1283 /* Obtain the minimum and maximum string length or minimum and maximum
1284 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1285 If ARG is an SSA name variable, follow its use-def chains. When
1286 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1287 if we are unable to determine the length or value, return False.
1288 VISITED is a bitmap of visited variables.
1289 TYPE is 0 if string length should be obtained, 1 for maximum string
1290 length and 2 for maximum value ARG can have.
1291 When FUZZY is set and the length of a string cannot be determined,
1292 the function instead considers as the maximum possible length the
1293 size of a character array it may refer to.
1294 Set *FLEXP to true if the range of the string lengths has been
1295 obtained from the upper bound of an array at the end of a struct.
1296 Such an array may hold a string that's longer than its upper bound
1297 due to it being used as a poor-man's flexible array member. */
1299 static bool
1300 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1301 bool fuzzy, bool *flexp)
1303 tree var, val;
1304 gimple *def_stmt;
1306 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1307 but MINLEN may be cleared during the execution of the function. */
1308 tree *minlen = length;
1309 tree *const maxlen = length + 1;
1311 if (TREE_CODE (arg) != SSA_NAME)
1313 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1314 if (TREE_CODE (arg) == ADDR_EXPR
1315 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1316 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1318 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1319 if (TREE_CODE (aop0) == INDIRECT_REF
1320 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1321 return get_range_strlen (TREE_OPERAND (aop0, 0),
1322 length, visited, type, fuzzy, flexp);
1325 if (type == 2)
1327 val = arg;
1328 if (TREE_CODE (val) != INTEGER_CST
1329 || tree_int_cst_sgn (val) < 0)
1330 return false;
1332 else
1333 val = c_strlen (arg, 1);
1335 if (!val && fuzzy)
1337 if (TREE_CODE (arg) == ADDR_EXPR)
1338 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1339 visited, type, fuzzy, flexp);
1341 if (TREE_CODE (arg) == COMPONENT_REF
1342 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1344 /* Use the type of the member array to determine the upper
1345 bound on the length of the array. This may be overly
1346 optimistic if the array itself isn't NUL-terminated and
1347 the caller relies on the subsequent member to contain
1348 the NUL.
1349 Set *FLEXP to true if the array whose bound is being
1350 used is at the end of a struct. */
1351 if (array_at_struct_end_p (arg))
1352 *flexp = true;
1354 arg = TREE_OPERAND (arg, 1);
1355 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1356 if (!val || integer_zerop (val))
1357 return false;
1358 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1359 integer_one_node);
1360 /* Set the minimum size to zero since the string in
1361 the array could have zero length. */
1362 *minlen = ssize_int (0);
1366 if (!val)
1367 return false;
1369 if (minlen
1370 && (!*minlen
1371 || (type > 0
1372 && TREE_CODE (*minlen) == INTEGER_CST
1373 && TREE_CODE (val) == INTEGER_CST
1374 && tree_int_cst_lt (val, *minlen))))
1375 *minlen = val;
1377 if (*maxlen)
1379 if (type > 0)
1381 if (TREE_CODE (*maxlen) != INTEGER_CST
1382 || TREE_CODE (val) != INTEGER_CST)
1383 return false;
1385 if (tree_int_cst_lt (*maxlen, val))
1386 *maxlen = val;
1387 return true;
1389 else if (simple_cst_equal (val, *maxlen) != 1)
1390 return false;
1393 *maxlen = val;
1394 return true;
1397 /* If ARG is registered for SSA update we cannot look at its defining
1398 statement. */
1399 if (name_registered_for_update_p (arg))
1400 return false;
1402 /* If we were already here, break the infinite cycle. */
1403 if (!*visited)
1404 *visited = BITMAP_ALLOC (NULL);
1405 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1406 return true;
1408 var = arg;
1409 def_stmt = SSA_NAME_DEF_STMT (var);
1411 switch (gimple_code (def_stmt))
1413 case GIMPLE_ASSIGN:
1414 /* The RHS of the statement defining VAR must either have a
1415 constant length or come from another SSA_NAME with a constant
1416 length. */
1417 if (gimple_assign_single_p (def_stmt)
1418 || gimple_assign_unary_nop_p (def_stmt))
1420 tree rhs = gimple_assign_rhs1 (def_stmt);
1421 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1423 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1425 tree op2 = gimple_assign_rhs2 (def_stmt);
1426 tree op3 = gimple_assign_rhs3 (def_stmt);
1427 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1428 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1430 return false;
1432 case GIMPLE_PHI:
1434 /* All the arguments of the PHI node must have the same constant
1435 length. */
1436 unsigned i;
1438 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1440 tree arg = gimple_phi_arg (def_stmt, i)->def;
1442 /* If this PHI has itself as an argument, we cannot
1443 determine the string length of this argument. However,
1444 if we can find a constant string length for the other
1445 PHI args then we can still be sure that this is a
1446 constant string length. So be optimistic and just
1447 continue with the next argument. */
1448 if (arg == gimple_phi_result (def_stmt))
1449 continue;
1451 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1453 if (fuzzy)
1454 *maxlen = build_all_ones_cst (size_type_node);
1455 else
1456 return false;
1460 return true;
1462 default:
1463 return false;
1467 /* Determine the minimum and maximum value or string length that ARG
1468 refers to and store each in the first two elements of MINMAXLEN.
1469 For expressions that point to strings of unknown lengths that are
1470 character arrays, use the upper bound of the array as the maximum
1471 length. For example, given an expression like 'x ? array : "xyz"'
1472 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1473 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1474 stored in array.
1475 Return true if the range of the string lengths has been obtained
1476 from the upper bound of an array at the end of a struct. Such
1477 an array may hold a string that's longer than its upper bound
1478 due to it being used as a poor-man's flexible array member. */
1480 bool
1481 get_range_strlen (tree arg, tree minmaxlen[2])
1483 bitmap visited = NULL;
1485 minmaxlen[0] = NULL_TREE;
1486 minmaxlen[1] = NULL_TREE;
1488 bool flexarray = false;
1489 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1491 if (visited)
1492 BITMAP_FREE (visited);
1494 return flexarray;
1497 tree
1498 get_maxval_strlen (tree arg, int type)
1500 bitmap visited = NULL;
1501 tree len[2] = { NULL_TREE, NULL_TREE };
1503 bool dummy;
1504 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1505 len[1] = NULL_TREE;
1506 if (visited)
1507 BITMAP_FREE (visited);
1509 return len[1];
1513 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1514 If LEN is not NULL, it represents the length of the string to be
1515 copied. Return NULL_TREE if no simplification can be made. */
1517 static bool
1518 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1519 tree dest, tree src)
1521 location_t loc = gimple_location (gsi_stmt (*gsi));
1522 tree fn;
1524 /* If SRC and DEST are the same (and not volatile), return DEST. */
1525 if (operand_equal_p (src, dest, 0))
1527 replace_call_with_value (gsi, dest);
1528 return true;
1531 if (optimize_function_for_size_p (cfun))
1532 return false;
1534 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1535 if (!fn)
1536 return false;
1538 tree len = get_maxval_strlen (src, 0);
1539 if (!len)
1540 return false;
1542 len = fold_convert_loc (loc, size_type_node, len);
1543 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1544 len = force_gimple_operand_gsi (gsi, len, true,
1545 NULL_TREE, true, GSI_SAME_STMT);
1546 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1547 replace_call_with_call_and_fold (gsi, repl);
1548 return true;
1551 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1552 If SLEN is not NULL, it represents the length of the source string.
1553 Return NULL_TREE if no simplification can be made. */
1555 static bool
1556 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1557 tree dest, tree src, tree len)
1559 gimple *stmt = gsi_stmt (*gsi);
1560 location_t loc = gimple_location (stmt);
1562 /* If the LEN parameter is zero, return DEST. */
1563 if (integer_zerop (len))
1565 tree fndecl = gimple_call_fndecl (stmt);
1566 gcall *call = as_a <gcall *> (stmt);
1568 /* Warn about the lack of nul termination: the result is not
1569 a (nul-terminated) string. */
1570 tree slen = get_maxval_strlen (src, 0);
1571 if (slen && !integer_zerop (slen))
1572 warning_at (loc, OPT_Wstringop_truncation,
1573 "%G%qD destination unchanged after copying no bytes "
1574 "from a string of length %E",
1575 call, fndecl, slen);
1576 else
1577 warning_at (loc, OPT_Wstringop_truncation,
1578 "%G%qD destination unchanged after copying no bytes",
1579 call, fndecl);
1581 replace_call_with_value (gsi, dest);
1582 return true;
1585 /* We can't compare slen with len as constants below if len is not a
1586 constant. */
1587 if (TREE_CODE (len) != INTEGER_CST)
1588 return false;
1590 /* Now, we must be passed a constant src ptr parameter. */
1591 tree slen = get_maxval_strlen (src, 0);
1592 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1593 return false;
1595 /* The size of the source string including the terminating nul. */
1596 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1598 /* We do not support simplification of this case, though we do
1599 support it when expanding trees into RTL. */
1600 /* FIXME: generate a call to __builtin_memset. */
1601 if (tree_int_cst_lt (ssize, len))
1602 return false;
1604 if (tree_int_cst_lt (len, slen))
1606 tree fndecl = gimple_call_fndecl (stmt);
1607 gcall *call = as_a <gcall *> (stmt);
1609 warning_at (loc, OPT_Wstringop_truncation,
1610 (tree_int_cst_equal (size_one_node, len)
1611 ? G_("%G%qD output truncated copying %E byte "
1612 "from a string of length %E")
1613 : G_("%G%qD output truncated copying %E bytes "
1614 "from a string of length %E")),
1615 call, fndecl, len, slen);
1617 else if (tree_int_cst_equal (len, slen))
1619 tree decl = dest;
1620 if (TREE_CODE (decl) == SSA_NAME)
1622 gimple *def_stmt = SSA_NAME_DEF_STMT (decl);
1623 if (is_gimple_assign (def_stmt))
1625 tree_code code = gimple_assign_rhs_code (def_stmt);
1626 if (code == ADDR_EXPR || code == VAR_DECL)
1627 decl = gimple_assign_rhs1 (def_stmt);
1631 if (TREE_CODE (decl) == ADDR_EXPR)
1632 decl = TREE_OPERAND (decl, 0);
1634 if (TREE_CODE (decl) == COMPONENT_REF)
1635 decl = TREE_OPERAND (decl, 1);
1637 tree fndecl = gimple_call_fndecl (stmt);
1638 gcall *call = as_a <gcall *> (stmt);
1640 if (!DECL_P (decl)
1641 || !lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1642 warning_at (loc, OPT_Wstringop_truncation,
1643 (tree_int_cst_equal (size_one_node, len)
1644 ? G_("%G%qD output truncated before terminating nul "
1645 "copying %E byte from a string of the same "
1646 "length")
1647 : G_("%G%qD output truncated before terminating nul "
1648 "copying %E bytes from a string of the same "
1649 "length")),
1650 call, fndecl, len);
1653 /* OK transform into builtin memcpy. */
1654 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1655 if (!fn)
1656 return false;
1658 len = fold_convert_loc (loc, size_type_node, len);
1659 len = force_gimple_operand_gsi (gsi, len, true,
1660 NULL_TREE, true, GSI_SAME_STMT);
1661 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1662 replace_call_with_call_and_fold (gsi, repl);
1664 return true;
1667 /* Fold function call to builtin strchr or strrchr.
1668 If both arguments are constant, evaluate and fold the result,
1669 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1670 In general strlen is significantly faster than strchr
1671 due to being a simpler operation. */
1672 static bool
1673 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1675 gimple *stmt = gsi_stmt (*gsi);
1676 tree str = gimple_call_arg (stmt, 0);
1677 tree c = gimple_call_arg (stmt, 1);
1678 location_t loc = gimple_location (stmt);
1679 const char *p;
1680 char ch;
1682 if (!gimple_call_lhs (stmt))
1683 return false;
1685 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1687 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1689 if (p1 == NULL)
1691 replace_call_with_value (gsi, integer_zero_node);
1692 return true;
1695 tree len = build_int_cst (size_type_node, p1 - p);
1696 gimple_seq stmts = NULL;
1697 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1698 POINTER_PLUS_EXPR, str, len);
1699 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1700 gsi_replace_with_seq_vops (gsi, stmts);
1701 return true;
1704 if (!integer_zerop (c))
1705 return false;
1707 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1708 if (is_strrchr && optimize_function_for_size_p (cfun))
1710 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1712 if (strchr_fn)
1714 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1715 replace_call_with_call_and_fold (gsi, repl);
1716 return true;
1719 return false;
1722 tree len;
1723 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1725 if (!strlen_fn)
1726 return false;
1728 /* Create newstr = strlen (str). */
1729 gimple_seq stmts = NULL;
1730 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1731 gimple_set_location (new_stmt, loc);
1732 len = create_tmp_reg_or_ssa_name (size_type_node);
1733 gimple_call_set_lhs (new_stmt, len);
1734 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1736 /* Create (str p+ strlen (str)). */
1737 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1738 POINTER_PLUS_EXPR, str, len);
1739 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1740 gsi_replace_with_seq_vops (gsi, stmts);
1741 /* gsi now points at the assignment to the lhs, get a
1742 stmt iterator to the strlen.
1743 ??? We can't use gsi_for_stmt as that doesn't work when the
1744 CFG isn't built yet. */
1745 gimple_stmt_iterator gsi2 = *gsi;
1746 gsi_prev (&gsi2);
1747 fold_stmt (&gsi2);
1748 return true;
1751 /* Fold function call to builtin strstr.
1752 If both arguments are constant, evaluate and fold the result,
1753 additionally fold strstr (x, "") into x and strstr (x, "c")
1754 into strchr (x, 'c'). */
1755 static bool
1756 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1758 gimple *stmt = gsi_stmt (*gsi);
1759 tree haystack = gimple_call_arg (stmt, 0);
1760 tree needle = gimple_call_arg (stmt, 1);
1761 const char *p, *q;
1763 if (!gimple_call_lhs (stmt))
1764 return false;
1766 q = c_getstr (needle);
1767 if (q == NULL)
1768 return false;
1770 if ((p = c_getstr (haystack)))
1772 const char *r = strstr (p, q);
1774 if (r == NULL)
1776 replace_call_with_value (gsi, integer_zero_node);
1777 return true;
1780 tree len = build_int_cst (size_type_node, r - p);
1781 gimple_seq stmts = NULL;
1782 gimple *new_stmt
1783 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1784 haystack, len);
1785 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1786 gsi_replace_with_seq_vops (gsi, stmts);
1787 return true;
1790 /* For strstr (x, "") return x. */
1791 if (q[0] == '\0')
1793 replace_call_with_value (gsi, haystack);
1794 return true;
1797 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1798 if (q[1] == '\0')
1800 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1801 if (strchr_fn)
1803 tree c = build_int_cst (integer_type_node, q[0]);
1804 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1805 replace_call_with_call_and_fold (gsi, repl);
1806 return true;
1810 return false;
1813 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1814 to the call.
1816 Return NULL_TREE if no simplification was possible, otherwise return the
1817 simplified form of the call as a tree.
1819 The simplified form may be a constant or other expression which
1820 computes the same value, but in a more efficient manner (including
1821 calls to other builtin functions).
1823 The call may contain arguments which need to be evaluated, but
1824 which are not useful to determine the result of the call. In
1825 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1826 COMPOUND_EXPR will be an argument which must be evaluated.
1827 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1828 COMPOUND_EXPR in the chain will contain the tree for the simplified
1829 form of the builtin function call. */
1831 static bool
1832 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1834 gimple *stmt = gsi_stmt (*gsi);
1835 location_t loc = gimple_location (stmt);
1837 const char *p = c_getstr (src);
1839 /* If the string length is zero, return the dst parameter. */
1840 if (p && *p == '\0')
1842 replace_call_with_value (gsi, dst);
1843 return true;
1846 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1847 return false;
1849 /* See if we can store by pieces into (dst + strlen(dst)). */
1850 tree newdst;
1851 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1852 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1854 if (!strlen_fn || !memcpy_fn)
1855 return false;
1857 /* If the length of the source string isn't computable don't
1858 split strcat into strlen and memcpy. */
1859 tree len = get_maxval_strlen (src, 0);
1860 if (! len)
1861 return false;
1863 /* Create strlen (dst). */
1864 gimple_seq stmts = NULL, stmts2;
1865 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1866 gimple_set_location (repl, loc);
1867 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1868 gimple_call_set_lhs (repl, newdst);
1869 gimple_seq_add_stmt_without_update (&stmts, repl);
1871 /* Create (dst p+ strlen (dst)). */
1872 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1873 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1874 gimple_seq_add_seq_without_update (&stmts, stmts2);
1876 len = fold_convert_loc (loc, size_type_node, len);
1877 len = size_binop_loc (loc, PLUS_EXPR, len,
1878 build_int_cst (size_type_node, 1));
1879 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1880 gimple_seq_add_seq_without_update (&stmts, stmts2);
1882 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1883 gimple_seq_add_stmt_without_update (&stmts, repl);
1884 if (gimple_call_lhs (stmt))
1886 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1887 gimple_seq_add_stmt_without_update (&stmts, repl);
1888 gsi_replace_with_seq_vops (gsi, stmts);
1889 /* gsi now points at the assignment to the lhs, get a
1890 stmt iterator to the memcpy call.
1891 ??? We can't use gsi_for_stmt as that doesn't work when the
1892 CFG isn't built yet. */
1893 gimple_stmt_iterator gsi2 = *gsi;
1894 gsi_prev (&gsi2);
1895 fold_stmt (&gsi2);
1897 else
1899 gsi_replace_with_seq_vops (gsi, stmts);
1900 fold_stmt (gsi);
1902 return true;
1905 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1906 are the arguments to the call. */
1908 static bool
1909 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1911 gimple *stmt = gsi_stmt (*gsi);
1912 tree dest = gimple_call_arg (stmt, 0);
1913 tree src = gimple_call_arg (stmt, 1);
1914 tree size = gimple_call_arg (stmt, 2);
1915 tree fn;
1916 const char *p;
1919 p = c_getstr (src);
1920 /* If the SRC parameter is "", return DEST. */
1921 if (p && *p == '\0')
1923 replace_call_with_value (gsi, dest);
1924 return true;
1927 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1928 return false;
1930 /* If __builtin_strcat_chk is used, assume strcat is available. */
1931 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1932 if (!fn)
1933 return false;
1935 gimple *repl = gimple_build_call (fn, 2, dest, src);
1936 replace_call_with_call_and_fold (gsi, repl);
1937 return true;
1940 /* Simplify a call to the strncat builtin. */
1942 static bool
1943 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1945 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1946 tree dst = gimple_call_arg (stmt, 0);
1947 tree src = gimple_call_arg (stmt, 1);
1948 tree len = gimple_call_arg (stmt, 2);
1950 const char *p = c_getstr (src);
1952 /* If the requested length is zero, or the src parameter string
1953 length is zero, return the dst parameter. */
1954 if (integer_zerop (len) || (p && *p == '\0'))
1956 replace_call_with_value (gsi, dst);
1957 return true;
1960 if (TREE_CODE (len) != INTEGER_CST || !p)
1961 return false;
1963 unsigned srclen = strlen (p);
1965 int cmpsrc = compare_tree_int (len, srclen);
1967 /* Return early if the requested len is less than the string length.
1968 Warnings will be issued elsewhere later. */
1969 if (cmpsrc < 0)
1970 return false;
1972 unsigned HOST_WIDE_INT dstsize;
1974 bool nowarn = gimple_no_warning_p (stmt);
1976 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
1978 int cmpdst = compare_tree_int (len, dstsize);
1980 if (cmpdst >= 0)
1982 tree fndecl = gimple_call_fndecl (stmt);
1984 /* Strncat copies (at most) LEN bytes and always appends
1985 the terminating NUL so the specified bound should never
1986 be equal to (or greater than) the size of the destination.
1987 If it is, the copy could overflow. */
1988 location_t loc = gimple_location (stmt);
1989 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
1990 cmpdst == 0
1991 ? G_("%G%qD specified bound %E equals "
1992 "destination size")
1993 : G_("%G%qD specified bound %E exceeds "
1994 "destination size %wu"),
1995 stmt, fndecl, len, dstsize);
1996 if (nowarn)
1997 gimple_set_no_warning (stmt, true);
2001 if (!nowarn && cmpsrc == 0)
2003 tree fndecl = gimple_call_fndecl (stmt);
2005 /* To avoid certain truncation the specified bound should also
2006 not be equal to (or less than) the length of the source. */
2007 location_t loc = gimple_location (stmt);
2008 if (warning_at (loc, OPT_Wstringop_overflow_,
2009 "%G%qD specified bound %E equals source length",
2010 stmt, fndecl, len))
2011 gimple_set_no_warning (stmt, true);
2014 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2016 /* If the replacement _DECL isn't initialized, don't do the
2017 transformation. */
2018 if (!fn)
2019 return false;
2021 /* Otherwise, emit a call to strcat. */
2022 gcall *repl = gimple_build_call (fn, 2, dst, src);
2023 replace_call_with_call_and_fold (gsi, repl);
2024 return true;
2027 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2028 LEN, and SIZE. */
2030 static bool
2031 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2033 gimple *stmt = gsi_stmt (*gsi);
2034 tree dest = gimple_call_arg (stmt, 0);
2035 tree src = gimple_call_arg (stmt, 1);
2036 tree len = gimple_call_arg (stmt, 2);
2037 tree size = gimple_call_arg (stmt, 3);
2038 tree fn;
2039 const char *p;
2041 p = c_getstr (src);
2042 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2043 if ((p && *p == '\0')
2044 || integer_zerop (len))
2046 replace_call_with_value (gsi, dest);
2047 return true;
2050 if (! tree_fits_uhwi_p (size))
2051 return false;
2053 if (! integer_all_onesp (size))
2055 tree src_len = c_strlen (src, 1);
2056 if (src_len
2057 && tree_fits_uhwi_p (src_len)
2058 && tree_fits_uhwi_p (len)
2059 && ! tree_int_cst_lt (len, src_len))
2061 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2062 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2063 if (!fn)
2064 return false;
2066 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2067 replace_call_with_call_and_fold (gsi, repl);
2068 return true;
2070 return false;
2073 /* If __builtin_strncat_chk is used, assume strncat is available. */
2074 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2075 if (!fn)
2076 return false;
2078 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2079 replace_call_with_call_and_fold (gsi, repl);
2080 return true;
2083 /* Build and append gimple statements to STMTS that would load a first
2084 character of a memory location identified by STR. LOC is location
2085 of the statement. */
2087 static tree
2088 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2090 tree var;
2092 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2093 tree cst_uchar_ptr_node
2094 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2095 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2097 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2098 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2099 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2101 gimple_assign_set_lhs (stmt, var);
2102 gimple_seq_add_stmt_without_update (stmts, stmt);
2104 return var;
2107 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2108 FCODE is the name of the builtin. */
2110 static bool
2111 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2113 gimple *stmt = gsi_stmt (*gsi);
2114 tree callee = gimple_call_fndecl (stmt);
2115 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2117 tree type = integer_type_node;
2118 tree str1 = gimple_call_arg (stmt, 0);
2119 tree str2 = gimple_call_arg (stmt, 1);
2120 tree lhs = gimple_call_lhs (stmt);
2121 HOST_WIDE_INT length = -1;
2123 /* Handle strncmp and strncasecmp functions. */
2124 if (gimple_call_num_args (stmt) == 3)
2126 tree len = gimple_call_arg (stmt, 2);
2127 if (tree_fits_uhwi_p (len))
2128 length = tree_to_uhwi (len);
2131 /* If the LEN parameter is zero, return zero. */
2132 if (length == 0)
2134 replace_call_with_value (gsi, integer_zero_node);
2135 return true;
2138 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2139 if (operand_equal_p (str1, str2, 0))
2141 replace_call_with_value (gsi, integer_zero_node);
2142 return true;
2145 const char *p1 = c_getstr (str1);
2146 const char *p2 = c_getstr (str2);
2148 /* For known strings, return an immediate value. */
2149 if (p1 && p2)
2151 int r = 0;
2152 bool known_result = false;
2154 switch (fcode)
2156 case BUILT_IN_STRCMP:
2158 r = strcmp (p1, p2);
2159 known_result = true;
2160 break;
2162 case BUILT_IN_STRNCMP:
2164 if (length == -1)
2165 break;
2166 r = strncmp (p1, p2, length);
2167 known_result = true;
2168 break;
2170 /* Only handleable situation is where the string are equal (result 0),
2171 which is already handled by operand_equal_p case. */
2172 case BUILT_IN_STRCASECMP:
2173 break;
2174 case BUILT_IN_STRNCASECMP:
2176 if (length == -1)
2177 break;
2178 r = strncmp (p1, p2, length);
2179 if (r == 0)
2180 known_result = true;
2181 break;;
2183 default:
2184 gcc_unreachable ();
2187 if (known_result)
2189 replace_call_with_value (gsi, build_cmp_result (type, r));
2190 return true;
2194 bool nonzero_length = length >= 1
2195 || fcode == BUILT_IN_STRCMP
2196 || fcode == BUILT_IN_STRCASECMP;
2198 location_t loc = gimple_location (stmt);
2200 /* If the second arg is "", return *(const unsigned char*)arg1. */
2201 if (p2 && *p2 == '\0' && nonzero_length)
2203 gimple_seq stmts = NULL;
2204 tree var = gimple_load_first_char (loc, str1, &stmts);
2205 if (lhs)
2207 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2208 gimple_seq_add_stmt_without_update (&stmts, stmt);
2211 gsi_replace_with_seq_vops (gsi, stmts);
2212 return true;
2215 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2216 if (p1 && *p1 == '\0' && nonzero_length)
2218 gimple_seq stmts = NULL;
2219 tree var = gimple_load_first_char (loc, str2, &stmts);
2221 if (lhs)
2223 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2224 stmt = gimple_build_assign (c, NOP_EXPR, var);
2225 gimple_seq_add_stmt_without_update (&stmts, stmt);
2227 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2228 gimple_seq_add_stmt_without_update (&stmts, stmt);
2231 gsi_replace_with_seq_vops (gsi, stmts);
2232 return true;
2235 /* If len parameter is one, return an expression corresponding to
2236 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2237 if (fcode == BUILT_IN_STRNCMP && length == 1)
2239 gimple_seq stmts = NULL;
2240 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2241 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2243 if (lhs)
2245 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2246 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2247 gimple_seq_add_stmt_without_update (&stmts, convert1);
2249 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2250 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2251 gimple_seq_add_stmt_without_update (&stmts, convert2);
2253 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2254 gimple_seq_add_stmt_without_update (&stmts, stmt);
2257 gsi_replace_with_seq_vops (gsi, stmts);
2258 return true;
2261 return false;
2264 /* Fold a call to the memchr pointed by GSI iterator. */
2266 static bool
2267 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2269 gimple *stmt = gsi_stmt (*gsi);
2270 tree lhs = gimple_call_lhs (stmt);
2271 tree arg1 = gimple_call_arg (stmt, 0);
2272 tree arg2 = gimple_call_arg (stmt, 1);
2273 tree len = gimple_call_arg (stmt, 2);
2275 /* If the LEN parameter is zero, return zero. */
2276 if (integer_zerop (len))
2278 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2279 return true;
2282 char c;
2283 if (TREE_CODE (arg2) != INTEGER_CST
2284 || !tree_fits_uhwi_p (len)
2285 || !target_char_cst_p (arg2, &c))
2286 return false;
2288 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2289 unsigned HOST_WIDE_INT string_length;
2290 const char *p1 = c_getstr (arg1, &string_length);
2292 if (p1)
2294 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2295 if (r == NULL)
2297 if (length <= string_length)
2299 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2300 return true;
2303 else
2305 unsigned HOST_WIDE_INT offset = r - p1;
2306 gimple_seq stmts = NULL;
2307 if (lhs != NULL_TREE)
2309 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2310 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2311 arg1, offset_cst);
2312 gimple_seq_add_stmt_without_update (&stmts, stmt);
2314 else
2315 gimple_seq_add_stmt_without_update (&stmts,
2316 gimple_build_nop ());
2318 gsi_replace_with_seq_vops (gsi, stmts);
2319 return true;
2323 return false;
2326 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2327 to the call. IGNORE is true if the value returned
2328 by the builtin will be ignored. UNLOCKED is true is true if this
2329 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2330 the known length of the string. Return NULL_TREE if no simplification
2331 was possible. */
2333 static bool
2334 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2335 tree arg0, tree arg1,
2336 bool unlocked)
2338 gimple *stmt = gsi_stmt (*gsi);
2340 /* If we're using an unlocked function, assume the other unlocked
2341 functions exist explicitly. */
2342 tree const fn_fputc = (unlocked
2343 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2344 : builtin_decl_implicit (BUILT_IN_FPUTC));
2345 tree const fn_fwrite = (unlocked
2346 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2347 : builtin_decl_implicit (BUILT_IN_FWRITE));
2349 /* If the return value is used, don't do the transformation. */
2350 if (gimple_call_lhs (stmt))
2351 return false;
2353 /* Get the length of the string passed to fputs. If the length
2354 can't be determined, punt. */
2355 tree len = get_maxval_strlen (arg0, 0);
2356 if (!len
2357 || TREE_CODE (len) != INTEGER_CST)
2358 return false;
2360 switch (compare_tree_int (len, 1))
2362 case -1: /* length is 0, delete the call entirely . */
2363 replace_call_with_value (gsi, integer_zero_node);
2364 return true;
2366 case 0: /* length is 1, call fputc. */
2368 const char *p = c_getstr (arg0);
2369 if (p != NULL)
2371 if (!fn_fputc)
2372 return false;
2374 gimple *repl = gimple_build_call (fn_fputc, 2,
2375 build_int_cst
2376 (integer_type_node, p[0]), arg1);
2377 replace_call_with_call_and_fold (gsi, repl);
2378 return true;
2381 /* FALLTHROUGH */
2382 case 1: /* length is greater than 1, call fwrite. */
2384 /* If optimizing for size keep fputs. */
2385 if (optimize_function_for_size_p (cfun))
2386 return false;
2387 /* New argument list transforming fputs(string, stream) to
2388 fwrite(string, 1, len, stream). */
2389 if (!fn_fwrite)
2390 return false;
2392 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2393 size_one_node, len, arg1);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2397 default:
2398 gcc_unreachable ();
2400 return false;
2403 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2404 DEST, SRC, LEN, and SIZE are the arguments to the call.
2405 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2406 code of the builtin. If MAXLEN is not NULL, it is maximum length
2407 passed as third argument. */
2409 static bool
2410 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2411 tree dest, tree src, tree len, tree size,
2412 enum built_in_function fcode)
2414 gimple *stmt = gsi_stmt (*gsi);
2415 location_t loc = gimple_location (stmt);
2416 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2417 tree fn;
2419 /* If SRC and DEST are the same (and not volatile), return DEST
2420 (resp. DEST+LEN for __mempcpy_chk). */
2421 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2423 if (fcode != BUILT_IN_MEMPCPY_CHK)
2425 replace_call_with_value (gsi, dest);
2426 return true;
2428 else
2430 gimple_seq stmts = NULL;
2431 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2432 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2433 TREE_TYPE (dest), dest, len);
2434 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2435 replace_call_with_value (gsi, temp);
2436 return true;
2440 if (! tree_fits_uhwi_p (size))
2441 return false;
2443 tree maxlen = get_maxval_strlen (len, 2);
2444 if (! integer_all_onesp (size))
2446 if (! tree_fits_uhwi_p (len))
2448 /* If LEN is not constant, try MAXLEN too.
2449 For MAXLEN only allow optimizing into non-_ocs function
2450 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2451 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2453 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2455 /* (void) __mempcpy_chk () can be optimized into
2456 (void) __memcpy_chk (). */
2457 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2458 if (!fn)
2459 return false;
2461 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2462 replace_call_with_call_and_fold (gsi, repl);
2463 return true;
2465 return false;
2468 else
2469 maxlen = len;
2471 if (tree_int_cst_lt (size, maxlen))
2472 return false;
2475 fn = NULL_TREE;
2476 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2477 mem{cpy,pcpy,move,set} is available. */
2478 switch (fcode)
2480 case BUILT_IN_MEMCPY_CHK:
2481 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2482 break;
2483 case BUILT_IN_MEMPCPY_CHK:
2484 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2485 break;
2486 case BUILT_IN_MEMMOVE_CHK:
2487 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2488 break;
2489 case BUILT_IN_MEMSET_CHK:
2490 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2491 break;
2492 default:
2493 break;
2496 if (!fn)
2497 return false;
2499 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2500 replace_call_with_call_and_fold (gsi, repl);
2501 return true;
2504 /* Fold a call to the __st[rp]cpy_chk builtin.
2505 DEST, SRC, and SIZE are the arguments to the call.
2506 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2507 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2508 strings passed as second argument. */
2510 static bool
2511 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2512 tree dest,
2513 tree src, tree size,
2514 enum built_in_function fcode)
2516 gimple *stmt = gsi_stmt (*gsi);
2517 location_t loc = gimple_location (stmt);
2518 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2519 tree len, fn;
2521 /* If SRC and DEST are the same (and not volatile), return DEST. */
2522 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2524 replace_call_with_value (gsi, dest);
2525 return true;
2528 if (! tree_fits_uhwi_p (size))
2529 return false;
2531 tree maxlen = get_maxval_strlen (src, 1);
2532 if (! integer_all_onesp (size))
2534 len = c_strlen (src, 1);
2535 if (! len || ! tree_fits_uhwi_p (len))
2537 /* If LEN is not constant, try MAXLEN too.
2538 For MAXLEN only allow optimizing into non-_ocs function
2539 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2540 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2542 if (fcode == BUILT_IN_STPCPY_CHK)
2544 if (! ignore)
2545 return false;
2547 /* If return value of __stpcpy_chk is ignored,
2548 optimize into __strcpy_chk. */
2549 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2550 if (!fn)
2551 return false;
2553 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2554 replace_call_with_call_and_fold (gsi, repl);
2555 return true;
2558 if (! len || TREE_SIDE_EFFECTS (len))
2559 return false;
2561 /* If c_strlen returned something, but not a constant,
2562 transform __strcpy_chk into __memcpy_chk. */
2563 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2564 if (!fn)
2565 return false;
2567 gimple_seq stmts = NULL;
2568 len = gimple_convert (&stmts, loc, size_type_node, len);
2569 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2570 build_int_cst (size_type_node, 1));
2571 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2572 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2573 replace_call_with_call_and_fold (gsi, repl);
2574 return true;
2577 else
2578 maxlen = len;
2580 if (! tree_int_cst_lt (maxlen, size))
2581 return false;
2584 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2585 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2586 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2587 if (!fn)
2588 return false;
2590 gimple *repl = gimple_build_call (fn, 2, dest, src);
2591 replace_call_with_call_and_fold (gsi, repl);
2592 return true;
2595 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2596 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2597 length passed as third argument. IGNORE is true if return value can be
2598 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2600 static bool
2601 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2602 tree dest, tree src,
2603 tree len, tree size,
2604 enum built_in_function fcode)
2606 gimple *stmt = gsi_stmt (*gsi);
2607 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2608 tree fn;
2610 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2612 /* If return value of __stpncpy_chk is ignored,
2613 optimize into __strncpy_chk. */
2614 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2615 if (fn)
2617 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2618 replace_call_with_call_and_fold (gsi, repl);
2619 return true;
2623 if (! tree_fits_uhwi_p (size))
2624 return false;
2626 tree maxlen = get_maxval_strlen (len, 2);
2627 if (! integer_all_onesp (size))
2629 if (! tree_fits_uhwi_p (len))
2631 /* If LEN is not constant, try MAXLEN too.
2632 For MAXLEN only allow optimizing into non-_ocs function
2633 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2634 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2635 return false;
2637 else
2638 maxlen = len;
2640 if (tree_int_cst_lt (size, maxlen))
2641 return false;
2644 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2645 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2646 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2647 if (!fn)
2648 return false;
2650 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2651 replace_call_with_call_and_fold (gsi, repl);
2652 return true;
2655 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2656 Return NULL_TREE if no simplification can be made. */
2658 static bool
2659 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2661 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2662 location_t loc = gimple_location (stmt);
2663 tree dest = gimple_call_arg (stmt, 0);
2664 tree src = gimple_call_arg (stmt, 1);
2665 tree fn, len, lenp1;
2667 /* If the result is unused, replace stpcpy with strcpy. */
2668 if (gimple_call_lhs (stmt) == NULL_TREE)
2670 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2671 if (!fn)
2672 return false;
2673 gimple_call_set_fndecl (stmt, fn);
2674 fold_stmt (gsi);
2675 return true;
2678 len = c_strlen (src, 1);
2679 if (!len
2680 || TREE_CODE (len) != INTEGER_CST)
2681 return false;
2683 if (optimize_function_for_size_p (cfun)
2684 /* If length is zero it's small enough. */
2685 && !integer_zerop (len))
2686 return false;
2688 /* If the source has a known length replace stpcpy with memcpy. */
2689 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2690 if (!fn)
2691 return false;
2693 gimple_seq stmts = NULL;
2694 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2695 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2696 tem, build_int_cst (size_type_node, 1));
2697 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2698 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2699 gimple_set_vuse (repl, gimple_vuse (stmt));
2700 gimple_set_vdef (repl, gimple_vdef (stmt));
2701 if (gimple_vdef (repl)
2702 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2703 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2704 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2705 /* Replace the result with dest + len. */
2706 stmts = NULL;
2707 tem = gimple_convert (&stmts, loc, sizetype, len);
2708 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2709 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2710 POINTER_PLUS_EXPR, dest, tem);
2711 gsi_replace (gsi, ret, false);
2712 /* Finally fold the memcpy call. */
2713 gimple_stmt_iterator gsi2 = *gsi;
2714 gsi_prev (&gsi2);
2715 fold_stmt (&gsi2);
2716 return true;
2719 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2720 NULL_TREE if a normal call should be emitted rather than expanding
2721 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2722 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2723 passed as second argument. */
2725 static bool
2726 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2727 enum built_in_function fcode)
2729 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2730 tree dest, size, len, fn, fmt, flag;
2731 const char *fmt_str;
2733 /* Verify the required arguments in the original call. */
2734 if (gimple_call_num_args (stmt) < 5)
2735 return false;
2737 dest = gimple_call_arg (stmt, 0);
2738 len = gimple_call_arg (stmt, 1);
2739 flag = gimple_call_arg (stmt, 2);
2740 size = gimple_call_arg (stmt, 3);
2741 fmt = gimple_call_arg (stmt, 4);
2743 if (! tree_fits_uhwi_p (size))
2744 return false;
2746 if (! integer_all_onesp (size))
2748 tree maxlen = get_maxval_strlen (len, 2);
2749 if (! tree_fits_uhwi_p (len))
2751 /* If LEN is not constant, try MAXLEN too.
2752 For MAXLEN only allow optimizing into non-_ocs function
2753 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2754 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2755 return false;
2757 else
2758 maxlen = len;
2760 if (tree_int_cst_lt (size, maxlen))
2761 return false;
2764 if (!init_target_chars ())
2765 return false;
2767 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2768 or if format doesn't contain % chars or is "%s". */
2769 if (! integer_zerop (flag))
2771 fmt_str = c_getstr (fmt);
2772 if (fmt_str == NULL)
2773 return false;
2774 if (strchr (fmt_str, target_percent) != NULL
2775 && strcmp (fmt_str, target_percent_s))
2776 return false;
2779 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2780 available. */
2781 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2782 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2783 if (!fn)
2784 return false;
2786 /* Replace the called function and the first 5 argument by 3 retaining
2787 trailing varargs. */
2788 gimple_call_set_fndecl (stmt, fn);
2789 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2790 gimple_call_set_arg (stmt, 0, dest);
2791 gimple_call_set_arg (stmt, 1, len);
2792 gimple_call_set_arg (stmt, 2, fmt);
2793 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2794 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2795 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2796 fold_stmt (gsi);
2797 return true;
2800 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2801 Return NULL_TREE if a normal call should be emitted rather than
2802 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2803 or BUILT_IN_VSPRINTF_CHK. */
2805 static bool
2806 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2807 enum built_in_function fcode)
2809 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2810 tree dest, size, len, fn, fmt, flag;
2811 const char *fmt_str;
2812 unsigned nargs = gimple_call_num_args (stmt);
2814 /* Verify the required arguments in the original call. */
2815 if (nargs < 4)
2816 return false;
2817 dest = gimple_call_arg (stmt, 0);
2818 flag = gimple_call_arg (stmt, 1);
2819 size = gimple_call_arg (stmt, 2);
2820 fmt = gimple_call_arg (stmt, 3);
2822 if (! tree_fits_uhwi_p (size))
2823 return false;
2825 len = NULL_TREE;
2827 if (!init_target_chars ())
2828 return false;
2830 /* Check whether the format is a literal string constant. */
2831 fmt_str = c_getstr (fmt);
2832 if (fmt_str != NULL)
2834 /* If the format doesn't contain % args or %%, we know the size. */
2835 if (strchr (fmt_str, target_percent) == 0)
2837 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2838 len = build_int_cstu (size_type_node, strlen (fmt_str));
2840 /* If the format is "%s" and first ... argument is a string literal,
2841 we know the size too. */
2842 else if (fcode == BUILT_IN_SPRINTF_CHK
2843 && strcmp (fmt_str, target_percent_s) == 0)
2845 tree arg;
2847 if (nargs == 5)
2849 arg = gimple_call_arg (stmt, 4);
2850 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2852 len = c_strlen (arg, 1);
2853 if (! len || ! tree_fits_uhwi_p (len))
2854 len = NULL_TREE;
2860 if (! integer_all_onesp (size))
2862 if (! len || ! tree_int_cst_lt (len, size))
2863 return false;
2866 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2867 or if format doesn't contain % chars or is "%s". */
2868 if (! integer_zerop (flag))
2870 if (fmt_str == NULL)
2871 return false;
2872 if (strchr (fmt_str, target_percent) != NULL
2873 && strcmp (fmt_str, target_percent_s))
2874 return false;
2877 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2878 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2879 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2880 if (!fn)
2881 return false;
2883 /* Replace the called function and the first 4 argument by 2 retaining
2884 trailing varargs. */
2885 gimple_call_set_fndecl (stmt, fn);
2886 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2887 gimple_call_set_arg (stmt, 0, dest);
2888 gimple_call_set_arg (stmt, 1, fmt);
2889 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2890 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2891 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2892 fold_stmt (gsi);
2893 return true;
2896 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2897 ORIG may be null if this is a 2-argument call. We don't attempt to
2898 simplify calls with more than 3 arguments.
2900 Return true if simplification was possible, otherwise false. */
2902 bool
2903 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2905 gimple *stmt = gsi_stmt (*gsi);
2906 tree dest = gimple_call_arg (stmt, 0);
2907 tree fmt = gimple_call_arg (stmt, 1);
2908 tree orig = NULL_TREE;
2909 const char *fmt_str = NULL;
2911 /* Verify the required arguments in the original call. We deal with two
2912 types of sprintf() calls: 'sprintf (str, fmt)' and
2913 'sprintf (dest, "%s", orig)'. */
2914 if (gimple_call_num_args (stmt) > 3)
2915 return false;
2917 if (gimple_call_num_args (stmt) == 3)
2918 orig = gimple_call_arg (stmt, 2);
2920 /* Check whether the format is a literal string constant. */
2921 fmt_str = c_getstr (fmt);
2922 if (fmt_str == NULL)
2923 return false;
2925 if (!init_target_chars ())
2926 return false;
2928 /* If the format doesn't contain % args or %%, use strcpy. */
2929 if (strchr (fmt_str, target_percent) == NULL)
2931 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2933 if (!fn)
2934 return false;
2936 /* Don't optimize sprintf (buf, "abc", ptr++). */
2937 if (orig)
2938 return false;
2940 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2941 'format' is known to contain no % formats. */
2942 gimple_seq stmts = NULL;
2943 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2944 gimple_seq_add_stmt_without_update (&stmts, repl);
2945 if (gimple_call_lhs (stmt))
2947 repl = gimple_build_assign (gimple_call_lhs (stmt),
2948 build_int_cst (integer_type_node,
2949 strlen (fmt_str)));
2950 gimple_seq_add_stmt_without_update (&stmts, repl);
2951 gsi_replace_with_seq_vops (gsi, stmts);
2952 /* gsi now points at the assignment to the lhs, get a
2953 stmt iterator to the memcpy call.
2954 ??? We can't use gsi_for_stmt as that doesn't work when the
2955 CFG isn't built yet. */
2956 gimple_stmt_iterator gsi2 = *gsi;
2957 gsi_prev (&gsi2);
2958 fold_stmt (&gsi2);
2960 else
2962 gsi_replace_with_seq_vops (gsi, stmts);
2963 fold_stmt (gsi);
2965 return true;
2968 /* If the format is "%s", use strcpy if the result isn't used. */
2969 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2971 tree fn;
2972 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2974 if (!fn)
2975 return false;
2977 /* Don't crash on sprintf (str1, "%s"). */
2978 if (!orig)
2979 return false;
2981 tree orig_len = NULL_TREE;
2982 if (gimple_call_lhs (stmt))
2984 orig_len = get_maxval_strlen (orig, 0);
2985 if (!orig_len)
2986 return false;
2989 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2990 gimple_seq stmts = NULL;
2991 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2992 gimple_seq_add_stmt_without_update (&stmts, repl);
2993 if (gimple_call_lhs (stmt))
2995 if (!useless_type_conversion_p (integer_type_node,
2996 TREE_TYPE (orig_len)))
2997 orig_len = fold_convert (integer_type_node, orig_len);
2998 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2999 gimple_seq_add_stmt_without_update (&stmts, repl);
3000 gsi_replace_with_seq_vops (gsi, stmts);
3001 /* gsi now points at the assignment to the lhs, get a
3002 stmt iterator to the memcpy call.
3003 ??? We can't use gsi_for_stmt as that doesn't work when the
3004 CFG isn't built yet. */
3005 gimple_stmt_iterator gsi2 = *gsi;
3006 gsi_prev (&gsi2);
3007 fold_stmt (&gsi2);
3009 else
3011 gsi_replace_with_seq_vops (gsi, stmts);
3012 fold_stmt (gsi);
3014 return true;
3016 return false;
3019 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3020 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3021 attempt to simplify calls with more than 4 arguments.
3023 Return true if simplification was possible, otherwise false. */
3025 bool
3026 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3028 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3029 tree dest = gimple_call_arg (stmt, 0);
3030 tree destsize = gimple_call_arg (stmt, 1);
3031 tree fmt = gimple_call_arg (stmt, 2);
3032 tree orig = NULL_TREE;
3033 const char *fmt_str = NULL;
3035 if (gimple_call_num_args (stmt) > 4)
3036 return false;
3038 if (gimple_call_num_args (stmt) == 4)
3039 orig = gimple_call_arg (stmt, 3);
3041 if (!tree_fits_uhwi_p (destsize))
3042 return false;
3043 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3045 /* Check whether the format is a literal string constant. */
3046 fmt_str = c_getstr (fmt);
3047 if (fmt_str == NULL)
3048 return false;
3050 if (!init_target_chars ())
3051 return false;
3053 /* If the format doesn't contain % args or %%, use strcpy. */
3054 if (strchr (fmt_str, target_percent) == NULL)
3056 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3057 if (!fn)
3058 return false;
3060 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3061 if (orig)
3062 return false;
3064 /* We could expand this as
3065 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3066 or to
3067 memcpy (str, fmt_with_nul_at_cstm1, cst);
3068 but in the former case that might increase code size
3069 and in the latter case grow .rodata section too much.
3070 So punt for now. */
3071 size_t len = strlen (fmt_str);
3072 if (len >= destlen)
3073 return false;
3075 gimple_seq stmts = NULL;
3076 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3077 gimple_seq_add_stmt_without_update (&stmts, repl);
3078 if (gimple_call_lhs (stmt))
3080 repl = gimple_build_assign (gimple_call_lhs (stmt),
3081 build_int_cst (integer_type_node, len));
3082 gimple_seq_add_stmt_without_update (&stmts, repl);
3083 gsi_replace_with_seq_vops (gsi, stmts);
3084 /* gsi now points at the assignment to the lhs, get a
3085 stmt iterator to the memcpy call.
3086 ??? We can't use gsi_for_stmt as that doesn't work when the
3087 CFG isn't built yet. */
3088 gimple_stmt_iterator gsi2 = *gsi;
3089 gsi_prev (&gsi2);
3090 fold_stmt (&gsi2);
3092 else
3094 gsi_replace_with_seq_vops (gsi, stmts);
3095 fold_stmt (gsi);
3097 return true;
3100 /* If the format is "%s", use strcpy if the result isn't used. */
3101 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3103 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3104 if (!fn)
3105 return false;
3107 /* Don't crash on snprintf (str1, cst, "%s"). */
3108 if (!orig)
3109 return false;
3111 tree orig_len = get_maxval_strlen (orig, 0);
3112 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3113 return false;
3115 /* We could expand this as
3116 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3117 or to
3118 memcpy (str1, str2_with_nul_at_cstm1, cst);
3119 but in the former case that might increase code size
3120 and in the latter case grow .rodata section too much.
3121 So punt for now. */
3122 if (compare_tree_int (orig_len, destlen) >= 0)
3123 return false;
3125 /* Convert snprintf (str1, cst, "%s", str2) into
3126 strcpy (str1, str2) if strlen (str2) < cst. */
3127 gimple_seq stmts = NULL;
3128 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3129 gimple_seq_add_stmt_without_update (&stmts, repl);
3130 if (gimple_call_lhs (stmt))
3132 if (!useless_type_conversion_p (integer_type_node,
3133 TREE_TYPE (orig_len)))
3134 orig_len = fold_convert (integer_type_node, orig_len);
3135 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3136 gimple_seq_add_stmt_without_update (&stmts, repl);
3137 gsi_replace_with_seq_vops (gsi, stmts);
3138 /* gsi now points at the assignment to the lhs, get a
3139 stmt iterator to the memcpy call.
3140 ??? We can't use gsi_for_stmt as that doesn't work when the
3141 CFG isn't built yet. */
3142 gimple_stmt_iterator gsi2 = *gsi;
3143 gsi_prev (&gsi2);
3144 fold_stmt (&gsi2);
3146 else
3148 gsi_replace_with_seq_vops (gsi, stmts);
3149 fold_stmt (gsi);
3151 return true;
3153 return false;
3156 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3157 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3158 more than 3 arguments, and ARG may be null in the 2-argument case.
3160 Return NULL_TREE if no simplification was possible, otherwise return the
3161 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3162 code of the function to be simplified. */
3164 static bool
3165 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3166 tree fp, tree fmt, tree arg,
3167 enum built_in_function fcode)
3169 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3170 tree fn_fputc, fn_fputs;
3171 const char *fmt_str = NULL;
3173 /* If the return value is used, don't do the transformation. */
3174 if (gimple_call_lhs (stmt) != NULL_TREE)
3175 return false;
3177 /* Check whether the format is a literal string constant. */
3178 fmt_str = c_getstr (fmt);
3179 if (fmt_str == NULL)
3180 return false;
3182 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3184 /* If we're using an unlocked function, assume the other
3185 unlocked functions exist explicitly. */
3186 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3187 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3189 else
3191 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3192 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3195 if (!init_target_chars ())
3196 return false;
3198 /* If the format doesn't contain % args or %%, use strcpy. */
3199 if (strchr (fmt_str, target_percent) == NULL)
3201 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3202 && arg)
3203 return false;
3205 /* If the format specifier was "", fprintf does nothing. */
3206 if (fmt_str[0] == '\0')
3208 replace_call_with_value (gsi, NULL_TREE);
3209 return true;
3212 /* When "string" doesn't contain %, replace all cases of
3213 fprintf (fp, string) with fputs (string, fp). The fputs
3214 builtin will take care of special cases like length == 1. */
3215 if (fn_fputs)
3217 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3218 replace_call_with_call_and_fold (gsi, repl);
3219 return true;
3223 /* The other optimizations can be done only on the non-va_list variants. */
3224 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3225 return false;
3227 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3228 else if (strcmp (fmt_str, target_percent_s) == 0)
3230 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3231 return false;
3232 if (fn_fputs)
3234 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3235 replace_call_with_call_and_fold (gsi, repl);
3236 return true;
3240 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3241 else if (strcmp (fmt_str, target_percent_c) == 0)
3243 if (!arg
3244 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3245 return false;
3246 if (fn_fputc)
3248 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3249 replace_call_with_call_and_fold (gsi, repl);
3250 return true;
3254 return false;
3257 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3258 FMT and ARG are the arguments to the call; we don't fold cases with
3259 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3261 Return NULL_TREE if no simplification was possible, otherwise return the
3262 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3263 code of the function to be simplified. */
3265 static bool
3266 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3267 tree arg, enum built_in_function fcode)
3269 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3270 tree fn_putchar, fn_puts, newarg;
3271 const char *fmt_str = NULL;
3273 /* If the return value is used, don't do the transformation. */
3274 if (gimple_call_lhs (stmt) != NULL_TREE)
3275 return false;
3277 /* Check whether the format is a literal string constant. */
3278 fmt_str = c_getstr (fmt);
3279 if (fmt_str == NULL)
3280 return false;
3282 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3284 /* If we're using an unlocked function, assume the other
3285 unlocked functions exist explicitly. */
3286 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3287 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3289 else
3291 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3292 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3295 if (!init_target_chars ())
3296 return false;
3298 if (strcmp (fmt_str, target_percent_s) == 0
3299 || strchr (fmt_str, target_percent) == NULL)
3301 const char *str;
3303 if (strcmp (fmt_str, target_percent_s) == 0)
3305 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3306 return false;
3308 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3309 return false;
3311 str = c_getstr (arg);
3312 if (str == NULL)
3313 return false;
3315 else
3317 /* The format specifier doesn't contain any '%' characters. */
3318 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3319 && arg)
3320 return false;
3321 str = fmt_str;
3324 /* If the string was "", printf does nothing. */
3325 if (str[0] == '\0')
3327 replace_call_with_value (gsi, NULL_TREE);
3328 return true;
3331 /* If the string has length of 1, call putchar. */
3332 if (str[1] == '\0')
3334 /* Given printf("c"), (where c is any one character,)
3335 convert "c"[0] to an int and pass that to the replacement
3336 function. */
3337 newarg = build_int_cst (integer_type_node, str[0]);
3338 if (fn_putchar)
3340 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3341 replace_call_with_call_and_fold (gsi, repl);
3342 return true;
3345 else
3347 /* If the string was "string\n", call puts("string"). */
3348 size_t len = strlen (str);
3349 if ((unsigned char)str[len - 1] == target_newline
3350 && (size_t) (int) len == len
3351 && (int) len > 0)
3353 char *newstr;
3354 tree offset_node, string_cst;
3356 /* Create a NUL-terminated string that's one char shorter
3357 than the original, stripping off the trailing '\n'. */
3358 newarg = build_string_literal (len, str);
3359 string_cst = string_constant (newarg, &offset_node);
3360 gcc_checking_assert (string_cst
3361 && (TREE_STRING_LENGTH (string_cst)
3362 == (int) len)
3363 && integer_zerop (offset_node)
3364 && (unsigned char)
3365 TREE_STRING_POINTER (string_cst)[len - 1]
3366 == target_newline);
3367 /* build_string_literal creates a new STRING_CST,
3368 modify it in place to avoid double copying. */
3369 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3370 newstr[len - 1] = '\0';
3371 if (fn_puts)
3373 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3374 replace_call_with_call_and_fold (gsi, repl);
3375 return true;
3378 else
3379 /* We'd like to arrange to call fputs(string,stdout) here,
3380 but we need stdout and don't have a way to get it yet. */
3381 return false;
3385 /* The other optimizations can be done only on the non-va_list variants. */
3386 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3387 return false;
3389 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3390 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3392 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3393 return false;
3394 if (fn_puts)
3396 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3397 replace_call_with_call_and_fold (gsi, repl);
3398 return true;
3402 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3403 else if (strcmp (fmt_str, target_percent_c) == 0)
3405 if (!arg || ! useless_type_conversion_p (integer_type_node,
3406 TREE_TYPE (arg)))
3407 return false;
3408 if (fn_putchar)
3410 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3411 replace_call_with_call_and_fold (gsi, repl);
3412 return true;
3416 return false;
3421 /* Fold a call to __builtin_strlen with known length LEN. */
3423 static bool
3424 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3426 gimple *stmt = gsi_stmt (*gsi);
3427 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3428 if (!len)
3429 return false;
3430 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3431 replace_call_with_value (gsi, len);
3432 return true;
3435 /* Fold a call to __builtin_acc_on_device. */
3437 static bool
3438 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3440 /* Defer folding until we know which compiler we're in. */
3441 if (symtab->state != EXPANSION)
3442 return false;
3444 unsigned val_host = GOMP_DEVICE_HOST;
3445 unsigned val_dev = GOMP_DEVICE_NONE;
3447 #ifdef ACCEL_COMPILER
3448 val_host = GOMP_DEVICE_NOT_HOST;
3449 val_dev = ACCEL_COMPILER_acc_device;
3450 #endif
3452 location_t loc = gimple_location (gsi_stmt (*gsi));
3454 tree host_eq = make_ssa_name (boolean_type_node);
3455 gimple *host_ass = gimple_build_assign
3456 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3457 gimple_set_location (host_ass, loc);
3458 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3460 tree dev_eq = make_ssa_name (boolean_type_node);
3461 gimple *dev_ass = gimple_build_assign
3462 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3463 gimple_set_location (dev_ass, loc);
3464 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3466 tree result = make_ssa_name (boolean_type_node);
3467 gimple *result_ass = gimple_build_assign
3468 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3469 gimple_set_location (result_ass, loc);
3470 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3472 replace_call_with_value (gsi, result);
3474 return true;
3477 /* Fold realloc (0, n) -> malloc (n). */
3479 static bool
3480 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3482 gimple *stmt = gsi_stmt (*gsi);
3483 tree arg = gimple_call_arg (stmt, 0);
3484 tree size = gimple_call_arg (stmt, 1);
3486 if (operand_equal_p (arg, null_pointer_node, 0))
3488 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3489 if (fn_malloc)
3491 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3492 replace_call_with_call_and_fold (gsi, repl);
3493 return true;
3496 return false;
3499 /* Fold the non-target builtin at *GSI and return whether any simplification
3500 was made. */
3502 static bool
3503 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3505 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3506 tree callee = gimple_call_fndecl (stmt);
3508 /* Give up for always_inline inline builtins until they are
3509 inlined. */
3510 if (avoid_folding_inline_builtin (callee))
3511 return false;
3513 unsigned n = gimple_call_num_args (stmt);
3514 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3515 switch (fcode)
3517 case BUILT_IN_BCMP:
3518 return gimple_fold_builtin_bcmp (gsi);
3519 case BUILT_IN_BCOPY:
3520 return gimple_fold_builtin_bcopy (gsi);
3521 case BUILT_IN_BZERO:
3522 return gimple_fold_builtin_bzero (gsi);
3524 case BUILT_IN_MEMSET:
3525 return gimple_fold_builtin_memset (gsi,
3526 gimple_call_arg (stmt, 1),
3527 gimple_call_arg (stmt, 2));
3528 case BUILT_IN_MEMCPY:
3529 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3530 gimple_call_arg (stmt, 1), 0);
3531 case BUILT_IN_MEMPCPY:
3532 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3533 gimple_call_arg (stmt, 1), 1);
3534 case BUILT_IN_MEMMOVE:
3535 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3536 gimple_call_arg (stmt, 1), 3);
3537 case BUILT_IN_SPRINTF_CHK:
3538 case BUILT_IN_VSPRINTF_CHK:
3539 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3540 case BUILT_IN_STRCAT_CHK:
3541 return gimple_fold_builtin_strcat_chk (gsi);
3542 case BUILT_IN_STRNCAT_CHK:
3543 return gimple_fold_builtin_strncat_chk (gsi);
3544 case BUILT_IN_STRLEN:
3545 return gimple_fold_builtin_strlen (gsi);
3546 case BUILT_IN_STRCPY:
3547 return gimple_fold_builtin_strcpy (gsi,
3548 gimple_call_arg (stmt, 0),
3549 gimple_call_arg (stmt, 1));
3550 case BUILT_IN_STRNCPY:
3551 return gimple_fold_builtin_strncpy (gsi,
3552 gimple_call_arg (stmt, 0),
3553 gimple_call_arg (stmt, 1),
3554 gimple_call_arg (stmt, 2));
3555 case BUILT_IN_STRCAT:
3556 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3557 gimple_call_arg (stmt, 1));
3558 case BUILT_IN_STRNCAT:
3559 return gimple_fold_builtin_strncat (gsi);
3560 case BUILT_IN_INDEX:
3561 case BUILT_IN_STRCHR:
3562 return gimple_fold_builtin_strchr (gsi, false);
3563 case BUILT_IN_RINDEX:
3564 case BUILT_IN_STRRCHR:
3565 return gimple_fold_builtin_strchr (gsi, true);
3566 case BUILT_IN_STRSTR:
3567 return gimple_fold_builtin_strstr (gsi);
3568 case BUILT_IN_STRCMP:
3569 case BUILT_IN_STRCASECMP:
3570 case BUILT_IN_STRNCMP:
3571 case BUILT_IN_STRNCASECMP:
3572 return gimple_fold_builtin_string_compare (gsi);
3573 case BUILT_IN_MEMCHR:
3574 return gimple_fold_builtin_memchr (gsi);
3575 case BUILT_IN_FPUTS:
3576 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3577 gimple_call_arg (stmt, 1), false);
3578 case BUILT_IN_FPUTS_UNLOCKED:
3579 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3580 gimple_call_arg (stmt, 1), true);
3581 case BUILT_IN_MEMCPY_CHK:
3582 case BUILT_IN_MEMPCPY_CHK:
3583 case BUILT_IN_MEMMOVE_CHK:
3584 case BUILT_IN_MEMSET_CHK:
3585 return gimple_fold_builtin_memory_chk (gsi,
3586 gimple_call_arg (stmt, 0),
3587 gimple_call_arg (stmt, 1),
3588 gimple_call_arg (stmt, 2),
3589 gimple_call_arg (stmt, 3),
3590 fcode);
3591 case BUILT_IN_STPCPY:
3592 return gimple_fold_builtin_stpcpy (gsi);
3593 case BUILT_IN_STRCPY_CHK:
3594 case BUILT_IN_STPCPY_CHK:
3595 return gimple_fold_builtin_stxcpy_chk (gsi,
3596 gimple_call_arg (stmt, 0),
3597 gimple_call_arg (stmt, 1),
3598 gimple_call_arg (stmt, 2),
3599 fcode);
3600 case BUILT_IN_STRNCPY_CHK:
3601 case BUILT_IN_STPNCPY_CHK:
3602 return gimple_fold_builtin_stxncpy_chk (gsi,
3603 gimple_call_arg (stmt, 0),
3604 gimple_call_arg (stmt, 1),
3605 gimple_call_arg (stmt, 2),
3606 gimple_call_arg (stmt, 3),
3607 fcode);
3608 case BUILT_IN_SNPRINTF_CHK:
3609 case BUILT_IN_VSNPRINTF_CHK:
3610 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3612 case BUILT_IN_FPRINTF:
3613 case BUILT_IN_FPRINTF_UNLOCKED:
3614 case BUILT_IN_VFPRINTF:
3615 if (n == 2 || n == 3)
3616 return gimple_fold_builtin_fprintf (gsi,
3617 gimple_call_arg (stmt, 0),
3618 gimple_call_arg (stmt, 1),
3619 n == 3
3620 ? gimple_call_arg (stmt, 2)
3621 : NULL_TREE,
3622 fcode);
3623 break;
3624 case BUILT_IN_FPRINTF_CHK:
3625 case BUILT_IN_VFPRINTF_CHK:
3626 if (n == 3 || n == 4)
3627 return gimple_fold_builtin_fprintf (gsi,
3628 gimple_call_arg (stmt, 0),
3629 gimple_call_arg (stmt, 2),
3630 n == 4
3631 ? gimple_call_arg (stmt, 3)
3632 : NULL_TREE,
3633 fcode);
3634 break;
3635 case BUILT_IN_PRINTF:
3636 case BUILT_IN_PRINTF_UNLOCKED:
3637 case BUILT_IN_VPRINTF:
3638 if (n == 1 || n == 2)
3639 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3640 n == 2
3641 ? gimple_call_arg (stmt, 1)
3642 : NULL_TREE, fcode);
3643 break;
3644 case BUILT_IN_PRINTF_CHK:
3645 case BUILT_IN_VPRINTF_CHK:
3646 if (n == 2 || n == 3)
3647 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3648 n == 3
3649 ? gimple_call_arg (stmt, 2)
3650 : NULL_TREE, fcode);
3651 break;
3652 case BUILT_IN_ACC_ON_DEVICE:
3653 return gimple_fold_builtin_acc_on_device (gsi,
3654 gimple_call_arg (stmt, 0));
3655 case BUILT_IN_REALLOC:
3656 return gimple_fold_builtin_realloc (gsi);
3658 default:;
3661 /* Try the generic builtin folder. */
3662 bool ignore = (gimple_call_lhs (stmt) == NULL);
3663 tree result = fold_call_stmt (stmt, ignore);
3664 if (result)
3666 if (ignore)
3667 STRIP_NOPS (result);
3668 else
3669 result = fold_convert (gimple_call_return_type (stmt), result);
3670 if (!update_call_from_tree (gsi, result))
3671 gimplify_and_update_call_from_tree (gsi, result);
3672 return true;
3675 return false;
3678 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3679 function calls to constants, where possible. */
3681 static tree
3682 fold_internal_goacc_dim (const gimple *call)
3684 int axis = oacc_get_ifn_dim_arg (call);
3685 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3686 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3687 tree result = NULL_TREE;
3689 /* If the size is 1, or we only want the size and it is not dynamic,
3690 we know the answer. */
3691 if (size == 1 || (!is_pos && size))
3693 tree type = TREE_TYPE (gimple_call_lhs (call));
3694 result = build_int_cst (type, size - is_pos);
3697 return result;
3700 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3701 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3702 &var where var is only addressable because of such calls. */
3704 bool
3705 optimize_atomic_compare_exchange_p (gimple *stmt)
3707 if (gimple_call_num_args (stmt) != 6
3708 || !flag_inline_atomics
3709 || !optimize
3710 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3711 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3712 || !gimple_vdef (stmt)
3713 || !gimple_vuse (stmt))
3714 return false;
3716 tree fndecl = gimple_call_fndecl (stmt);
3717 switch (DECL_FUNCTION_CODE (fndecl))
3719 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3720 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3721 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3722 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3723 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3724 break;
3725 default:
3726 return false;
3729 tree expected = gimple_call_arg (stmt, 1);
3730 if (TREE_CODE (expected) != ADDR_EXPR
3731 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3732 return false;
3734 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3735 if (!is_gimple_reg_type (etype)
3736 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3737 || TREE_THIS_VOLATILE (etype)
3738 || VECTOR_TYPE_P (etype)
3739 || TREE_CODE (etype) == COMPLEX_TYPE
3740 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3741 might not preserve all the bits. See PR71716. */
3742 || SCALAR_FLOAT_TYPE_P (etype)
3743 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3744 return false;
3746 tree weak = gimple_call_arg (stmt, 3);
3747 if (!integer_zerop (weak) && !integer_onep (weak))
3748 return false;
3750 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3751 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3752 machine_mode mode = TYPE_MODE (itype);
3754 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3755 == CODE_FOR_nothing
3756 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3757 return false;
3759 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3760 return false;
3762 return true;
3765 /* Fold
3766 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3767 into
3768 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3769 i = IMAGPART_EXPR <t>;
3770 r = (_Bool) i;
3771 e = REALPART_EXPR <t>; */
3773 void
3774 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3776 gimple *stmt = gsi_stmt (*gsi);
3777 tree fndecl = gimple_call_fndecl (stmt);
3778 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3779 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3780 tree ctype = build_complex_type (itype);
3781 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3782 bool throws = false;
3783 edge e = NULL;
3784 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3785 expected);
3786 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3787 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3788 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3790 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3791 build1 (VIEW_CONVERT_EXPR, itype,
3792 gimple_assign_lhs (g)));
3793 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3795 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3796 + int_size_in_bytes (itype);
3797 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3798 gimple_call_arg (stmt, 0),
3799 gimple_assign_lhs (g),
3800 gimple_call_arg (stmt, 2),
3801 build_int_cst (integer_type_node, flag),
3802 gimple_call_arg (stmt, 4),
3803 gimple_call_arg (stmt, 5));
3804 tree lhs = make_ssa_name (ctype);
3805 gimple_call_set_lhs (g, lhs);
3806 gimple_set_vdef (g, gimple_vdef (stmt));
3807 gimple_set_vuse (g, gimple_vuse (stmt));
3808 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3809 tree oldlhs = gimple_call_lhs (stmt);
3810 if (stmt_can_throw_internal (stmt))
3812 throws = true;
3813 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3815 gimple_call_set_nothrow (as_a <gcall *> (g),
3816 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3817 gimple_call_set_lhs (stmt, NULL_TREE);
3818 gsi_replace (gsi, g, true);
3819 if (oldlhs)
3821 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3822 build1 (IMAGPART_EXPR, itype, lhs));
3823 if (throws)
3825 gsi_insert_on_edge_immediate (e, g);
3826 *gsi = gsi_for_stmt (g);
3828 else
3829 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3830 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3831 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3833 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3834 build1 (REALPART_EXPR, itype, lhs));
3835 if (throws && oldlhs == NULL_TREE)
3837 gsi_insert_on_edge_immediate (e, g);
3838 *gsi = gsi_for_stmt (g);
3840 else
3841 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3842 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3844 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3845 VIEW_CONVERT_EXPR,
3846 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3847 gimple_assign_lhs (g)));
3848 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3850 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3851 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3852 *gsi = gsiret;
3855 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3856 doesn't fit into TYPE. The test for overflow should be regardless of
3857 -fwrapv, and even for unsigned types. */
3859 bool
3860 arith_overflowed_p (enum tree_code code, const_tree type,
3861 const_tree arg0, const_tree arg1)
3863 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3864 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3865 widest2_int_cst;
3866 widest2_int warg0 = widest2_int_cst (arg0);
3867 widest2_int warg1 = widest2_int_cst (arg1);
3868 widest2_int wres;
3869 switch (code)
3871 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3872 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3873 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3874 default: gcc_unreachable ();
3876 signop sign = TYPE_SIGN (type);
3877 if (sign == UNSIGNED && wi::neg_p (wres))
3878 return true;
3879 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3882 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3883 The statement may be replaced by another statement, e.g., if the call
3884 simplifies to a constant value. Return true if any changes were made.
3885 It is assumed that the operands have been previously folded. */
3887 static bool
3888 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3890 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3891 tree callee;
3892 bool changed = false;
3893 unsigned i;
3895 /* Fold *& in call arguments. */
3896 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3897 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3899 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3900 if (tmp)
3902 gimple_call_set_arg (stmt, i, tmp);
3903 changed = true;
3907 /* Check for virtual calls that became direct calls. */
3908 callee = gimple_call_fn (stmt);
3909 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3911 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3913 if (dump_file && virtual_method_call_p (callee)
3914 && !possible_polymorphic_call_target_p
3915 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3916 (OBJ_TYPE_REF_EXPR (callee)))))
3918 fprintf (dump_file,
3919 "Type inheritance inconsistent devirtualization of ");
3920 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3921 fprintf (dump_file, " to ");
3922 print_generic_expr (dump_file, callee, TDF_SLIM);
3923 fprintf (dump_file, "\n");
3926 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3927 changed = true;
3929 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3931 bool final;
3932 vec <cgraph_node *>targets
3933 = possible_polymorphic_call_targets (callee, stmt, &final);
3934 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3936 tree lhs = gimple_call_lhs (stmt);
3937 if (dump_enabled_p ())
3939 location_t loc = gimple_location_safe (stmt);
3940 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3941 "folding virtual function call to %s\n",
3942 targets.length () == 1
3943 ? targets[0]->name ()
3944 : "__builtin_unreachable");
3946 if (targets.length () == 1)
3948 tree fndecl = targets[0]->decl;
3949 gimple_call_set_fndecl (stmt, fndecl);
3950 changed = true;
3951 /* If changing the call to __cxa_pure_virtual
3952 or similar noreturn function, adjust gimple_call_fntype
3953 too. */
3954 if (gimple_call_noreturn_p (stmt)
3955 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3956 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3957 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3958 == void_type_node))
3959 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3960 /* If the call becomes noreturn, remove the lhs. */
3961 if (lhs
3962 && gimple_call_noreturn_p (stmt)
3963 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3964 || should_remove_lhs_p (lhs)))
3966 if (TREE_CODE (lhs) == SSA_NAME)
3968 tree var = create_tmp_var (TREE_TYPE (lhs));
3969 tree def = get_or_create_ssa_default_def (cfun, var);
3970 gimple *new_stmt = gimple_build_assign (lhs, def);
3971 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3973 gimple_call_set_lhs (stmt, NULL_TREE);
3975 maybe_remove_unused_call_args (cfun, stmt);
3977 else
3979 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3980 gimple *new_stmt = gimple_build_call (fndecl, 0);
3981 gimple_set_location (new_stmt, gimple_location (stmt));
3982 /* If the call had a SSA name as lhs morph that into
3983 an uninitialized value. */
3984 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3986 tree var = create_tmp_var (TREE_TYPE (lhs));
3987 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
3988 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
3989 set_ssa_default_def (cfun, var, lhs);
3991 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3992 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3993 gsi_replace (gsi, new_stmt, false);
3994 return true;
4000 /* Check for indirect calls that became direct calls, and then
4001 no longer require a static chain. */
4002 if (gimple_call_chain (stmt))
4004 tree fn = gimple_call_fndecl (stmt);
4005 if (fn && !DECL_STATIC_CHAIN (fn))
4007 gimple_call_set_chain (stmt, NULL);
4008 changed = true;
4010 else
4012 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4013 if (tmp)
4015 gimple_call_set_chain (stmt, tmp);
4016 changed = true;
4021 if (inplace)
4022 return changed;
4024 /* Check for builtins that CCP can handle using information not
4025 available in the generic fold routines. */
4026 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4028 if (gimple_fold_builtin (gsi))
4029 changed = true;
4031 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4033 changed |= targetm.gimple_fold_builtin (gsi);
4035 else if (gimple_call_internal_p (stmt))
4037 enum tree_code subcode = ERROR_MARK;
4038 tree result = NULL_TREE;
4039 bool cplx_result = false;
4040 tree overflow = NULL_TREE;
4041 switch (gimple_call_internal_fn (stmt))
4043 case IFN_BUILTIN_EXPECT:
4044 result = fold_builtin_expect (gimple_location (stmt),
4045 gimple_call_arg (stmt, 0),
4046 gimple_call_arg (stmt, 1),
4047 gimple_call_arg (stmt, 2));
4048 break;
4049 case IFN_UBSAN_OBJECT_SIZE:
4051 tree offset = gimple_call_arg (stmt, 1);
4052 tree objsize = gimple_call_arg (stmt, 2);
4053 if (integer_all_onesp (objsize)
4054 || (TREE_CODE (offset) == INTEGER_CST
4055 && TREE_CODE (objsize) == INTEGER_CST
4056 && tree_int_cst_le (offset, objsize)))
4058 replace_call_with_value (gsi, NULL_TREE);
4059 return true;
4062 break;
4063 case IFN_UBSAN_PTR:
4064 if (integer_zerop (gimple_call_arg (stmt, 1)))
4066 replace_call_with_value (gsi, NULL_TREE);
4067 return true;
4069 break;
4070 case IFN_UBSAN_BOUNDS:
4072 tree index = gimple_call_arg (stmt, 1);
4073 tree bound = gimple_call_arg (stmt, 2);
4074 if (TREE_CODE (index) == INTEGER_CST
4075 && TREE_CODE (bound) == INTEGER_CST)
4077 index = fold_convert (TREE_TYPE (bound), index);
4078 if (TREE_CODE (index) == INTEGER_CST
4079 && tree_int_cst_le (index, bound))
4081 replace_call_with_value (gsi, NULL_TREE);
4082 return true;
4086 break;
4087 case IFN_GOACC_DIM_SIZE:
4088 case IFN_GOACC_DIM_POS:
4089 result = fold_internal_goacc_dim (stmt);
4090 break;
4091 case IFN_UBSAN_CHECK_ADD:
4092 subcode = PLUS_EXPR;
4093 break;
4094 case IFN_UBSAN_CHECK_SUB:
4095 subcode = MINUS_EXPR;
4096 break;
4097 case IFN_UBSAN_CHECK_MUL:
4098 subcode = MULT_EXPR;
4099 break;
4100 case IFN_ADD_OVERFLOW:
4101 subcode = PLUS_EXPR;
4102 cplx_result = true;
4103 break;
4104 case IFN_SUB_OVERFLOW:
4105 subcode = MINUS_EXPR;
4106 cplx_result = true;
4107 break;
4108 case IFN_MUL_OVERFLOW:
4109 subcode = MULT_EXPR;
4110 cplx_result = true;
4111 break;
4112 default:
4113 break;
4115 if (subcode != ERROR_MARK)
4117 tree arg0 = gimple_call_arg (stmt, 0);
4118 tree arg1 = gimple_call_arg (stmt, 1);
4119 tree type = TREE_TYPE (arg0);
4120 if (cplx_result)
4122 tree lhs = gimple_call_lhs (stmt);
4123 if (lhs == NULL_TREE)
4124 type = NULL_TREE;
4125 else
4126 type = TREE_TYPE (TREE_TYPE (lhs));
4128 if (type == NULL_TREE)
4130 /* x = y + 0; x = y - 0; x = y * 0; */
4131 else if (integer_zerop (arg1))
4132 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4133 /* x = 0 + y; x = 0 * y; */
4134 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4135 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4136 /* x = y - y; */
4137 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4138 result = integer_zero_node;
4139 /* x = y * 1; x = 1 * y; */
4140 else if (subcode == MULT_EXPR && integer_onep (arg1))
4141 result = arg0;
4142 else if (subcode == MULT_EXPR && integer_onep (arg0))
4143 result = arg1;
4144 else if (TREE_CODE (arg0) == INTEGER_CST
4145 && TREE_CODE (arg1) == INTEGER_CST)
4147 if (cplx_result)
4148 result = int_const_binop (subcode, fold_convert (type, arg0),
4149 fold_convert (type, arg1));
4150 else
4151 result = int_const_binop (subcode, arg0, arg1);
4152 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4154 if (cplx_result)
4155 overflow = build_one_cst (type);
4156 else
4157 result = NULL_TREE;
4160 if (result)
4162 if (result == integer_zero_node)
4163 result = build_zero_cst (type);
4164 else if (cplx_result && TREE_TYPE (result) != type)
4166 if (TREE_CODE (result) == INTEGER_CST)
4168 if (arith_overflowed_p (PLUS_EXPR, type, result,
4169 integer_zero_node))
4170 overflow = build_one_cst (type);
4172 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4173 && TYPE_UNSIGNED (type))
4174 || (TYPE_PRECISION (type)
4175 < (TYPE_PRECISION (TREE_TYPE (result))
4176 + (TYPE_UNSIGNED (TREE_TYPE (result))
4177 && !TYPE_UNSIGNED (type)))))
4178 result = NULL_TREE;
4179 if (result)
4180 result = fold_convert (type, result);
4185 if (result)
4187 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4188 result = drop_tree_overflow (result);
4189 if (cplx_result)
4191 if (overflow == NULL_TREE)
4192 overflow = build_zero_cst (TREE_TYPE (result));
4193 tree ctype = build_complex_type (TREE_TYPE (result));
4194 if (TREE_CODE (result) == INTEGER_CST
4195 && TREE_CODE (overflow) == INTEGER_CST)
4196 result = build_complex (ctype, result, overflow);
4197 else
4198 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4199 ctype, result, overflow);
4201 if (!update_call_from_tree (gsi, result))
4202 gimplify_and_update_call_from_tree (gsi, result);
4203 changed = true;
4207 return changed;
4211 /* Return true whether NAME has a use on STMT. */
4213 static bool
4214 has_use_on_stmt (tree name, gimple *stmt)
4216 imm_use_iterator iter;
4217 use_operand_p use_p;
4218 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4219 if (USE_STMT (use_p) == stmt)
4220 return true;
4221 return false;
4224 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4225 gimple_simplify.
4227 Replaces *GSI with the simplification result in RCODE and OPS
4228 and the associated statements in *SEQ. Does the replacement
4229 according to INPLACE and returns true if the operation succeeded. */
4231 static bool
4232 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4233 code_helper rcode, tree *ops,
4234 gimple_seq *seq, bool inplace)
4236 gimple *stmt = gsi_stmt (*gsi);
4238 /* Play safe and do not allow abnormals to be mentioned in
4239 newly created statements. See also maybe_push_res_to_seq.
4240 As an exception allow such uses if there was a use of the
4241 same SSA name on the old stmt. */
4242 if ((TREE_CODE (ops[0]) == SSA_NAME
4243 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4244 && !has_use_on_stmt (ops[0], stmt))
4245 || (ops[1]
4246 && TREE_CODE (ops[1]) == SSA_NAME
4247 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4248 && !has_use_on_stmt (ops[1], stmt))
4249 || (ops[2]
4250 && TREE_CODE (ops[2]) == SSA_NAME
4251 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4252 && !has_use_on_stmt (ops[2], stmt))
4253 || (COMPARISON_CLASS_P (ops[0])
4254 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4255 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4256 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4257 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4258 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4259 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4260 return false;
4262 /* Don't insert new statements when INPLACE is true, even if we could
4263 reuse STMT for the final statement. */
4264 if (inplace && !gimple_seq_empty_p (*seq))
4265 return false;
4267 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4269 gcc_assert (rcode.is_tree_code ());
4270 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4271 /* GIMPLE_CONDs condition may not throw. */
4272 && (!flag_exceptions
4273 || !cfun->can_throw_non_call_exceptions
4274 || !operation_could_trap_p (rcode,
4275 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4276 false, NULL_TREE)))
4277 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4278 else if (rcode == SSA_NAME)
4279 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4280 build_zero_cst (TREE_TYPE (ops[0])));
4281 else if (rcode == INTEGER_CST)
4283 if (integer_zerop (ops[0]))
4284 gimple_cond_make_false (cond_stmt);
4285 else
4286 gimple_cond_make_true (cond_stmt);
4288 else if (!inplace)
4290 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4291 ops, seq);
4292 if (!res)
4293 return false;
4294 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4295 build_zero_cst (TREE_TYPE (res)));
4297 else
4298 return false;
4299 if (dump_file && (dump_flags & TDF_DETAILS))
4301 fprintf (dump_file, "gimple_simplified to ");
4302 if (!gimple_seq_empty_p (*seq))
4303 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4304 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4305 0, TDF_SLIM);
4307 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4308 return true;
4310 else if (is_gimple_assign (stmt)
4311 && rcode.is_tree_code ())
4313 if (!inplace
4314 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4316 maybe_build_generic_op (rcode,
4317 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4318 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4319 if (dump_file && (dump_flags & TDF_DETAILS))
4321 fprintf (dump_file, "gimple_simplified to ");
4322 if (!gimple_seq_empty_p (*seq))
4323 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4324 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4325 0, TDF_SLIM);
4327 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4328 return true;
4331 else if (rcode.is_fn_code ()
4332 && gimple_call_combined_fn (stmt) == rcode)
4334 unsigned i;
4335 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4337 gcc_assert (ops[i] != NULL_TREE);
4338 gimple_call_set_arg (stmt, i, ops[i]);
4340 if (i < 3)
4341 gcc_assert (ops[i] == NULL_TREE);
4342 if (dump_file && (dump_flags & TDF_DETAILS))
4344 fprintf (dump_file, "gimple_simplified to ");
4345 if (!gimple_seq_empty_p (*seq))
4346 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4347 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4349 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4350 return true;
4352 else if (!inplace)
4354 if (gimple_has_lhs (stmt))
4356 tree lhs = gimple_get_lhs (stmt);
4357 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4358 ops, seq, lhs))
4359 return false;
4360 if (dump_file && (dump_flags & TDF_DETAILS))
4362 fprintf (dump_file, "gimple_simplified to ");
4363 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4365 gsi_replace_with_seq_vops (gsi, *seq);
4366 return true;
4368 else
4369 gcc_unreachable ();
4372 return false;
4375 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4377 static bool
4378 maybe_canonicalize_mem_ref_addr (tree *t)
4380 bool res = false;
4382 if (TREE_CODE (*t) == ADDR_EXPR)
4383 t = &TREE_OPERAND (*t, 0);
4385 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4386 generic vector extension. The actual vector referenced is
4387 view-converted to an array type for this purpose. If the index
4388 is constant the canonical representation in the middle-end is a
4389 BIT_FIELD_REF so re-write the former to the latter here. */
4390 if (TREE_CODE (*t) == ARRAY_REF
4391 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4392 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4393 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4395 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4396 if (VECTOR_TYPE_P (vtype))
4398 tree low = array_ref_low_bound (*t);
4399 if (TREE_CODE (low) == INTEGER_CST)
4401 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4403 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4404 wi::to_widest (low));
4405 idx = wi::mul (idx, wi::to_widest
4406 (TYPE_SIZE (TREE_TYPE (*t))));
4407 widest_int ext
4408 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4409 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4411 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4412 TREE_TYPE (*t),
4413 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4414 TYPE_SIZE (TREE_TYPE (*t)),
4415 wide_int_to_tree (bitsizetype, idx));
4416 res = true;
4423 while (handled_component_p (*t))
4424 t = &TREE_OPERAND (*t, 0);
4426 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4427 of invariant addresses into a SSA name MEM_REF address. */
4428 if (TREE_CODE (*t) == MEM_REF
4429 || TREE_CODE (*t) == TARGET_MEM_REF)
4431 tree addr = TREE_OPERAND (*t, 0);
4432 if (TREE_CODE (addr) == ADDR_EXPR
4433 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4434 || handled_component_p (TREE_OPERAND (addr, 0))))
4436 tree base;
4437 HOST_WIDE_INT coffset;
4438 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4439 &coffset);
4440 if (!base)
4441 gcc_unreachable ();
4443 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4444 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4445 TREE_OPERAND (*t, 1),
4446 size_int (coffset));
4447 res = true;
4449 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4450 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4453 /* Canonicalize back MEM_REFs to plain reference trees if the object
4454 accessed is a decl that has the same access semantics as the MEM_REF. */
4455 if (TREE_CODE (*t) == MEM_REF
4456 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4457 && integer_zerop (TREE_OPERAND (*t, 1))
4458 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4460 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4461 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4462 if (/* Same volatile qualification. */
4463 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4464 /* Same TBAA behavior with -fstrict-aliasing. */
4465 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4466 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4467 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4468 /* Same alignment. */
4469 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4470 /* We have to look out here to not drop a required conversion
4471 from the rhs to the lhs if *t appears on the lhs or vice-versa
4472 if it appears on the rhs. Thus require strict type
4473 compatibility. */
4474 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4476 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4477 res = true;
4481 /* Canonicalize TARGET_MEM_REF in particular with respect to
4482 the indexes becoming constant. */
4483 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4485 tree tem = maybe_fold_tmr (*t);
4486 if (tem)
4488 *t = tem;
4489 res = true;
4493 return res;
4496 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4497 distinguishes both cases. */
4499 static bool
4500 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4502 bool changed = false;
4503 gimple *stmt = gsi_stmt (*gsi);
4504 bool nowarning = gimple_no_warning_p (stmt);
4505 unsigned i;
4506 fold_defer_overflow_warnings ();
4508 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4509 after propagation.
4510 ??? This shouldn't be done in generic folding but in the
4511 propagation helpers which also know whether an address was
4512 propagated.
4513 Also canonicalize operand order. */
4514 switch (gimple_code (stmt))
4516 case GIMPLE_ASSIGN:
4517 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4519 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4520 if ((REFERENCE_CLASS_P (*rhs)
4521 || TREE_CODE (*rhs) == ADDR_EXPR)
4522 && maybe_canonicalize_mem_ref_addr (rhs))
4523 changed = true;
4524 tree *lhs = gimple_assign_lhs_ptr (stmt);
4525 if (REFERENCE_CLASS_P (*lhs)
4526 && maybe_canonicalize_mem_ref_addr (lhs))
4527 changed = true;
4529 else
4531 /* Canonicalize operand order. */
4532 enum tree_code code = gimple_assign_rhs_code (stmt);
4533 if (TREE_CODE_CLASS (code) == tcc_comparison
4534 || commutative_tree_code (code)
4535 || commutative_ternary_tree_code (code))
4537 tree rhs1 = gimple_assign_rhs1 (stmt);
4538 tree rhs2 = gimple_assign_rhs2 (stmt);
4539 if (tree_swap_operands_p (rhs1, rhs2))
4541 gimple_assign_set_rhs1 (stmt, rhs2);
4542 gimple_assign_set_rhs2 (stmt, rhs1);
4543 if (TREE_CODE_CLASS (code) == tcc_comparison)
4544 gimple_assign_set_rhs_code (stmt,
4545 swap_tree_comparison (code));
4546 changed = true;
4550 break;
4551 case GIMPLE_CALL:
4553 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4555 tree *arg = gimple_call_arg_ptr (stmt, i);
4556 if (REFERENCE_CLASS_P (*arg)
4557 && maybe_canonicalize_mem_ref_addr (arg))
4558 changed = true;
4560 tree *lhs = gimple_call_lhs_ptr (stmt);
4561 if (*lhs
4562 && REFERENCE_CLASS_P (*lhs)
4563 && maybe_canonicalize_mem_ref_addr (lhs))
4564 changed = true;
4565 break;
4567 case GIMPLE_ASM:
4569 gasm *asm_stmt = as_a <gasm *> (stmt);
4570 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4572 tree link = gimple_asm_output_op (asm_stmt, i);
4573 tree op = TREE_VALUE (link);
4574 if (REFERENCE_CLASS_P (op)
4575 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4576 changed = true;
4578 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4580 tree link = gimple_asm_input_op (asm_stmt, i);
4581 tree op = TREE_VALUE (link);
4582 if ((REFERENCE_CLASS_P (op)
4583 || TREE_CODE (op) == ADDR_EXPR)
4584 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4585 changed = true;
4588 break;
4589 case GIMPLE_DEBUG:
4590 if (gimple_debug_bind_p (stmt))
4592 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4593 if (*val
4594 && (REFERENCE_CLASS_P (*val)
4595 || TREE_CODE (*val) == ADDR_EXPR)
4596 && maybe_canonicalize_mem_ref_addr (val))
4597 changed = true;
4599 break;
4600 case GIMPLE_COND:
4602 /* Canonicalize operand order. */
4603 tree lhs = gimple_cond_lhs (stmt);
4604 tree rhs = gimple_cond_rhs (stmt);
4605 if (tree_swap_operands_p (lhs, rhs))
4607 gcond *gc = as_a <gcond *> (stmt);
4608 gimple_cond_set_lhs (gc, rhs);
4609 gimple_cond_set_rhs (gc, lhs);
4610 gimple_cond_set_code (gc,
4611 swap_tree_comparison (gimple_cond_code (gc)));
4612 changed = true;
4615 default:;
4618 /* Dispatch to pattern-based folding. */
4619 if (!inplace
4620 || is_gimple_assign (stmt)
4621 || gimple_code (stmt) == GIMPLE_COND)
4623 gimple_seq seq = NULL;
4624 code_helper rcode;
4625 tree ops[3] = {};
4626 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4627 valueize, valueize))
4629 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4630 changed = true;
4631 else
4632 gimple_seq_discard (seq);
4636 stmt = gsi_stmt (*gsi);
4638 /* Fold the main computation performed by the statement. */
4639 switch (gimple_code (stmt))
4641 case GIMPLE_ASSIGN:
4643 /* Try to canonicalize for boolean-typed X the comparisons
4644 X == 0, X == 1, X != 0, and X != 1. */
4645 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4646 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4648 tree lhs = gimple_assign_lhs (stmt);
4649 tree op1 = gimple_assign_rhs1 (stmt);
4650 tree op2 = gimple_assign_rhs2 (stmt);
4651 tree type = TREE_TYPE (op1);
4653 /* Check whether the comparison operands are of the same boolean
4654 type as the result type is.
4655 Check that second operand is an integer-constant with value
4656 one or zero. */
4657 if (TREE_CODE (op2) == INTEGER_CST
4658 && (integer_zerop (op2) || integer_onep (op2))
4659 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4661 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4662 bool is_logical_not = false;
4664 /* X == 0 and X != 1 is a logical-not.of X
4665 X == 1 and X != 0 is X */
4666 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4667 || (cmp_code == NE_EXPR && integer_onep (op2)))
4668 is_logical_not = true;
4670 if (is_logical_not == false)
4671 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4672 /* Only for one-bit precision typed X the transformation
4673 !X -> ~X is valied. */
4674 else if (TYPE_PRECISION (type) == 1)
4675 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4676 /* Otherwise we use !X -> X ^ 1. */
4677 else
4678 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4679 build_int_cst (type, 1));
4680 changed = true;
4681 break;
4685 unsigned old_num_ops = gimple_num_ops (stmt);
4686 tree lhs = gimple_assign_lhs (stmt);
4687 tree new_rhs = fold_gimple_assign (gsi);
4688 if (new_rhs
4689 && !useless_type_conversion_p (TREE_TYPE (lhs),
4690 TREE_TYPE (new_rhs)))
4691 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4692 if (new_rhs
4693 && (!inplace
4694 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4696 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4697 changed = true;
4699 break;
4702 case GIMPLE_CALL:
4703 changed |= gimple_fold_call (gsi, inplace);
4704 break;
4706 case GIMPLE_ASM:
4707 /* Fold *& in asm operands. */
4709 gasm *asm_stmt = as_a <gasm *> (stmt);
4710 size_t noutputs;
4711 const char **oconstraints;
4712 const char *constraint;
4713 bool allows_mem, allows_reg;
4715 noutputs = gimple_asm_noutputs (asm_stmt);
4716 oconstraints = XALLOCAVEC (const char *, noutputs);
4718 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4720 tree link = gimple_asm_output_op (asm_stmt, i);
4721 tree op = TREE_VALUE (link);
4722 oconstraints[i]
4723 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4724 if (REFERENCE_CLASS_P (op)
4725 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4727 TREE_VALUE (link) = op;
4728 changed = true;
4731 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4733 tree link = gimple_asm_input_op (asm_stmt, i);
4734 tree op = TREE_VALUE (link);
4735 constraint
4736 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4737 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4738 oconstraints, &allows_mem, &allows_reg);
4739 if (REFERENCE_CLASS_P (op)
4740 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4741 != NULL_TREE)
4743 TREE_VALUE (link) = op;
4744 changed = true;
4748 break;
4750 case GIMPLE_DEBUG:
4751 if (gimple_debug_bind_p (stmt))
4753 tree val = gimple_debug_bind_get_value (stmt);
4754 if (val
4755 && REFERENCE_CLASS_P (val))
4757 tree tem = maybe_fold_reference (val, false);
4758 if (tem)
4760 gimple_debug_bind_set_value (stmt, tem);
4761 changed = true;
4764 else if (val
4765 && TREE_CODE (val) == ADDR_EXPR)
4767 tree ref = TREE_OPERAND (val, 0);
4768 tree tem = maybe_fold_reference (ref, false);
4769 if (tem)
4771 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4772 gimple_debug_bind_set_value (stmt, tem);
4773 changed = true;
4777 break;
4779 case GIMPLE_RETURN:
4781 greturn *ret_stmt = as_a<greturn *> (stmt);
4782 tree ret = gimple_return_retval(ret_stmt);
4784 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4786 tree val = valueize (ret);
4787 if (val && val != ret
4788 && may_propagate_copy (ret, val))
4790 gimple_return_set_retval (ret_stmt, val);
4791 changed = true;
4795 break;
4797 default:;
4800 stmt = gsi_stmt (*gsi);
4802 /* Fold *& on the lhs. */
4803 if (gimple_has_lhs (stmt))
4805 tree lhs = gimple_get_lhs (stmt);
4806 if (lhs && REFERENCE_CLASS_P (lhs))
4808 tree new_lhs = maybe_fold_reference (lhs, true);
4809 if (new_lhs)
4811 gimple_set_lhs (stmt, new_lhs);
4812 changed = true;
4817 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4818 return changed;
4821 /* Valueziation callback that ends up not following SSA edges. */
4823 tree
4824 no_follow_ssa_edges (tree)
4826 return NULL_TREE;
4829 /* Valueization callback that ends up following single-use SSA edges only. */
4831 tree
4832 follow_single_use_edges (tree val)
4834 if (TREE_CODE (val) == SSA_NAME
4835 && !has_single_use (val))
4836 return NULL_TREE;
4837 return val;
4840 /* Fold the statement pointed to by GSI. In some cases, this function may
4841 replace the whole statement with a new one. Returns true iff folding
4842 makes any changes.
4843 The statement pointed to by GSI should be in valid gimple form but may
4844 be in unfolded state as resulting from for example constant propagation
4845 which can produce *&x = 0. */
4847 bool
4848 fold_stmt (gimple_stmt_iterator *gsi)
4850 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4853 bool
4854 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4856 return fold_stmt_1 (gsi, false, valueize);
4859 /* Perform the minimal folding on statement *GSI. Only operations like
4860 *&x created by constant propagation are handled. The statement cannot
4861 be replaced with a new one. Return true if the statement was
4862 changed, false otherwise.
4863 The statement *GSI should be in valid gimple form but may
4864 be in unfolded state as resulting from for example constant propagation
4865 which can produce *&x = 0. */
4867 bool
4868 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4870 gimple *stmt = gsi_stmt (*gsi);
4871 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4872 gcc_assert (gsi_stmt (*gsi) == stmt);
4873 return changed;
4876 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4877 if EXPR is null or we don't know how.
4878 If non-null, the result always has boolean type. */
4880 static tree
4881 canonicalize_bool (tree expr, bool invert)
4883 if (!expr)
4884 return NULL_TREE;
4885 else if (invert)
4887 if (integer_nonzerop (expr))
4888 return boolean_false_node;
4889 else if (integer_zerop (expr))
4890 return boolean_true_node;
4891 else if (TREE_CODE (expr) == SSA_NAME)
4892 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4893 build_int_cst (TREE_TYPE (expr), 0));
4894 else if (COMPARISON_CLASS_P (expr))
4895 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4896 boolean_type_node,
4897 TREE_OPERAND (expr, 0),
4898 TREE_OPERAND (expr, 1));
4899 else
4900 return NULL_TREE;
4902 else
4904 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4905 return expr;
4906 if (integer_nonzerop (expr))
4907 return boolean_true_node;
4908 else if (integer_zerop (expr))
4909 return boolean_false_node;
4910 else if (TREE_CODE (expr) == SSA_NAME)
4911 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4912 build_int_cst (TREE_TYPE (expr), 0));
4913 else if (COMPARISON_CLASS_P (expr))
4914 return fold_build2 (TREE_CODE (expr),
4915 boolean_type_node,
4916 TREE_OPERAND (expr, 0),
4917 TREE_OPERAND (expr, 1));
4918 else
4919 return NULL_TREE;
4923 /* Check to see if a boolean expression EXPR is logically equivalent to the
4924 comparison (OP1 CODE OP2). Check for various identities involving
4925 SSA_NAMEs. */
4927 static bool
4928 same_bool_comparison_p (const_tree expr, enum tree_code code,
4929 const_tree op1, const_tree op2)
4931 gimple *s;
4933 /* The obvious case. */
4934 if (TREE_CODE (expr) == code
4935 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4936 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4937 return true;
4939 /* Check for comparing (name, name != 0) and the case where expr
4940 is an SSA_NAME with a definition matching the comparison. */
4941 if (TREE_CODE (expr) == SSA_NAME
4942 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4944 if (operand_equal_p (expr, op1, 0))
4945 return ((code == NE_EXPR && integer_zerop (op2))
4946 || (code == EQ_EXPR && integer_nonzerop (op2)));
4947 s = SSA_NAME_DEF_STMT (expr);
4948 if (is_gimple_assign (s)
4949 && gimple_assign_rhs_code (s) == code
4950 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4951 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4952 return true;
4955 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4956 of name is a comparison, recurse. */
4957 if (TREE_CODE (op1) == SSA_NAME
4958 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4960 s = SSA_NAME_DEF_STMT (op1);
4961 if (is_gimple_assign (s)
4962 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4964 enum tree_code c = gimple_assign_rhs_code (s);
4965 if ((c == NE_EXPR && integer_zerop (op2))
4966 || (c == EQ_EXPR && integer_nonzerop (op2)))
4967 return same_bool_comparison_p (expr, c,
4968 gimple_assign_rhs1 (s),
4969 gimple_assign_rhs2 (s));
4970 if ((c == EQ_EXPR && integer_zerop (op2))
4971 || (c == NE_EXPR && integer_nonzerop (op2)))
4972 return same_bool_comparison_p (expr,
4973 invert_tree_comparison (c, false),
4974 gimple_assign_rhs1 (s),
4975 gimple_assign_rhs2 (s));
4978 return false;
4981 /* Check to see if two boolean expressions OP1 and OP2 are logically
4982 equivalent. */
4984 static bool
4985 same_bool_result_p (const_tree op1, const_tree op2)
4987 /* Simple cases first. */
4988 if (operand_equal_p (op1, op2, 0))
4989 return true;
4991 /* Check the cases where at least one of the operands is a comparison.
4992 These are a bit smarter than operand_equal_p in that they apply some
4993 identifies on SSA_NAMEs. */
4994 if (COMPARISON_CLASS_P (op2)
4995 && same_bool_comparison_p (op1, TREE_CODE (op2),
4996 TREE_OPERAND (op2, 0),
4997 TREE_OPERAND (op2, 1)))
4998 return true;
4999 if (COMPARISON_CLASS_P (op1)
5000 && same_bool_comparison_p (op2, TREE_CODE (op1),
5001 TREE_OPERAND (op1, 0),
5002 TREE_OPERAND (op1, 1)))
5003 return true;
5005 /* Default case. */
5006 return false;
5009 /* Forward declarations for some mutually recursive functions. */
5011 static tree
5012 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5013 enum tree_code code2, tree op2a, tree op2b);
5014 static tree
5015 and_var_with_comparison (tree var, bool invert,
5016 enum tree_code code2, tree op2a, tree op2b);
5017 static tree
5018 and_var_with_comparison_1 (gimple *stmt,
5019 enum tree_code code2, tree op2a, tree op2b);
5020 static tree
5021 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5022 enum tree_code code2, tree op2a, tree op2b);
5023 static tree
5024 or_var_with_comparison (tree var, bool invert,
5025 enum tree_code code2, tree op2a, tree op2b);
5026 static tree
5027 or_var_with_comparison_1 (gimple *stmt,
5028 enum tree_code code2, tree op2a, tree op2b);
5030 /* Helper function for and_comparisons_1: try to simplify the AND of the
5031 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5032 If INVERT is true, invert the value of the VAR before doing the AND.
5033 Return NULL_EXPR if we can't simplify this to a single expression. */
5035 static tree
5036 and_var_with_comparison (tree var, bool invert,
5037 enum tree_code code2, tree op2a, tree op2b)
5039 tree t;
5040 gimple *stmt = SSA_NAME_DEF_STMT (var);
5042 /* We can only deal with variables whose definitions are assignments. */
5043 if (!is_gimple_assign (stmt))
5044 return NULL_TREE;
5046 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5047 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5048 Then we only have to consider the simpler non-inverted cases. */
5049 if (invert)
5050 t = or_var_with_comparison_1 (stmt,
5051 invert_tree_comparison (code2, false),
5052 op2a, op2b);
5053 else
5054 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5055 return canonicalize_bool (t, invert);
5058 /* Try to simplify the AND of the ssa variable defined by the assignment
5059 STMT with the comparison specified by (OP2A CODE2 OP2B).
5060 Return NULL_EXPR if we can't simplify this to a single expression. */
5062 static tree
5063 and_var_with_comparison_1 (gimple *stmt,
5064 enum tree_code code2, tree op2a, tree op2b)
5066 tree var = gimple_assign_lhs (stmt);
5067 tree true_test_var = NULL_TREE;
5068 tree false_test_var = NULL_TREE;
5069 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5071 /* Check for identities like (var AND (var == 0)) => false. */
5072 if (TREE_CODE (op2a) == SSA_NAME
5073 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5075 if ((code2 == NE_EXPR && integer_zerop (op2b))
5076 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5078 true_test_var = op2a;
5079 if (var == true_test_var)
5080 return var;
5082 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5083 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5085 false_test_var = op2a;
5086 if (var == false_test_var)
5087 return boolean_false_node;
5091 /* If the definition is a comparison, recurse on it. */
5092 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5094 tree t = and_comparisons_1 (innercode,
5095 gimple_assign_rhs1 (stmt),
5096 gimple_assign_rhs2 (stmt),
5097 code2,
5098 op2a,
5099 op2b);
5100 if (t)
5101 return t;
5104 /* If the definition is an AND or OR expression, we may be able to
5105 simplify by reassociating. */
5106 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5107 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5109 tree inner1 = gimple_assign_rhs1 (stmt);
5110 tree inner2 = gimple_assign_rhs2 (stmt);
5111 gimple *s;
5112 tree t;
5113 tree partial = NULL_TREE;
5114 bool is_and = (innercode == BIT_AND_EXPR);
5116 /* Check for boolean identities that don't require recursive examination
5117 of inner1/inner2:
5118 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5119 inner1 AND (inner1 OR inner2) => inner1
5120 !inner1 AND (inner1 AND inner2) => false
5121 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5122 Likewise for similar cases involving inner2. */
5123 if (inner1 == true_test_var)
5124 return (is_and ? var : inner1);
5125 else if (inner2 == true_test_var)
5126 return (is_and ? var : inner2);
5127 else if (inner1 == false_test_var)
5128 return (is_and
5129 ? boolean_false_node
5130 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5131 else if (inner2 == false_test_var)
5132 return (is_and
5133 ? boolean_false_node
5134 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5136 /* Next, redistribute/reassociate the AND across the inner tests.
5137 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5138 if (TREE_CODE (inner1) == SSA_NAME
5139 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5140 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5141 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5142 gimple_assign_rhs1 (s),
5143 gimple_assign_rhs2 (s),
5144 code2, op2a, op2b)))
5146 /* Handle the AND case, where we are reassociating:
5147 (inner1 AND inner2) AND (op2a code2 op2b)
5148 => (t AND inner2)
5149 If the partial result t is a constant, we win. Otherwise
5150 continue on to try reassociating with the other inner test. */
5151 if (is_and)
5153 if (integer_onep (t))
5154 return inner2;
5155 else if (integer_zerop (t))
5156 return boolean_false_node;
5159 /* Handle the OR case, where we are redistributing:
5160 (inner1 OR inner2) AND (op2a code2 op2b)
5161 => (t OR (inner2 AND (op2a code2 op2b))) */
5162 else if (integer_onep (t))
5163 return boolean_true_node;
5165 /* Save partial result for later. */
5166 partial = t;
5169 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5170 if (TREE_CODE (inner2) == SSA_NAME
5171 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5172 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5173 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5174 gimple_assign_rhs1 (s),
5175 gimple_assign_rhs2 (s),
5176 code2, op2a, op2b)))
5178 /* Handle the AND case, where we are reassociating:
5179 (inner1 AND inner2) AND (op2a code2 op2b)
5180 => (inner1 AND t) */
5181 if (is_and)
5183 if (integer_onep (t))
5184 return inner1;
5185 else if (integer_zerop (t))
5186 return boolean_false_node;
5187 /* If both are the same, we can apply the identity
5188 (x AND x) == x. */
5189 else if (partial && same_bool_result_p (t, partial))
5190 return t;
5193 /* Handle the OR case. where we are redistributing:
5194 (inner1 OR inner2) AND (op2a code2 op2b)
5195 => (t OR (inner1 AND (op2a code2 op2b)))
5196 => (t OR partial) */
5197 else
5199 if (integer_onep (t))
5200 return boolean_true_node;
5201 else if (partial)
5203 /* We already got a simplification for the other
5204 operand to the redistributed OR expression. The
5205 interesting case is when at least one is false.
5206 Or, if both are the same, we can apply the identity
5207 (x OR x) == x. */
5208 if (integer_zerop (partial))
5209 return t;
5210 else if (integer_zerop (t))
5211 return partial;
5212 else if (same_bool_result_p (t, partial))
5213 return t;
5218 return NULL_TREE;
5221 /* Try to simplify the AND of two comparisons defined by
5222 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5223 If this can be done without constructing an intermediate value,
5224 return the resulting tree; otherwise NULL_TREE is returned.
5225 This function is deliberately asymmetric as it recurses on SSA_DEFs
5226 in the first comparison but not the second. */
5228 static tree
5229 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5230 enum tree_code code2, tree op2a, tree op2b)
5232 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5234 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5235 if (operand_equal_p (op1a, op2a, 0)
5236 && operand_equal_p (op1b, op2b, 0))
5238 /* Result will be either NULL_TREE, or a combined comparison. */
5239 tree t = combine_comparisons (UNKNOWN_LOCATION,
5240 TRUTH_ANDIF_EXPR, code1, code2,
5241 truth_type, op1a, op1b);
5242 if (t)
5243 return t;
5246 /* Likewise the swapped case of the above. */
5247 if (operand_equal_p (op1a, op2b, 0)
5248 && operand_equal_p (op1b, op2a, 0))
5250 /* Result will be either NULL_TREE, or a combined comparison. */
5251 tree t = combine_comparisons (UNKNOWN_LOCATION,
5252 TRUTH_ANDIF_EXPR, code1,
5253 swap_tree_comparison (code2),
5254 truth_type, op1a, op1b);
5255 if (t)
5256 return t;
5259 /* If both comparisons are of the same value against constants, we might
5260 be able to merge them. */
5261 if (operand_equal_p (op1a, op2a, 0)
5262 && TREE_CODE (op1b) == INTEGER_CST
5263 && TREE_CODE (op2b) == INTEGER_CST)
5265 int cmp = tree_int_cst_compare (op1b, op2b);
5267 /* If we have (op1a == op1b), we should either be able to
5268 return that or FALSE, depending on whether the constant op1b
5269 also satisfies the other comparison against op2b. */
5270 if (code1 == EQ_EXPR)
5272 bool done = true;
5273 bool val;
5274 switch (code2)
5276 case EQ_EXPR: val = (cmp == 0); break;
5277 case NE_EXPR: val = (cmp != 0); break;
5278 case LT_EXPR: val = (cmp < 0); break;
5279 case GT_EXPR: val = (cmp > 0); break;
5280 case LE_EXPR: val = (cmp <= 0); break;
5281 case GE_EXPR: val = (cmp >= 0); break;
5282 default: done = false;
5284 if (done)
5286 if (val)
5287 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5288 else
5289 return boolean_false_node;
5292 /* Likewise if the second comparison is an == comparison. */
5293 else if (code2 == EQ_EXPR)
5295 bool done = true;
5296 bool val;
5297 switch (code1)
5299 case EQ_EXPR: val = (cmp == 0); break;
5300 case NE_EXPR: val = (cmp != 0); break;
5301 case LT_EXPR: val = (cmp > 0); break;
5302 case GT_EXPR: val = (cmp < 0); break;
5303 case LE_EXPR: val = (cmp >= 0); break;
5304 case GE_EXPR: val = (cmp <= 0); break;
5305 default: done = false;
5307 if (done)
5309 if (val)
5310 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5311 else
5312 return boolean_false_node;
5316 /* Same business with inequality tests. */
5317 else if (code1 == NE_EXPR)
5319 bool val;
5320 switch (code2)
5322 case EQ_EXPR: val = (cmp != 0); break;
5323 case NE_EXPR: val = (cmp == 0); break;
5324 case LT_EXPR: val = (cmp >= 0); break;
5325 case GT_EXPR: val = (cmp <= 0); break;
5326 case LE_EXPR: val = (cmp > 0); break;
5327 case GE_EXPR: val = (cmp < 0); break;
5328 default:
5329 val = false;
5331 if (val)
5332 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5334 else if (code2 == NE_EXPR)
5336 bool val;
5337 switch (code1)
5339 case EQ_EXPR: val = (cmp == 0); break;
5340 case NE_EXPR: val = (cmp != 0); break;
5341 case LT_EXPR: val = (cmp <= 0); break;
5342 case GT_EXPR: val = (cmp >= 0); break;
5343 case LE_EXPR: val = (cmp < 0); break;
5344 case GE_EXPR: val = (cmp > 0); break;
5345 default:
5346 val = false;
5348 if (val)
5349 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5352 /* Chose the more restrictive of two < or <= comparisons. */
5353 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5354 && (code2 == LT_EXPR || code2 == LE_EXPR))
5356 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5357 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5358 else
5359 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5362 /* Likewise chose the more restrictive of two > or >= comparisons. */
5363 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5364 && (code2 == GT_EXPR || code2 == GE_EXPR))
5366 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5367 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5368 else
5369 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5372 /* Check for singleton ranges. */
5373 else if (cmp == 0
5374 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5375 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5376 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5378 /* Check for disjoint ranges. */
5379 else if (cmp <= 0
5380 && (code1 == LT_EXPR || code1 == LE_EXPR)
5381 && (code2 == GT_EXPR || code2 == GE_EXPR))
5382 return boolean_false_node;
5383 else if (cmp >= 0
5384 && (code1 == GT_EXPR || code1 == GE_EXPR)
5385 && (code2 == LT_EXPR || code2 == LE_EXPR))
5386 return boolean_false_node;
5389 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5390 NAME's definition is a truth value. See if there are any simplifications
5391 that can be done against the NAME's definition. */
5392 if (TREE_CODE (op1a) == SSA_NAME
5393 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5394 && (integer_zerop (op1b) || integer_onep (op1b)))
5396 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5397 || (code1 == NE_EXPR && integer_onep (op1b)));
5398 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5399 switch (gimple_code (stmt))
5401 case GIMPLE_ASSIGN:
5402 /* Try to simplify by copy-propagating the definition. */
5403 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5405 case GIMPLE_PHI:
5406 /* If every argument to the PHI produces the same result when
5407 ANDed with the second comparison, we win.
5408 Do not do this unless the type is bool since we need a bool
5409 result here anyway. */
5410 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5412 tree result = NULL_TREE;
5413 unsigned i;
5414 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5416 tree arg = gimple_phi_arg_def (stmt, i);
5418 /* If this PHI has itself as an argument, ignore it.
5419 If all the other args produce the same result,
5420 we're still OK. */
5421 if (arg == gimple_phi_result (stmt))
5422 continue;
5423 else if (TREE_CODE (arg) == INTEGER_CST)
5425 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5427 if (!result)
5428 result = boolean_false_node;
5429 else if (!integer_zerop (result))
5430 return NULL_TREE;
5432 else if (!result)
5433 result = fold_build2 (code2, boolean_type_node,
5434 op2a, op2b);
5435 else if (!same_bool_comparison_p (result,
5436 code2, op2a, op2b))
5437 return NULL_TREE;
5439 else if (TREE_CODE (arg) == SSA_NAME
5440 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5442 tree temp;
5443 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5444 /* In simple cases we can look through PHI nodes,
5445 but we have to be careful with loops.
5446 See PR49073. */
5447 if (! dom_info_available_p (CDI_DOMINATORS)
5448 || gimple_bb (def_stmt) == gimple_bb (stmt)
5449 || dominated_by_p (CDI_DOMINATORS,
5450 gimple_bb (def_stmt),
5451 gimple_bb (stmt)))
5452 return NULL_TREE;
5453 temp = and_var_with_comparison (arg, invert, code2,
5454 op2a, op2b);
5455 if (!temp)
5456 return NULL_TREE;
5457 else if (!result)
5458 result = temp;
5459 else if (!same_bool_result_p (result, temp))
5460 return NULL_TREE;
5462 else
5463 return NULL_TREE;
5465 return result;
5468 default:
5469 break;
5472 return NULL_TREE;
5475 /* Try to simplify the AND of two comparisons, specified by
5476 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5477 If this can be simplified to a single expression (without requiring
5478 introducing more SSA variables to hold intermediate values),
5479 return the resulting tree. Otherwise return NULL_TREE.
5480 If the result expression is non-null, it has boolean type. */
5482 tree
5483 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5484 enum tree_code code2, tree op2a, tree op2b)
5486 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5487 if (t)
5488 return t;
5489 else
5490 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5493 /* Helper function for or_comparisons_1: try to simplify the OR of the
5494 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5495 If INVERT is true, invert the value of VAR before doing the OR.
5496 Return NULL_EXPR if we can't simplify this to a single expression. */
5498 static tree
5499 or_var_with_comparison (tree var, bool invert,
5500 enum tree_code code2, tree op2a, tree op2b)
5502 tree t;
5503 gimple *stmt = SSA_NAME_DEF_STMT (var);
5505 /* We can only deal with variables whose definitions are assignments. */
5506 if (!is_gimple_assign (stmt))
5507 return NULL_TREE;
5509 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5510 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5511 Then we only have to consider the simpler non-inverted cases. */
5512 if (invert)
5513 t = and_var_with_comparison_1 (stmt,
5514 invert_tree_comparison (code2, false),
5515 op2a, op2b);
5516 else
5517 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5518 return canonicalize_bool (t, invert);
5521 /* Try to simplify the OR of the ssa variable defined by the assignment
5522 STMT with the comparison specified by (OP2A CODE2 OP2B).
5523 Return NULL_EXPR if we can't simplify this to a single expression. */
5525 static tree
5526 or_var_with_comparison_1 (gimple *stmt,
5527 enum tree_code code2, tree op2a, tree op2b)
5529 tree var = gimple_assign_lhs (stmt);
5530 tree true_test_var = NULL_TREE;
5531 tree false_test_var = NULL_TREE;
5532 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5534 /* Check for identities like (var OR (var != 0)) => true . */
5535 if (TREE_CODE (op2a) == SSA_NAME
5536 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5538 if ((code2 == NE_EXPR && integer_zerop (op2b))
5539 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5541 true_test_var = op2a;
5542 if (var == true_test_var)
5543 return var;
5545 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5546 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5548 false_test_var = op2a;
5549 if (var == false_test_var)
5550 return boolean_true_node;
5554 /* If the definition is a comparison, recurse on it. */
5555 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5557 tree t = or_comparisons_1 (innercode,
5558 gimple_assign_rhs1 (stmt),
5559 gimple_assign_rhs2 (stmt),
5560 code2,
5561 op2a,
5562 op2b);
5563 if (t)
5564 return t;
5567 /* If the definition is an AND or OR expression, we may be able to
5568 simplify by reassociating. */
5569 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5570 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5572 tree inner1 = gimple_assign_rhs1 (stmt);
5573 tree inner2 = gimple_assign_rhs2 (stmt);
5574 gimple *s;
5575 tree t;
5576 tree partial = NULL_TREE;
5577 bool is_or = (innercode == BIT_IOR_EXPR);
5579 /* Check for boolean identities that don't require recursive examination
5580 of inner1/inner2:
5581 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5582 inner1 OR (inner1 AND inner2) => inner1
5583 !inner1 OR (inner1 OR inner2) => true
5584 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5586 if (inner1 == true_test_var)
5587 return (is_or ? var : inner1);
5588 else if (inner2 == true_test_var)
5589 return (is_or ? var : inner2);
5590 else if (inner1 == false_test_var)
5591 return (is_or
5592 ? boolean_true_node
5593 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5594 else if (inner2 == false_test_var)
5595 return (is_or
5596 ? boolean_true_node
5597 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5599 /* Next, redistribute/reassociate the OR across the inner tests.
5600 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5601 if (TREE_CODE (inner1) == SSA_NAME
5602 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5603 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5604 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5605 gimple_assign_rhs1 (s),
5606 gimple_assign_rhs2 (s),
5607 code2, op2a, op2b)))
5609 /* Handle the OR case, where we are reassociating:
5610 (inner1 OR inner2) OR (op2a code2 op2b)
5611 => (t OR inner2)
5612 If the partial result t is a constant, we win. Otherwise
5613 continue on to try reassociating with the other inner test. */
5614 if (is_or)
5616 if (integer_onep (t))
5617 return boolean_true_node;
5618 else if (integer_zerop (t))
5619 return inner2;
5622 /* Handle the AND case, where we are redistributing:
5623 (inner1 AND inner2) OR (op2a code2 op2b)
5624 => (t AND (inner2 OR (op2a code op2b))) */
5625 else if (integer_zerop (t))
5626 return boolean_false_node;
5628 /* Save partial result for later. */
5629 partial = t;
5632 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5633 if (TREE_CODE (inner2) == SSA_NAME
5634 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5635 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5636 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5637 gimple_assign_rhs1 (s),
5638 gimple_assign_rhs2 (s),
5639 code2, op2a, op2b)))
5641 /* Handle the OR case, where we are reassociating:
5642 (inner1 OR inner2) OR (op2a code2 op2b)
5643 => (inner1 OR t)
5644 => (t OR partial) */
5645 if (is_or)
5647 if (integer_zerop (t))
5648 return inner1;
5649 else if (integer_onep (t))
5650 return boolean_true_node;
5651 /* If both are the same, we can apply the identity
5652 (x OR x) == x. */
5653 else if (partial && same_bool_result_p (t, partial))
5654 return t;
5657 /* Handle the AND case, where we are redistributing:
5658 (inner1 AND inner2) OR (op2a code2 op2b)
5659 => (t AND (inner1 OR (op2a code2 op2b)))
5660 => (t AND partial) */
5661 else
5663 if (integer_zerop (t))
5664 return boolean_false_node;
5665 else if (partial)
5667 /* We already got a simplification for the other
5668 operand to the redistributed AND expression. The
5669 interesting case is when at least one is true.
5670 Or, if both are the same, we can apply the identity
5671 (x AND x) == x. */
5672 if (integer_onep (partial))
5673 return t;
5674 else if (integer_onep (t))
5675 return partial;
5676 else if (same_bool_result_p (t, partial))
5677 return t;
5682 return NULL_TREE;
5685 /* Try to simplify the OR of two comparisons defined by
5686 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5687 If this can be done without constructing an intermediate value,
5688 return the resulting tree; otherwise NULL_TREE is returned.
5689 This function is deliberately asymmetric as it recurses on SSA_DEFs
5690 in the first comparison but not the second. */
5692 static tree
5693 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5694 enum tree_code code2, tree op2a, tree op2b)
5696 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5698 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5699 if (operand_equal_p (op1a, op2a, 0)
5700 && operand_equal_p (op1b, op2b, 0))
5702 /* Result will be either NULL_TREE, or a combined comparison. */
5703 tree t = combine_comparisons (UNKNOWN_LOCATION,
5704 TRUTH_ORIF_EXPR, code1, code2,
5705 truth_type, op1a, op1b);
5706 if (t)
5707 return t;
5710 /* Likewise the swapped case of the above. */
5711 if (operand_equal_p (op1a, op2b, 0)
5712 && operand_equal_p (op1b, op2a, 0))
5714 /* Result will be either NULL_TREE, or a combined comparison. */
5715 tree t = combine_comparisons (UNKNOWN_LOCATION,
5716 TRUTH_ORIF_EXPR, code1,
5717 swap_tree_comparison (code2),
5718 truth_type, op1a, op1b);
5719 if (t)
5720 return t;
5723 /* If both comparisons are of the same value against constants, we might
5724 be able to merge them. */
5725 if (operand_equal_p (op1a, op2a, 0)
5726 && TREE_CODE (op1b) == INTEGER_CST
5727 && TREE_CODE (op2b) == INTEGER_CST)
5729 int cmp = tree_int_cst_compare (op1b, op2b);
5731 /* If we have (op1a != op1b), we should either be able to
5732 return that or TRUE, depending on whether the constant op1b
5733 also satisfies the other comparison against op2b. */
5734 if (code1 == NE_EXPR)
5736 bool done = true;
5737 bool val;
5738 switch (code2)
5740 case EQ_EXPR: val = (cmp == 0); break;
5741 case NE_EXPR: val = (cmp != 0); break;
5742 case LT_EXPR: val = (cmp < 0); break;
5743 case GT_EXPR: val = (cmp > 0); break;
5744 case LE_EXPR: val = (cmp <= 0); break;
5745 case GE_EXPR: val = (cmp >= 0); break;
5746 default: done = false;
5748 if (done)
5750 if (val)
5751 return boolean_true_node;
5752 else
5753 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5756 /* Likewise if the second comparison is a != comparison. */
5757 else if (code2 == NE_EXPR)
5759 bool done = true;
5760 bool val;
5761 switch (code1)
5763 case EQ_EXPR: val = (cmp == 0); break;
5764 case NE_EXPR: val = (cmp != 0); break;
5765 case LT_EXPR: val = (cmp > 0); break;
5766 case GT_EXPR: val = (cmp < 0); break;
5767 case LE_EXPR: val = (cmp >= 0); break;
5768 case GE_EXPR: val = (cmp <= 0); break;
5769 default: done = false;
5771 if (done)
5773 if (val)
5774 return boolean_true_node;
5775 else
5776 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5780 /* See if an equality test is redundant with the other comparison. */
5781 else if (code1 == EQ_EXPR)
5783 bool val;
5784 switch (code2)
5786 case EQ_EXPR: val = (cmp == 0); break;
5787 case NE_EXPR: val = (cmp != 0); break;
5788 case LT_EXPR: val = (cmp < 0); break;
5789 case GT_EXPR: val = (cmp > 0); break;
5790 case LE_EXPR: val = (cmp <= 0); break;
5791 case GE_EXPR: val = (cmp >= 0); break;
5792 default:
5793 val = false;
5795 if (val)
5796 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5798 else if (code2 == EQ_EXPR)
5800 bool val;
5801 switch (code1)
5803 case EQ_EXPR: val = (cmp == 0); break;
5804 case NE_EXPR: val = (cmp != 0); break;
5805 case LT_EXPR: val = (cmp > 0); break;
5806 case GT_EXPR: val = (cmp < 0); break;
5807 case LE_EXPR: val = (cmp >= 0); break;
5808 case GE_EXPR: val = (cmp <= 0); break;
5809 default:
5810 val = false;
5812 if (val)
5813 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5816 /* Chose the less restrictive of two < or <= comparisons. */
5817 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5818 && (code2 == LT_EXPR || code2 == LE_EXPR))
5820 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5821 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5822 else
5823 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5826 /* Likewise chose the less restrictive of two > or >= comparisons. */
5827 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5828 && (code2 == GT_EXPR || code2 == GE_EXPR))
5830 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5831 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5832 else
5833 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5836 /* Check for singleton ranges. */
5837 else if (cmp == 0
5838 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5839 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5840 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5842 /* Check for less/greater pairs that don't restrict the range at all. */
5843 else if (cmp >= 0
5844 && (code1 == LT_EXPR || code1 == LE_EXPR)
5845 && (code2 == GT_EXPR || code2 == GE_EXPR))
5846 return boolean_true_node;
5847 else if (cmp <= 0
5848 && (code1 == GT_EXPR || code1 == GE_EXPR)
5849 && (code2 == LT_EXPR || code2 == LE_EXPR))
5850 return boolean_true_node;
5853 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5854 NAME's definition is a truth value. See if there are any simplifications
5855 that can be done against the NAME's definition. */
5856 if (TREE_CODE (op1a) == SSA_NAME
5857 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5858 && (integer_zerop (op1b) || integer_onep (op1b)))
5860 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5861 || (code1 == NE_EXPR && integer_onep (op1b)));
5862 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5863 switch (gimple_code (stmt))
5865 case GIMPLE_ASSIGN:
5866 /* Try to simplify by copy-propagating the definition. */
5867 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5869 case GIMPLE_PHI:
5870 /* If every argument to the PHI produces the same result when
5871 ORed with the second comparison, we win.
5872 Do not do this unless the type is bool since we need a bool
5873 result here anyway. */
5874 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5876 tree result = NULL_TREE;
5877 unsigned i;
5878 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5880 tree arg = gimple_phi_arg_def (stmt, i);
5882 /* If this PHI has itself as an argument, ignore it.
5883 If all the other args produce the same result,
5884 we're still OK. */
5885 if (arg == gimple_phi_result (stmt))
5886 continue;
5887 else if (TREE_CODE (arg) == INTEGER_CST)
5889 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5891 if (!result)
5892 result = boolean_true_node;
5893 else if (!integer_onep (result))
5894 return NULL_TREE;
5896 else if (!result)
5897 result = fold_build2 (code2, boolean_type_node,
5898 op2a, op2b);
5899 else if (!same_bool_comparison_p (result,
5900 code2, op2a, op2b))
5901 return NULL_TREE;
5903 else if (TREE_CODE (arg) == SSA_NAME
5904 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5906 tree temp;
5907 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5908 /* In simple cases we can look through PHI nodes,
5909 but we have to be careful with loops.
5910 See PR49073. */
5911 if (! dom_info_available_p (CDI_DOMINATORS)
5912 || gimple_bb (def_stmt) == gimple_bb (stmt)
5913 || dominated_by_p (CDI_DOMINATORS,
5914 gimple_bb (def_stmt),
5915 gimple_bb (stmt)))
5916 return NULL_TREE;
5917 temp = or_var_with_comparison (arg, invert, code2,
5918 op2a, op2b);
5919 if (!temp)
5920 return NULL_TREE;
5921 else if (!result)
5922 result = temp;
5923 else if (!same_bool_result_p (result, temp))
5924 return NULL_TREE;
5926 else
5927 return NULL_TREE;
5929 return result;
5932 default:
5933 break;
5936 return NULL_TREE;
5939 /* Try to simplify the OR of two comparisons, specified by
5940 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5941 If this can be simplified to a single expression (without requiring
5942 introducing more SSA variables to hold intermediate values),
5943 return the resulting tree. Otherwise return NULL_TREE.
5944 If the result expression is non-null, it has boolean type. */
5946 tree
5947 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5948 enum tree_code code2, tree op2a, tree op2b)
5950 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5951 if (t)
5952 return t;
5953 else
5954 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5958 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5960 Either NULL_TREE, a simplified but non-constant or a constant
5961 is returned.
5963 ??? This should go into a gimple-fold-inline.h file to be eventually
5964 privatized with the single valueize function used in the various TUs
5965 to avoid the indirect function call overhead. */
5967 tree
5968 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5969 tree (*gvalueize) (tree))
5971 code_helper rcode;
5972 tree ops[3] = {};
5973 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5974 edges if there are intermediate VARYING defs. For this reason
5975 do not follow SSA edges here even though SCCVN can technically
5976 just deal fine with that. */
5977 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5979 tree res = NULL_TREE;
5980 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5981 res = ops[0];
5982 else if (mprts_hook)
5983 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5984 if (res)
5986 if (dump_file && dump_flags & TDF_DETAILS)
5988 fprintf (dump_file, "Match-and-simplified ");
5989 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5990 fprintf (dump_file, " to ");
5991 print_generic_expr (dump_file, res);
5992 fprintf (dump_file, "\n");
5994 return res;
5998 location_t loc = gimple_location (stmt);
5999 switch (gimple_code (stmt))
6001 case GIMPLE_ASSIGN:
6003 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6005 switch (get_gimple_rhs_class (subcode))
6007 case GIMPLE_SINGLE_RHS:
6009 tree rhs = gimple_assign_rhs1 (stmt);
6010 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6012 if (TREE_CODE (rhs) == SSA_NAME)
6014 /* If the RHS is an SSA_NAME, return its known constant value,
6015 if any. */
6016 return (*valueize) (rhs);
6018 /* Handle propagating invariant addresses into address
6019 operations. */
6020 else if (TREE_CODE (rhs) == ADDR_EXPR
6021 && !is_gimple_min_invariant (rhs))
6023 HOST_WIDE_INT offset = 0;
6024 tree base;
6025 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6026 &offset,
6027 valueize);
6028 if (base
6029 && (CONSTANT_CLASS_P (base)
6030 || decl_address_invariant_p (base)))
6031 return build_invariant_address (TREE_TYPE (rhs),
6032 base, offset);
6034 else if (TREE_CODE (rhs) == CONSTRUCTOR
6035 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6036 && (CONSTRUCTOR_NELTS (rhs)
6037 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6039 unsigned i, nelts;
6040 tree val;
6042 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6043 auto_vec<tree, 32> vec (nelts);
6044 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6046 val = (*valueize) (val);
6047 if (TREE_CODE (val) == INTEGER_CST
6048 || TREE_CODE (val) == REAL_CST
6049 || TREE_CODE (val) == FIXED_CST)
6050 vec.quick_push (val);
6051 else
6052 return NULL_TREE;
6055 return build_vector (TREE_TYPE (rhs), vec);
6057 if (subcode == OBJ_TYPE_REF)
6059 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6060 /* If callee is constant, we can fold away the wrapper. */
6061 if (is_gimple_min_invariant (val))
6062 return val;
6065 if (kind == tcc_reference)
6067 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6068 || TREE_CODE (rhs) == REALPART_EXPR
6069 || TREE_CODE (rhs) == IMAGPART_EXPR)
6070 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6072 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6073 return fold_unary_loc (EXPR_LOCATION (rhs),
6074 TREE_CODE (rhs),
6075 TREE_TYPE (rhs), val);
6077 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6078 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6080 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6081 return fold_ternary_loc (EXPR_LOCATION (rhs),
6082 TREE_CODE (rhs),
6083 TREE_TYPE (rhs), val,
6084 TREE_OPERAND (rhs, 1),
6085 TREE_OPERAND (rhs, 2));
6087 else if (TREE_CODE (rhs) == MEM_REF
6088 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6090 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6091 if (TREE_CODE (val) == ADDR_EXPR
6092 && is_gimple_min_invariant (val))
6094 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6095 unshare_expr (val),
6096 TREE_OPERAND (rhs, 1));
6097 if (tem)
6098 rhs = tem;
6101 return fold_const_aggregate_ref_1 (rhs, valueize);
6103 else if (kind == tcc_declaration)
6104 return get_symbol_constant_value (rhs);
6105 return rhs;
6108 case GIMPLE_UNARY_RHS:
6109 return NULL_TREE;
6111 case GIMPLE_BINARY_RHS:
6112 /* Translate &x + CST into an invariant form suitable for
6113 further propagation. */
6114 if (subcode == POINTER_PLUS_EXPR)
6116 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6117 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6118 if (TREE_CODE (op0) == ADDR_EXPR
6119 && TREE_CODE (op1) == INTEGER_CST)
6121 tree off = fold_convert (ptr_type_node, op1);
6122 return build_fold_addr_expr_loc
6123 (loc,
6124 fold_build2 (MEM_REF,
6125 TREE_TYPE (TREE_TYPE (op0)),
6126 unshare_expr (op0), off));
6129 /* Canonicalize bool != 0 and bool == 0 appearing after
6130 valueization. While gimple_simplify handles this
6131 it can get confused by the ~X == 1 -> X == 0 transform
6132 which we cant reduce to a SSA name or a constant
6133 (and we have no way to tell gimple_simplify to not
6134 consider those transforms in the first place). */
6135 else if (subcode == EQ_EXPR
6136 || subcode == NE_EXPR)
6138 tree lhs = gimple_assign_lhs (stmt);
6139 tree op0 = gimple_assign_rhs1 (stmt);
6140 if (useless_type_conversion_p (TREE_TYPE (lhs),
6141 TREE_TYPE (op0)))
6143 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6144 op0 = (*valueize) (op0);
6145 if (TREE_CODE (op0) == INTEGER_CST)
6146 std::swap (op0, op1);
6147 if (TREE_CODE (op1) == INTEGER_CST
6148 && ((subcode == NE_EXPR && integer_zerop (op1))
6149 || (subcode == EQ_EXPR && integer_onep (op1))))
6150 return op0;
6153 return NULL_TREE;
6155 case GIMPLE_TERNARY_RHS:
6157 /* Handle ternary operators that can appear in GIMPLE form. */
6158 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6159 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6160 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6161 return fold_ternary_loc (loc, subcode,
6162 gimple_expr_type (stmt), op0, op1, op2);
6165 default:
6166 gcc_unreachable ();
6170 case GIMPLE_CALL:
6172 tree fn;
6173 gcall *call_stmt = as_a <gcall *> (stmt);
6175 if (gimple_call_internal_p (stmt))
6177 enum tree_code subcode = ERROR_MARK;
6178 switch (gimple_call_internal_fn (stmt))
6180 case IFN_UBSAN_CHECK_ADD:
6181 subcode = PLUS_EXPR;
6182 break;
6183 case IFN_UBSAN_CHECK_SUB:
6184 subcode = MINUS_EXPR;
6185 break;
6186 case IFN_UBSAN_CHECK_MUL:
6187 subcode = MULT_EXPR;
6188 break;
6189 case IFN_BUILTIN_EXPECT:
6191 tree arg0 = gimple_call_arg (stmt, 0);
6192 tree op0 = (*valueize) (arg0);
6193 if (TREE_CODE (op0) == INTEGER_CST)
6194 return op0;
6195 return NULL_TREE;
6197 default:
6198 return NULL_TREE;
6200 tree arg0 = gimple_call_arg (stmt, 0);
6201 tree arg1 = gimple_call_arg (stmt, 1);
6202 tree op0 = (*valueize) (arg0);
6203 tree op1 = (*valueize) (arg1);
6205 if (TREE_CODE (op0) != INTEGER_CST
6206 || TREE_CODE (op1) != INTEGER_CST)
6208 switch (subcode)
6210 case MULT_EXPR:
6211 /* x * 0 = 0 * x = 0 without overflow. */
6212 if (integer_zerop (op0) || integer_zerop (op1))
6213 return build_zero_cst (TREE_TYPE (arg0));
6214 break;
6215 case MINUS_EXPR:
6216 /* y - y = 0 without overflow. */
6217 if (operand_equal_p (op0, op1, 0))
6218 return build_zero_cst (TREE_TYPE (arg0));
6219 break;
6220 default:
6221 break;
6224 tree res
6225 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6226 if (res
6227 && TREE_CODE (res) == INTEGER_CST
6228 && !TREE_OVERFLOW (res))
6229 return res;
6230 return NULL_TREE;
6233 fn = (*valueize) (gimple_call_fn (stmt));
6234 if (TREE_CODE (fn) == ADDR_EXPR
6235 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6236 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6237 && gimple_builtin_call_types_compatible_p (stmt,
6238 TREE_OPERAND (fn, 0)))
6240 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6241 tree retval;
6242 unsigned i;
6243 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6244 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6245 retval = fold_builtin_call_array (loc,
6246 gimple_call_return_type (call_stmt),
6247 fn, gimple_call_num_args (stmt), args);
6248 if (retval)
6250 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6251 STRIP_NOPS (retval);
6252 retval = fold_convert (gimple_call_return_type (call_stmt),
6253 retval);
6255 return retval;
6257 return NULL_TREE;
6260 default:
6261 return NULL_TREE;
6265 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6266 Returns NULL_TREE if folding to a constant is not possible, otherwise
6267 returns a constant according to is_gimple_min_invariant. */
6269 tree
6270 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6272 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6273 if (res && is_gimple_min_invariant (res))
6274 return res;
6275 return NULL_TREE;
6279 /* The following set of functions are supposed to fold references using
6280 their constant initializers. */
6282 /* See if we can find constructor defining value of BASE.
6283 When we know the consructor with constant offset (such as
6284 base is array[40] and we do know constructor of array), then
6285 BIT_OFFSET is adjusted accordingly.
6287 As a special case, return error_mark_node when constructor
6288 is not explicitly available, but it is known to be zero
6289 such as 'static const int a;'. */
6290 static tree
6291 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6292 tree (*valueize)(tree))
6294 HOST_WIDE_INT bit_offset2, size, max_size;
6295 bool reverse;
6297 if (TREE_CODE (base) == MEM_REF)
6299 if (!integer_zerop (TREE_OPERAND (base, 1)))
6301 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6302 return NULL_TREE;
6303 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6304 * BITS_PER_UNIT);
6307 if (valueize
6308 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6309 base = valueize (TREE_OPERAND (base, 0));
6310 if (!base || TREE_CODE (base) != ADDR_EXPR)
6311 return NULL_TREE;
6312 base = TREE_OPERAND (base, 0);
6314 else if (valueize
6315 && TREE_CODE (base) == SSA_NAME)
6316 base = valueize (base);
6318 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6319 DECL_INITIAL. If BASE is a nested reference into another
6320 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6321 the inner reference. */
6322 switch (TREE_CODE (base))
6324 case VAR_DECL:
6325 case CONST_DECL:
6327 tree init = ctor_for_folding (base);
6329 /* Our semantic is exact opposite of ctor_for_folding;
6330 NULL means unknown, while error_mark_node is 0. */
6331 if (init == error_mark_node)
6332 return NULL_TREE;
6333 if (!init)
6334 return error_mark_node;
6335 return init;
6338 case VIEW_CONVERT_EXPR:
6339 return get_base_constructor (TREE_OPERAND (base, 0),
6340 bit_offset, valueize);
6342 case ARRAY_REF:
6343 case COMPONENT_REF:
6344 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6345 &reverse);
6346 if (max_size == -1 || size != max_size)
6347 return NULL_TREE;
6348 *bit_offset += bit_offset2;
6349 return get_base_constructor (base, bit_offset, valueize);
6351 case CONSTRUCTOR:
6352 return base;
6354 default:
6355 if (CONSTANT_CLASS_P (base))
6356 return base;
6358 return NULL_TREE;
6362 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6363 SIZE to the memory at bit OFFSET. */
6365 static tree
6366 fold_array_ctor_reference (tree type, tree ctor,
6367 unsigned HOST_WIDE_INT offset,
6368 unsigned HOST_WIDE_INT size,
6369 tree from_decl)
6371 offset_int low_bound;
6372 offset_int elt_size;
6373 offset_int access_index;
6374 tree domain_type = NULL_TREE;
6375 HOST_WIDE_INT inner_offset;
6377 /* Compute low bound and elt size. */
6378 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6379 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6380 if (domain_type && TYPE_MIN_VALUE (domain_type))
6382 /* Static constructors for variably sized objects makes no sense. */
6383 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6384 return NULL_TREE;
6385 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6387 else
6388 low_bound = 0;
6389 /* Static constructors for variably sized objects makes no sense. */
6390 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6391 return NULL_TREE;
6392 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6394 /* We can handle only constantly sized accesses that are known to not
6395 be larger than size of array element. */
6396 if (!TYPE_SIZE_UNIT (type)
6397 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6398 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6399 || elt_size == 0)
6400 return NULL_TREE;
6402 /* Compute the array index we look for. */
6403 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6404 elt_size);
6405 access_index += low_bound;
6407 /* And offset within the access. */
6408 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6410 /* See if the array field is large enough to span whole access. We do not
6411 care to fold accesses spanning multiple array indexes. */
6412 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6413 return NULL_TREE;
6414 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6415 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6417 /* When memory is not explicitely mentioned in constructor,
6418 it is 0 (or out of range). */
6419 return build_zero_cst (type);
6422 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6423 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6425 static tree
6426 fold_nonarray_ctor_reference (tree type, tree ctor,
6427 unsigned HOST_WIDE_INT offset,
6428 unsigned HOST_WIDE_INT size,
6429 tree from_decl)
6431 unsigned HOST_WIDE_INT cnt;
6432 tree cfield, cval;
6434 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6435 cval)
6437 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6438 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6439 tree field_size = DECL_SIZE (cfield);
6440 offset_int bitoffset;
6441 offset_int bitoffset_end, access_end;
6443 /* Variable sized objects in static constructors makes no sense,
6444 but field_size can be NULL for flexible array members. */
6445 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6446 && TREE_CODE (byte_offset) == INTEGER_CST
6447 && (field_size != NULL_TREE
6448 ? TREE_CODE (field_size) == INTEGER_CST
6449 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6451 /* Compute bit offset of the field. */
6452 bitoffset = (wi::to_offset (field_offset)
6453 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6454 /* Compute bit offset where the field ends. */
6455 if (field_size != NULL_TREE)
6456 bitoffset_end = bitoffset + wi::to_offset (field_size);
6457 else
6458 bitoffset_end = 0;
6460 access_end = offset_int (offset) + size;
6462 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6463 [BITOFFSET, BITOFFSET_END)? */
6464 if (wi::cmps (access_end, bitoffset) > 0
6465 && (field_size == NULL_TREE
6466 || wi::lts_p (offset, bitoffset_end)))
6468 offset_int inner_offset = offset_int (offset) - bitoffset;
6469 /* We do have overlap. Now see if field is large enough to
6470 cover the access. Give up for accesses spanning multiple
6471 fields. */
6472 if (wi::cmps (access_end, bitoffset_end) > 0)
6473 return NULL_TREE;
6474 if (offset < bitoffset)
6475 return NULL_TREE;
6476 return fold_ctor_reference (type, cval,
6477 inner_offset.to_uhwi (), size,
6478 from_decl);
6481 /* When memory is not explicitely mentioned in constructor, it is 0. */
6482 return build_zero_cst (type);
6485 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6486 to the memory at bit OFFSET. */
6488 tree
6489 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6490 unsigned HOST_WIDE_INT size, tree from_decl)
6492 tree ret;
6494 /* We found the field with exact match. */
6495 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6496 && !offset)
6497 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6499 /* We are at the end of walk, see if we can view convert the
6500 result. */
6501 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6502 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6503 && !compare_tree_int (TYPE_SIZE (type), size)
6504 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6506 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6507 if (ret)
6509 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6510 if (ret)
6511 STRIP_USELESS_TYPE_CONVERSION (ret);
6513 return ret;
6515 /* For constants and byte-aligned/sized reads try to go through
6516 native_encode/interpret. */
6517 if (CONSTANT_CLASS_P (ctor)
6518 && BITS_PER_UNIT == 8
6519 && offset % BITS_PER_UNIT == 0
6520 && size % BITS_PER_UNIT == 0
6521 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6523 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6524 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6525 offset / BITS_PER_UNIT);
6526 if (len > 0)
6527 return native_interpret_expr (type, buf, len);
6529 if (TREE_CODE (ctor) == CONSTRUCTOR)
6532 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6533 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6534 return fold_array_ctor_reference (type, ctor, offset, size,
6535 from_decl);
6536 else
6537 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6538 from_decl);
6541 return NULL_TREE;
6544 /* Return the tree representing the element referenced by T if T is an
6545 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6546 names using VALUEIZE. Return NULL_TREE otherwise. */
6548 tree
6549 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6551 tree ctor, idx, base;
6552 HOST_WIDE_INT offset, size, max_size;
6553 tree tem;
6554 bool reverse;
6556 if (TREE_THIS_VOLATILE (t))
6557 return NULL_TREE;
6559 if (DECL_P (t))
6560 return get_symbol_constant_value (t);
6562 tem = fold_read_from_constant_string (t);
6563 if (tem)
6564 return tem;
6566 switch (TREE_CODE (t))
6568 case ARRAY_REF:
6569 case ARRAY_RANGE_REF:
6570 /* Constant indexes are handled well by get_base_constructor.
6571 Only special case variable offsets.
6572 FIXME: This code can't handle nested references with variable indexes
6573 (they will be handled only by iteration of ccp). Perhaps we can bring
6574 get_ref_base_and_extent here and make it use a valueize callback. */
6575 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6576 && valueize
6577 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6578 && TREE_CODE (idx) == INTEGER_CST)
6580 tree low_bound, unit_size;
6582 /* If the resulting bit-offset is constant, track it. */
6583 if ((low_bound = array_ref_low_bound (t),
6584 TREE_CODE (low_bound) == INTEGER_CST)
6585 && (unit_size = array_ref_element_size (t),
6586 tree_fits_uhwi_p (unit_size)))
6588 offset_int woffset
6589 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6590 TYPE_PRECISION (TREE_TYPE (idx)));
6592 if (wi::fits_shwi_p (woffset))
6594 offset = woffset.to_shwi ();
6595 /* TODO: This code seems wrong, multiply then check
6596 to see if it fits. */
6597 offset *= tree_to_uhwi (unit_size);
6598 offset *= BITS_PER_UNIT;
6600 base = TREE_OPERAND (t, 0);
6601 ctor = get_base_constructor (base, &offset, valueize);
6602 /* Empty constructor. Always fold to 0. */
6603 if (ctor == error_mark_node)
6604 return build_zero_cst (TREE_TYPE (t));
6605 /* Out of bound array access. Value is undefined,
6606 but don't fold. */
6607 if (offset < 0)
6608 return NULL_TREE;
6609 /* We can not determine ctor. */
6610 if (!ctor)
6611 return NULL_TREE;
6612 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6613 tree_to_uhwi (unit_size)
6614 * BITS_PER_UNIT,
6615 base);
6619 /* Fallthru. */
6621 case COMPONENT_REF:
6622 case BIT_FIELD_REF:
6623 case TARGET_MEM_REF:
6624 case MEM_REF:
6625 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6626 ctor = get_base_constructor (base, &offset, valueize);
6628 /* Empty constructor. Always fold to 0. */
6629 if (ctor == error_mark_node)
6630 return build_zero_cst (TREE_TYPE (t));
6631 /* We do not know precise address. */
6632 if (max_size == -1 || max_size != size)
6633 return NULL_TREE;
6634 /* We can not determine ctor. */
6635 if (!ctor)
6636 return NULL_TREE;
6638 /* Out of bound array access. Value is undefined, but don't fold. */
6639 if (offset < 0)
6640 return NULL_TREE;
6642 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6643 base);
6645 case REALPART_EXPR:
6646 case IMAGPART_EXPR:
6648 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6649 if (c && TREE_CODE (c) == COMPLEX_CST)
6650 return fold_build1_loc (EXPR_LOCATION (t),
6651 TREE_CODE (t), TREE_TYPE (t), c);
6652 break;
6655 default:
6656 break;
6659 return NULL_TREE;
6662 tree
6663 fold_const_aggregate_ref (tree t)
6665 return fold_const_aggregate_ref_1 (t, NULL);
6668 /* Lookup virtual method with index TOKEN in a virtual table V
6669 at OFFSET.
6670 Set CAN_REFER if non-NULL to false if method
6671 is not referable or if the virtual table is ill-formed (such as rewriten
6672 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6674 tree
6675 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6676 tree v,
6677 unsigned HOST_WIDE_INT offset,
6678 bool *can_refer)
6680 tree vtable = v, init, fn;
6681 unsigned HOST_WIDE_INT size;
6682 unsigned HOST_WIDE_INT elt_size, access_index;
6683 tree domain_type;
6685 if (can_refer)
6686 *can_refer = true;
6688 /* First of all double check we have virtual table. */
6689 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6691 /* Pass down that we lost track of the target. */
6692 if (can_refer)
6693 *can_refer = false;
6694 return NULL_TREE;
6697 init = ctor_for_folding (v);
6699 /* The virtual tables should always be born with constructors
6700 and we always should assume that they are avaialble for
6701 folding. At the moment we do not stream them in all cases,
6702 but it should never happen that ctor seem unreachable. */
6703 gcc_assert (init);
6704 if (init == error_mark_node)
6706 /* Pass down that we lost track of the target. */
6707 if (can_refer)
6708 *can_refer = false;
6709 return NULL_TREE;
6711 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6712 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6713 offset *= BITS_PER_UNIT;
6714 offset += token * size;
6716 /* Lookup the value in the constructor that is assumed to be array.
6717 This is equivalent to
6718 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6719 offset, size, NULL);
6720 but in a constant time. We expect that frontend produced a simple
6721 array without indexed initializers. */
6723 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6724 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6725 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6726 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6728 access_index = offset / BITS_PER_UNIT / elt_size;
6729 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6731 /* This code makes an assumption that there are no
6732 indexed fileds produced by C++ FE, so we can directly index the array. */
6733 if (access_index < CONSTRUCTOR_NELTS (init))
6735 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6736 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6737 STRIP_NOPS (fn);
6739 else
6740 fn = NULL;
6742 /* For type inconsistent program we may end up looking up virtual method
6743 in virtual table that does not contain TOKEN entries. We may overrun
6744 the virtual table and pick up a constant or RTTI info pointer.
6745 In any case the call is undefined. */
6746 if (!fn
6747 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6748 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6749 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6750 else
6752 fn = TREE_OPERAND (fn, 0);
6754 /* When cgraph node is missing and function is not public, we cannot
6755 devirtualize. This can happen in WHOPR when the actual method
6756 ends up in other partition, because we found devirtualization
6757 possibility too late. */
6758 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6760 if (can_refer)
6762 *can_refer = false;
6763 return fn;
6765 return NULL_TREE;
6769 /* Make sure we create a cgraph node for functions we'll reference.
6770 They can be non-existent if the reference comes from an entry
6771 of an external vtable for example. */
6772 cgraph_node::get_create (fn);
6774 return fn;
6777 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6778 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6779 KNOWN_BINFO carries the binfo describing the true type of
6780 OBJ_TYPE_REF_OBJECT(REF).
6781 Set CAN_REFER if non-NULL to false if method
6782 is not referable or if the virtual table is ill-formed (such as rewriten
6783 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6785 tree
6786 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6787 bool *can_refer)
6789 unsigned HOST_WIDE_INT offset;
6790 tree v;
6792 v = BINFO_VTABLE (known_binfo);
6793 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6794 if (!v)
6795 return NULL_TREE;
6797 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6799 if (can_refer)
6800 *can_refer = false;
6801 return NULL_TREE;
6803 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6806 /* Given a pointer value T, return a simplified version of an
6807 indirection through T, or NULL_TREE if no simplification is
6808 possible. Note that the resulting type may be different from
6809 the type pointed to in the sense that it is still compatible
6810 from the langhooks point of view. */
6812 tree
6813 gimple_fold_indirect_ref (tree t)
6815 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6816 tree sub = t;
6817 tree subtype;
6819 STRIP_NOPS (sub);
6820 subtype = TREE_TYPE (sub);
6821 if (!POINTER_TYPE_P (subtype)
6822 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6823 return NULL_TREE;
6825 if (TREE_CODE (sub) == ADDR_EXPR)
6827 tree op = TREE_OPERAND (sub, 0);
6828 tree optype = TREE_TYPE (op);
6829 /* *&p => p */
6830 if (useless_type_conversion_p (type, optype))
6831 return op;
6833 /* *(foo *)&fooarray => fooarray[0] */
6834 if (TREE_CODE (optype) == ARRAY_TYPE
6835 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6836 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6838 tree type_domain = TYPE_DOMAIN (optype);
6839 tree min_val = size_zero_node;
6840 if (type_domain && TYPE_MIN_VALUE (type_domain))
6841 min_val = TYPE_MIN_VALUE (type_domain);
6842 if (TREE_CODE (min_val) == INTEGER_CST)
6843 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6845 /* *(foo *)&complexfoo => __real__ complexfoo */
6846 else if (TREE_CODE (optype) == COMPLEX_TYPE
6847 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6848 return fold_build1 (REALPART_EXPR, type, op);
6849 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6850 else if (TREE_CODE (optype) == VECTOR_TYPE
6851 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6853 tree part_width = TYPE_SIZE (type);
6854 tree index = bitsize_int (0);
6855 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6859 /* *(p + CST) -> ... */
6860 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6861 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6863 tree addr = TREE_OPERAND (sub, 0);
6864 tree off = TREE_OPERAND (sub, 1);
6865 tree addrtype;
6867 STRIP_NOPS (addr);
6868 addrtype = TREE_TYPE (addr);
6870 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6871 if (TREE_CODE (addr) == ADDR_EXPR
6872 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6873 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6874 && tree_fits_uhwi_p (off))
6876 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6877 tree part_width = TYPE_SIZE (type);
6878 unsigned HOST_WIDE_INT part_widthi
6879 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6880 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6881 tree index = bitsize_int (indexi);
6882 if (offset / part_widthi
6883 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6884 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6885 part_width, index);
6888 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6889 if (TREE_CODE (addr) == ADDR_EXPR
6890 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6891 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6893 tree size = TYPE_SIZE_UNIT (type);
6894 if (tree_int_cst_equal (size, off))
6895 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6898 /* *(p + CST) -> MEM_REF <p, CST>. */
6899 if (TREE_CODE (addr) != ADDR_EXPR
6900 || DECL_P (TREE_OPERAND (addr, 0)))
6901 return fold_build2 (MEM_REF, type,
6902 addr,
6903 wide_int_to_tree (ptype, wi::to_wide (off)));
6906 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6907 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6908 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6909 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6911 tree type_domain;
6912 tree min_val = size_zero_node;
6913 tree osub = sub;
6914 sub = gimple_fold_indirect_ref (sub);
6915 if (! sub)
6916 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6917 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6918 if (type_domain && TYPE_MIN_VALUE (type_domain))
6919 min_val = TYPE_MIN_VALUE (type_domain);
6920 if (TREE_CODE (min_val) == INTEGER_CST)
6921 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6924 return NULL_TREE;
6927 /* Return true if CODE is an operation that when operating on signed
6928 integer types involves undefined behavior on overflow and the
6929 operation can be expressed with unsigned arithmetic. */
6931 bool
6932 arith_code_with_undefined_signed_overflow (tree_code code)
6934 switch (code)
6936 case PLUS_EXPR:
6937 case MINUS_EXPR:
6938 case MULT_EXPR:
6939 case NEGATE_EXPR:
6940 case POINTER_PLUS_EXPR:
6941 return true;
6942 default:
6943 return false;
6947 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6948 operation that can be transformed to unsigned arithmetic by converting
6949 its operand, carrying out the operation in the corresponding unsigned
6950 type and converting the result back to the original type.
6952 Returns a sequence of statements that replace STMT and also contain
6953 a modified form of STMT itself. */
6955 gimple_seq
6956 rewrite_to_defined_overflow (gimple *stmt)
6958 if (dump_file && (dump_flags & TDF_DETAILS))
6960 fprintf (dump_file, "rewriting stmt with undefined signed "
6961 "overflow ");
6962 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6965 tree lhs = gimple_assign_lhs (stmt);
6966 tree type = unsigned_type_for (TREE_TYPE (lhs));
6967 gimple_seq stmts = NULL;
6968 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6970 tree op = gimple_op (stmt, i);
6971 op = gimple_convert (&stmts, type, op);
6972 gimple_set_op (stmt, i, op);
6974 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6975 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6976 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6977 gimple_seq_add_stmt (&stmts, stmt);
6978 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6979 gimple_seq_add_stmt (&stmts, cvt);
6981 return stmts;
6985 /* The valueization hook we use for the gimple_build API simplification.
6986 This makes us match fold_buildN behavior by only combining with
6987 statements in the sequence(s) we are currently building. */
6989 static tree
6990 gimple_build_valueize (tree op)
6992 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6993 return op;
6994 return NULL_TREE;
6997 /* Build the expression CODE OP0 of type TYPE with location LOC,
6998 simplifying it first if possible. Returns the built
6999 expression value and appends statements possibly defining it
7000 to SEQ. */
7002 tree
7003 gimple_build (gimple_seq *seq, location_t loc,
7004 enum tree_code code, tree type, tree op0)
7006 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7007 if (!res)
7009 res = create_tmp_reg_or_ssa_name (type);
7010 gimple *stmt;
7011 if (code == REALPART_EXPR
7012 || code == IMAGPART_EXPR
7013 || code == VIEW_CONVERT_EXPR)
7014 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7015 else
7016 stmt = gimple_build_assign (res, code, op0);
7017 gimple_set_location (stmt, loc);
7018 gimple_seq_add_stmt_without_update (seq, stmt);
7020 return res;
7023 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7024 simplifying it first if possible. Returns the built
7025 expression value and appends statements possibly defining it
7026 to SEQ. */
7028 tree
7029 gimple_build (gimple_seq *seq, location_t loc,
7030 enum tree_code code, tree type, tree op0, tree op1)
7032 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7033 if (!res)
7035 res = create_tmp_reg_or_ssa_name (type);
7036 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7037 gimple_set_location (stmt, loc);
7038 gimple_seq_add_stmt_without_update (seq, stmt);
7040 return res;
7043 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7044 simplifying it first if possible. Returns the built
7045 expression value and appends statements possibly defining it
7046 to SEQ. */
7048 tree
7049 gimple_build (gimple_seq *seq, location_t loc,
7050 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7052 tree res = gimple_simplify (code, type, op0, op1, op2,
7053 seq, gimple_build_valueize);
7054 if (!res)
7056 res = create_tmp_reg_or_ssa_name (type);
7057 gimple *stmt;
7058 if (code == BIT_FIELD_REF)
7059 stmt = gimple_build_assign (res, code,
7060 build3 (code, type, op0, op1, op2));
7061 else
7062 stmt = gimple_build_assign (res, code, op0, op1, op2);
7063 gimple_set_location (stmt, loc);
7064 gimple_seq_add_stmt_without_update (seq, stmt);
7066 return res;
7069 /* Build the call FN (ARG0) with a result of type TYPE
7070 (or no result if TYPE is void) with location LOC,
7071 simplifying it first if possible. Returns the built
7072 expression value (or NULL_TREE if TYPE is void) and appends
7073 statements possibly defining it to SEQ. */
7075 tree
7076 gimple_build (gimple_seq *seq, location_t loc,
7077 enum built_in_function fn, tree type, tree arg0)
7079 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7080 if (!res)
7082 tree decl = builtin_decl_implicit (fn);
7083 gimple *stmt = gimple_build_call (decl, 1, arg0);
7084 if (!VOID_TYPE_P (type))
7086 res = create_tmp_reg_or_ssa_name (type);
7087 gimple_call_set_lhs (stmt, res);
7089 gimple_set_location (stmt, loc);
7090 gimple_seq_add_stmt_without_update (seq, stmt);
7092 return res;
7095 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7096 (or no result if TYPE is void) with location LOC,
7097 simplifying it first if possible. Returns the built
7098 expression value (or NULL_TREE if TYPE is void) and appends
7099 statements possibly defining it to SEQ. */
7101 tree
7102 gimple_build (gimple_seq *seq, location_t loc,
7103 enum built_in_function fn, tree type, tree arg0, tree arg1)
7105 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7106 if (!res)
7108 tree decl = builtin_decl_implicit (fn);
7109 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7110 if (!VOID_TYPE_P (type))
7112 res = create_tmp_reg_or_ssa_name (type);
7113 gimple_call_set_lhs (stmt, res);
7115 gimple_set_location (stmt, loc);
7116 gimple_seq_add_stmt_without_update (seq, stmt);
7118 return res;
7121 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7122 (or no result if TYPE is void) with location LOC,
7123 simplifying it first if possible. Returns the built
7124 expression value (or NULL_TREE if TYPE is void) and appends
7125 statements possibly defining it to SEQ. */
7127 tree
7128 gimple_build (gimple_seq *seq, location_t loc,
7129 enum built_in_function fn, tree type,
7130 tree arg0, tree arg1, tree arg2)
7132 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7133 seq, gimple_build_valueize);
7134 if (!res)
7136 tree decl = builtin_decl_implicit (fn);
7137 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7138 if (!VOID_TYPE_P (type))
7140 res = create_tmp_reg_or_ssa_name (type);
7141 gimple_call_set_lhs (stmt, res);
7143 gimple_set_location (stmt, loc);
7144 gimple_seq_add_stmt_without_update (seq, stmt);
7146 return res;
7149 /* Build the conversion (TYPE) OP with a result of type TYPE
7150 with location LOC if such conversion is neccesary in GIMPLE,
7151 simplifying it first.
7152 Returns the built expression value and appends
7153 statements possibly defining it to SEQ. */
7155 tree
7156 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7158 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7159 return op;
7160 return gimple_build (seq, loc, NOP_EXPR, type, op);
7163 /* Build the conversion (ptrofftype) OP with a result of a type
7164 compatible with ptrofftype with location LOC if such conversion
7165 is neccesary in GIMPLE, simplifying it first.
7166 Returns the built expression value and appends
7167 statements possibly defining it to SEQ. */
7169 tree
7170 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7172 if (ptrofftype_p (TREE_TYPE (op)))
7173 return op;
7174 return gimple_convert (seq, loc, sizetype, op);
7177 /* Build a vector of type TYPE in which each element has the value OP.
7178 Return a gimple value for the result, appending any new statements
7179 to SEQ. */
7181 tree
7182 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7183 tree op)
7185 tree res, vec = build_vector_from_val (type, op);
7186 if (is_gimple_val (vec))
7187 return vec;
7188 if (gimple_in_ssa_p (cfun))
7189 res = make_ssa_name (type);
7190 else
7191 res = create_tmp_reg (type);
7192 gimple *stmt = gimple_build_assign (res, vec);
7193 gimple_set_location (stmt, loc);
7194 gimple_seq_add_stmt_without_update (seq, stmt);
7195 return res;
7198 /* Build a vector of type TYPE in which the elements have the values
7199 given by ELTS. Return a gimple value for the result, appending any
7200 new instructions to SEQ. */
7202 tree
7203 gimple_build_vector (gimple_seq *seq, location_t loc, tree type,
7204 vec<tree> elts)
7206 unsigned int nelts = elts.length ();
7207 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
7208 for (unsigned int i = 0; i < nelts; ++i)
7209 if (!TREE_CONSTANT (elts[i]))
7211 vec<constructor_elt, va_gc> *v;
7212 vec_alloc (v, nelts);
7213 for (i = 0; i < nelts; ++i)
7214 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
7216 tree res;
7217 if (gimple_in_ssa_p (cfun))
7218 res = make_ssa_name (type);
7219 else
7220 res = create_tmp_reg (type);
7221 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7222 gimple_set_location (stmt, loc);
7223 gimple_seq_add_stmt_without_update (seq, stmt);
7224 return res;
7226 return build_vector (type, elts);
7229 /* Return true if the result of assignment STMT is known to be non-negative.
7230 If the return value is based on the assumption that signed overflow is
7231 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7232 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7234 static bool
7235 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7236 int depth)
7238 enum tree_code code = gimple_assign_rhs_code (stmt);
7239 switch (get_gimple_rhs_class (code))
7241 case GIMPLE_UNARY_RHS:
7242 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7243 gimple_expr_type (stmt),
7244 gimple_assign_rhs1 (stmt),
7245 strict_overflow_p, depth);
7246 case GIMPLE_BINARY_RHS:
7247 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7248 gimple_expr_type (stmt),
7249 gimple_assign_rhs1 (stmt),
7250 gimple_assign_rhs2 (stmt),
7251 strict_overflow_p, depth);
7252 case GIMPLE_TERNARY_RHS:
7253 return false;
7254 case GIMPLE_SINGLE_RHS:
7255 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7256 strict_overflow_p, depth);
7257 case GIMPLE_INVALID_RHS:
7258 break;
7260 gcc_unreachable ();
7263 /* Return true if return value of call STMT is known to be non-negative.
7264 If the return value is based on the assumption that signed overflow is
7265 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7266 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7268 static bool
7269 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7270 int depth)
7272 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7273 gimple_call_arg (stmt, 0) : NULL_TREE;
7274 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7275 gimple_call_arg (stmt, 1) : NULL_TREE;
7277 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7278 gimple_call_combined_fn (stmt),
7279 arg0,
7280 arg1,
7281 strict_overflow_p, depth);
7284 /* Return true if return value of call STMT is known to be non-negative.
7285 If the return value is based on the assumption that signed overflow is
7286 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7287 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7289 static bool
7290 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7291 int depth)
7293 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7295 tree arg = gimple_phi_arg_def (stmt, i);
7296 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7297 return false;
7299 return true;
7302 /* Return true if STMT is known to compute a non-negative value.
7303 If the return value is based on the assumption that signed overflow is
7304 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7305 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7307 bool
7308 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7309 int depth)
7311 switch (gimple_code (stmt))
7313 case GIMPLE_ASSIGN:
7314 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7315 depth);
7316 case GIMPLE_CALL:
7317 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7318 depth);
7319 case GIMPLE_PHI:
7320 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7321 depth);
7322 default:
7323 return false;
7327 /* Return true if the floating-point value computed by assignment STMT
7328 is known to have an integer value. We also allow +Inf, -Inf and NaN
7329 to be considered integer values. Return false for signaling NaN.
7331 DEPTH is the current nesting depth of the query. */
7333 static bool
7334 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7336 enum tree_code code = gimple_assign_rhs_code (stmt);
7337 switch (get_gimple_rhs_class (code))
7339 case GIMPLE_UNARY_RHS:
7340 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7341 gimple_assign_rhs1 (stmt), depth);
7342 case GIMPLE_BINARY_RHS:
7343 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7344 gimple_assign_rhs1 (stmt),
7345 gimple_assign_rhs2 (stmt), depth);
7346 case GIMPLE_TERNARY_RHS:
7347 return false;
7348 case GIMPLE_SINGLE_RHS:
7349 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7350 case GIMPLE_INVALID_RHS:
7351 break;
7353 gcc_unreachable ();
7356 /* Return true if the floating-point value computed by call STMT is known
7357 to have an integer value. We also allow +Inf, -Inf and NaN to be
7358 considered integer values. Return false for signaling NaN.
7360 DEPTH is the current nesting depth of the query. */
7362 static bool
7363 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7365 tree arg0 = (gimple_call_num_args (stmt) > 0
7366 ? gimple_call_arg (stmt, 0)
7367 : NULL_TREE);
7368 tree arg1 = (gimple_call_num_args (stmt) > 1
7369 ? gimple_call_arg (stmt, 1)
7370 : NULL_TREE);
7371 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7372 arg0, arg1, depth);
7375 /* Return true if the floating-point result of phi STMT is known to have
7376 an integer value. We also allow +Inf, -Inf and NaN to be considered
7377 integer values. Return false for signaling NaN.
7379 DEPTH is the current nesting depth of the query. */
7381 static bool
7382 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7384 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7386 tree arg = gimple_phi_arg_def (stmt, i);
7387 if (!integer_valued_real_single_p (arg, depth + 1))
7388 return false;
7390 return true;
7393 /* Return true if the floating-point value computed by STMT is known
7394 to have an integer value. We also allow +Inf, -Inf and NaN to be
7395 considered integer values. Return false for signaling NaN.
7397 DEPTH is the current nesting depth of the query. */
7399 bool
7400 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7402 switch (gimple_code (stmt))
7404 case GIMPLE_ASSIGN:
7405 return gimple_assign_integer_valued_real_p (stmt, depth);
7406 case GIMPLE_CALL:
7407 return gimple_call_integer_valued_real_p (stmt, depth);
7408 case GIMPLE_PHI:
7409 return gimple_phi_integer_valued_real_p (stmt, depth);
7410 default:
7411 return false;