2015-10-01 Steven G. Kargl <kargl@gcc.gnu.org>
[official-gcc.git] / gcc / gimple-fold.c
bloba6caaa490689c4c22485c1e4202999884d0c45bb
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "predict.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "ssa.h"
30 #include "alias.h"
31 #include "fold-const.h"
32 #include "flags.h"
33 #include "insn-config.h"
34 #include "expmed.h"
35 #include "dojump.h"
36 #include "explow.h"
37 #include "calls.h"
38 #include "emit-rtl.h"
39 #include "varasm.h"
40 #include "stmt.h"
41 #include "expr.h"
42 #include "stor-layout.h"
43 #include "dumpfile.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
50 #include "tree-dfa.h"
51 #include "tree-ssa.h"
52 #include "tree-ssa-propagate.h"
53 #include "target.h"
54 #include "cgraph.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
60 #include "dbgcnt.h"
61 #include "builtins.h"
62 #include "output.h"
63 #include "tree-eh.h"
64 #include "gimple-match.h"
65 #include "gomp-constants.h"
67 /* Return true when DECL can be referenced from current unit.
68 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
69 We can get declarations that are not possible to reference for various
70 reasons:
72 1) When analyzing C++ virtual tables.
73 C++ virtual tables do have known constructors even
74 when they are keyed to other compilation unit.
75 Those tables can contain pointers to methods and vars
76 in other units. Those methods have both STATIC and EXTERNAL
77 set.
78 2) In WHOPR mode devirtualization might lead to reference
79 to method that was partitioned elsehwere.
80 In this case we have static VAR_DECL or FUNCTION_DECL
81 that has no corresponding callgraph/varpool node
82 declaring the body.
83 3) COMDAT functions referred by external vtables that
84 we devirtualize only during final compilation stage.
85 At this time we already decided that we will not output
86 the function body and thus we can't reference the symbol
87 directly. */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract declarations (debug-info only) never have a body or storage
     we could refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they was not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function whose body was inlined everywhere may no longer be
	 materialized, so refuse to refer to it.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may reffer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the curren tunit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
167 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
168 acceptable form for is_gimple_min_invariant.
169 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite &X p+ CST into &MEM_REF[&X, CST] so the result is a plain
     ADDR_EXPR that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its backing decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the base symbol cannot be referenced from this
	 translation unit (e.g. privatized COMDAT in another partition).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants;
     return an overflow-free copy.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
228 /* If SYM is a constant variable with known value, return the value.
229 NULL_TREE is returned otherwise. */
231 tree
232 get_symbol_constant_value (tree sym)
234 tree val = ctor_for_folding (sym);
235 if (val != error_mark_node)
237 if (val)
239 val = canonicalize_constructor_val (unshare_expr (val), sym);
240 if (val && is_gimple_min_invariant (val))
241 return val;
242 else
243 return NULL_TREE;
245 /* Variables declared 'const' without an initializer
246 have zero as the initializer if they may not be
247 overridden at link or run time. */
248 if (!val
249 && is_gimple_reg_type (TREE_TYPE (sym)))
250 return build_zero_cst (TREE_TYPE (sym));
253 return NULL_TREE;
258 /* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
263 static tree
264 maybe_fold_reference (tree expr, bool is_lhs)
266 tree result;
268 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
269 || TREE_CODE (expr) == REALPART_EXPR
270 || TREE_CODE (expr) == IMAGPART_EXPR)
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_unary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0));
276 else if (TREE_CODE (expr) == BIT_FIELD_REF
277 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
278 return fold_ternary_loc (EXPR_LOCATION (expr),
279 TREE_CODE (expr),
280 TREE_TYPE (expr),
281 TREE_OPERAND (expr, 0),
282 TREE_OPERAND (expr, 1),
283 TREE_OPERAND (expr, 2));
285 if (!is_lhs
286 && (result = fold_const_aggregate_ref (expr))
287 && is_gimple_min_invariant (result))
288 return result;
290 return NULL_TREE;
294 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
295 replacement rhs for the statement or NULL_TREE if no simplification
296 could be made. It is assumed that the operands have been previously
297 folded. */
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* With a complete (final) target list of at most one entry
		   we can resolve the virtual function address statically.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM_REF[p, 0] simplifies to p converted to the LHS type.  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
442 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
443 adjusting the replacement stmts location and virtual operands.
444 If the statement has a lhs the last stmt in the sequence is expected
445 to assign to that lhs. */
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store (non-register single
	 assignment) or a call that may clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the VDEF of the
	     replaced statement; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
514 /* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
517 is replaced. If the call is expected to produces a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is unused: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Materialize EXPR into a temporary, then assign it to the call's
	 original LHS as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, preserving the VUSE/VDEF chain of STMT.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
571 /* Replace the call at *GSI with the gimple value VAL. */
573 static void
574 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
576 gimple *stmt = gsi_stmt (*gsi);
577 tree lhs = gimple_call_lhs (stmt);
578 gimple *repl;
579 if (lhs)
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
593 gsi_replace (gsi, repl, false);
596 /* Replace the call at *GSI with the new call REPL and fold that
597 again. */
599 static void
600 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
602 gimple *stmt = gsi_stmt (*gsi);
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
612 gsi_replace (gsi, repl, false);
613 fold_stmt (gsi);
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
618 static bool
619 var_decl_component_p (tree var)
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
627 /* Fold function call to builtin mem{{,p}cpy,move}. Return
628 false if no simplification can be made.
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
633 (memmove). */
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      /* The replacement does not store, so release the call's VDEF.  */
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      /* LHS present: fall through to compute the ENDP-adjusted result.  */
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  /* Only power-of-two lengths can map to a single integer mode.  */
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* Not constant and the load would be slow unaligned:
		     give up on this path.  */
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      /* Load the source into a register first if possible.  */
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      /* The store takes over the call's virtual operands.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      /* ENDP == 3 means memmove semantics: SRC and DEST may overlap.  */
      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      /* Prefer the actual copy length over the access extent.  */
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Same base object with overlapping ranges: must stay
		     memmove.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      /* For an array whose size does not match LEN use the element type.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode may fail; fall back to the other side.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      /* Try to express destination and source as direct variable
	 accesses of the full copied size.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      /* If only one side could be expressed, build the other side as a
	 MEM_REF through the raw pointer with a matching type.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Emit the copy as a plain (possibly register-mediated) assignment.  */
      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Compute the value to assign to the LHS per ENDP:
     0/3 -> DEST, 1 -> DEST + LEN, 2 -> DEST + LEN - 1.  */
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1056 /* Fold function call to builtin memset or bzero at *GSI setting the
1057 memory of size LEN to VAL. Return whether a simplification was made. */
1059 static bool
1060 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1062 gimple *stmt = gsi_stmt (*gsi);
1063 tree etype;
1064 unsigned HOST_WIDE_INT length, cval;
1066 /* If the LEN parameter is zero, return DEST. */
1067 if (integer_zerop (len))
1069 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1070 return true;
1073 if (! tree_fits_uhwi_p (len))
1074 return false;
1076 if (TREE_CODE (c) != INTEGER_CST)
1077 return false;
1079 tree dest = gimple_call_arg (stmt, 0);
1080 tree var = dest;
1081 if (TREE_CODE (var) != ADDR_EXPR)
1082 return false;
1084 var = TREE_OPERAND (var, 0);
1085 if (TREE_THIS_VOLATILE (var))
1086 return false;
1088 etype = TREE_TYPE (var);
1089 if (TREE_CODE (etype) == ARRAY_TYPE)
1090 etype = TREE_TYPE (etype);
1092 if (!INTEGRAL_TYPE_P (etype)
1093 && !POINTER_TYPE_P (etype))
1094 return NULL_TREE;
1096 if (! var_decl_component_p (var))
1097 return NULL_TREE;
1099 length = tree_to_uhwi (len);
1100 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1101 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1102 return NULL_TREE;
1104 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1105 return NULL_TREE;
1107 if (integer_zerop (c))
1108 cval = 0;
1109 else
1111 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1112 return NULL_TREE;
1114 cval = TREE_INT_CST_LOW (c);
1115 cval &= 0xff;
1116 cval |= cval << 8;
1117 cval |= cval << 16;
1118 cval |= (cval << 31) << 1;
1121 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1122 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1123 gimple_set_vuse (store, gimple_vuse (stmt));
1124 tree vdef = gimple_vdef (stmt);
1125 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1127 gimple_set_vdef (store, gimple_vdef (stmt));
1128 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1130 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1131 if (gimple_call_lhs (stmt))
1133 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1134 gsi_replace (gsi, asgn, false);
1136 else
1138 gimple_stmt_iterator gsi2 = *gsi;
1139 gsi_prev (gsi);
1140 gsi_remove (&gsi2, true);
1143 return true;
1147 /* Return the string length, maximum string length or maximum value of
1148 ARG in LENGTH.
1149 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1150 is not NULL and, for TYPE == 0, its value is not equal to the length
1151 we determine or if we are unable to determine the length or value,
1152 return false. VISITED is a bitmap of visited variables.
1153 TYPE is 0 if string length should be returned, 1 for maximum string
1154 length and 2 for maximum value ARG can have. */
1156 static bool
1157 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1159 tree var, val;
1160 gimple *def_stmt;
1162 if (TREE_CODE (arg) != SSA_NAME)
1164 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1165 if (TREE_CODE (arg) == ADDR_EXPR
1166 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1167 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1169 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1170 if (TREE_CODE (aop0) == INDIRECT_REF
1171 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1172 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1173 length, visited, type);
1176 if (type == 2)
1178 val = arg;
1179 if (TREE_CODE (val) != INTEGER_CST
1180 || tree_int_cst_sgn (val) < 0)
1181 return false;
1183 else
1184 val = c_strlen (arg, 1);
1185 if (!val)
1186 return false;
1188 if (*length)
1190 if (type > 0)
1192 if (TREE_CODE (*length) != INTEGER_CST
1193 || TREE_CODE (val) != INTEGER_CST)
1194 return false;
1196 if (tree_int_cst_lt (*length, val))
1197 *length = val;
1198 return true;
1200 else if (simple_cst_equal (val, *length) != 1)
1201 return false;
1204 *length = val;
1205 return true;
1208 /* If ARG is registered for SSA update we cannot look at its defining
1209 statement. */
1210 if (name_registered_for_update_p (arg))
1211 return false;
1213 /* If we were already here, break the infinite cycle. */
1214 if (!*visited)
1215 *visited = BITMAP_ALLOC (NULL);
1216 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1217 return true;
1219 var = arg;
1220 def_stmt = SSA_NAME_DEF_STMT (var);
1222 switch (gimple_code (def_stmt))
1224 case GIMPLE_ASSIGN:
1225 /* The RHS of the statement defining VAR must either have a
1226 constant length or come from another SSA_NAME with a constant
1227 length. */
1228 if (gimple_assign_single_p (def_stmt)
1229 || gimple_assign_unary_nop_p (def_stmt))
1231 tree rhs = gimple_assign_rhs1 (def_stmt);
1232 return get_maxval_strlen (rhs, length, visited, type);
1234 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1236 tree op2 = gimple_assign_rhs2 (def_stmt);
1237 tree op3 = gimple_assign_rhs3 (def_stmt);
1238 return get_maxval_strlen (op2, length, visited, type)
1239 && get_maxval_strlen (op3, length, visited, type);
1241 return false;
1243 case GIMPLE_PHI:
1245 /* All the arguments of the PHI node must have the same constant
1246 length. */
1247 unsigned i;
1249 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1251 tree arg = gimple_phi_arg (def_stmt, i)->def;
1253 /* If this PHI has itself as an argument, we cannot
1254 determine the string length of this argument. However,
1255 if we can find a constant string length for the other
1256 PHI args then we can still be sure that this is a
1257 constant string length. So be optimistic and just
1258 continue with the next argument. */
1259 if (arg == gimple_phi_result (def_stmt))
1260 continue;
1262 if (!get_maxval_strlen (arg, length, visited, type))
1263 return false;
1266 return true;
1268 default:
1269 return false;
1273 tree
1274 get_maxval_strlen (tree arg, int type)
1276 bitmap visited = NULL;
1277 tree len = NULL_TREE;
1278 if (!get_maxval_strlen (arg, &len, &visited, type))
1279 len = NULL_TREE;
1280 if (visited)
1281 BITMAP_FREE (visited);
1283 return len;
1287 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1288 If LEN is not NULL, it represents the length of the string to be
1289 copied. Return NULL_TREE if no simplification can be made. */
1291 static bool
1292 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1293 tree dest, tree src)
1295 location_t loc = gimple_location (gsi_stmt (*gsi));
1296 tree fn;
1298 /* If SRC and DEST are the same (and not volatile), return DEST. */
1299 if (operand_equal_p (src, dest, 0))
1301 replace_call_with_value (gsi, dest);
1302 return true;
1305 if (optimize_function_for_size_p (cfun))
1306 return false;
1308 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1309 if (!fn)
1310 return false;
1312 tree len = get_maxval_strlen (src, 0);
1313 if (!len)
1314 return false;
1316 len = fold_convert_loc (loc, size_type_node, len);
1317 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1318 len = force_gimple_operand_gsi (gsi, len, true,
1319 NULL_TREE, true, GSI_SAME_STMT);
1320 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1321 replace_call_with_call_and_fold (gsi, repl);
1322 return true;
1325 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1326 If SLEN is not NULL, it represents the length of the source string.
1327 Return NULL_TREE if no simplification can be made. */
1329 static bool
1330 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1331 tree dest, tree src, tree len)
1333 location_t loc = gimple_location (gsi_stmt (*gsi));
1334 tree fn;
1336 /* If the LEN parameter is zero, return DEST. */
1337 if (integer_zerop (len))
1339 replace_call_with_value (gsi, dest);
1340 return true;
1343 /* We can't compare slen with len as constants below if len is not a
1344 constant. */
1345 if (TREE_CODE (len) != INTEGER_CST)
1346 return false;
1348 /* Now, we must be passed a constant src ptr parameter. */
1349 tree slen = get_maxval_strlen (src, 0);
1350 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1351 return false;
1353 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1355 /* We do not support simplification of this case, though we do
1356 support it when expanding trees into RTL. */
1357 /* FIXME: generate a call to __builtin_memset. */
1358 if (tree_int_cst_lt (slen, len))
1359 return false;
1361 /* OK transform into builtin memcpy. */
1362 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1363 if (!fn)
1364 return false;
1366 len = fold_convert_loc (loc, size_type_node, len);
1367 len = force_gimple_operand_gsi (gsi, len, true,
1368 NULL_TREE, true, GSI_SAME_STMT);
1369 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1370 replace_call_with_call_and_fold (gsi, repl);
1371 return true;
1374 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1375 to the call.
1377 Return NULL_TREE if no simplification was possible, otherwise return the
1378 simplified form of the call as a tree.
1380 The simplified form may be a constant or other expression which
1381 computes the same value, but in a more efficient manner (including
1382 calls to other builtin functions).
1384 The call may contain arguments which need to be evaluated, but
1385 which are not useful to determine the result of the call. In
1386 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1387 COMPOUND_EXPR will be an argument which must be evaluated.
1388 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1389 COMPOUND_EXPR in the chain will contain the tree for the simplified
1390 form of the builtin function call. */
1392 static bool
1393 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1395 gimple *stmt = gsi_stmt (*gsi);
1396 location_t loc = gimple_location (stmt);
1398 const char *p = c_getstr (src);
1400 /* If the string length is zero, return the dst parameter. */
1401 if (p && *p == '\0')
1403 replace_call_with_value (gsi, dst);
1404 return true;
1407 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1408 return false;
1410 /* See if we can store by pieces into (dst + strlen(dst)). */
1411 tree newdst;
1412 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1413 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1415 if (!strlen_fn || !memcpy_fn)
1416 return false;
1418 /* If the length of the source string isn't computable don't
1419 split strcat into strlen and memcpy. */
1420 tree len = get_maxval_strlen (src, 0);
1421 if (! len)
1422 return false;
1424 /* Create strlen (dst). */
1425 gimple_seq stmts = NULL, stmts2;
1426 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1427 gimple_set_location (repl, loc);
1428 if (gimple_in_ssa_p (cfun))
1429 newdst = make_ssa_name (size_type_node);
1430 else
1431 newdst = create_tmp_reg (size_type_node);
1432 gimple_call_set_lhs (repl, newdst);
1433 gimple_seq_add_stmt_without_update (&stmts, repl);
1435 /* Create (dst p+ strlen (dst)). */
1436 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1437 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1438 gimple_seq_add_seq_without_update (&stmts, stmts2);
1440 len = fold_convert_loc (loc, size_type_node, len);
1441 len = size_binop_loc (loc, PLUS_EXPR, len,
1442 build_int_cst (size_type_node, 1));
1443 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1444 gimple_seq_add_seq_without_update (&stmts, stmts2);
1446 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1447 gimple_seq_add_stmt_without_update (&stmts, repl);
1448 if (gimple_call_lhs (stmt))
1450 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1451 gimple_seq_add_stmt_without_update (&stmts, repl);
1452 gsi_replace_with_seq_vops (gsi, stmts);
1453 /* gsi now points at the assignment to the lhs, get a
1454 stmt iterator to the memcpy call.
1455 ??? We can't use gsi_for_stmt as that doesn't work when the
1456 CFG isn't built yet. */
1457 gimple_stmt_iterator gsi2 = *gsi;
1458 gsi_prev (&gsi2);
1459 fold_stmt (&gsi2);
1461 else
1463 gsi_replace_with_seq_vops (gsi, stmts);
1464 fold_stmt (gsi);
1466 return true;
1469 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1470 are the arguments to the call. */
1472 static bool
1473 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1475 gimple *stmt = gsi_stmt (*gsi);
1476 tree dest = gimple_call_arg (stmt, 0);
1477 tree src = gimple_call_arg (stmt, 1);
1478 tree size = gimple_call_arg (stmt, 2);
1479 tree fn;
1480 const char *p;
1483 p = c_getstr (src);
1484 /* If the SRC parameter is "", return DEST. */
1485 if (p && *p == '\0')
1487 replace_call_with_value (gsi, dest);
1488 return true;
1491 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1492 return false;
1494 /* If __builtin_strcat_chk is used, assume strcat is available. */
1495 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1496 if (!fn)
1497 return false;
1499 gimple *repl = gimple_build_call (fn, 2, dest, src);
1500 replace_call_with_call_and_fold (gsi, repl);
1501 return true;
1504 /* Simplify a call to the strncat builtin. */
1506 static bool
1507 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1509 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1510 tree dst = gimple_call_arg (stmt, 0);
1511 tree src = gimple_call_arg (stmt, 1);
1512 tree len = gimple_call_arg (stmt, 2);
1514 const char *p = c_getstr (src);
1516 /* If the requested length is zero, or the src parameter string
1517 length is zero, return the dst parameter. */
1518 if (integer_zerop (len) || (p && *p == '\0'))
1520 replace_call_with_value (gsi, dst);
1521 return true;
1524 /* If the requested len is greater than or equal to the string
1525 length, call strcat. */
1526 if (TREE_CODE (len) == INTEGER_CST && p
1527 && compare_tree_int (len, strlen (p)) >= 0)
1529 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1531 /* If the replacement _DECL isn't initialized, don't do the
1532 transformation. */
1533 if (!fn)
1534 return false;
1536 gcall *repl = gimple_build_call (fn, 2, dst, src);
1537 replace_call_with_call_and_fold (gsi, repl);
1538 return true;
1541 return false;
1544 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1545 LEN, and SIZE. */
1547 static bool
1548 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1550 gimple *stmt = gsi_stmt (*gsi);
1551 tree dest = gimple_call_arg (stmt, 0);
1552 tree src = gimple_call_arg (stmt, 1);
1553 tree len = gimple_call_arg (stmt, 2);
1554 tree size = gimple_call_arg (stmt, 3);
1555 tree fn;
1556 const char *p;
1558 p = c_getstr (src);
1559 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1560 if ((p && *p == '\0')
1561 || integer_zerop (len))
1563 replace_call_with_value (gsi, dest);
1564 return true;
1567 if (! tree_fits_uhwi_p (size))
1568 return false;
1570 if (! integer_all_onesp (size))
1572 tree src_len = c_strlen (src, 1);
1573 if (src_len
1574 && tree_fits_uhwi_p (src_len)
1575 && tree_fits_uhwi_p (len)
1576 && ! tree_int_cst_lt (len, src_len))
1578 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1579 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1580 if (!fn)
1581 return false;
1583 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1584 replace_call_with_call_and_fold (gsi, repl);
1585 return true;
1587 return false;
1590 /* If __builtin_strncat_chk is used, assume strncat is available. */
1591 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1592 if (!fn)
1593 return false;
1595 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1596 replace_call_with_call_and_fold (gsi, repl);
1597 return true;
1600 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1601 to the call. IGNORE is true if the value returned
1602 by the builtin will be ignored. UNLOCKED is true is true if this
1603 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1604 the known length of the string. Return NULL_TREE if no simplification
1605 was possible. */
1607 static bool
1608 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1609 tree arg0, tree arg1,
1610 bool unlocked)
1612 gimple *stmt = gsi_stmt (*gsi);
1614 /* If we're using an unlocked function, assume the other unlocked
1615 functions exist explicitly. */
1616 tree const fn_fputc = (unlocked
1617 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1618 : builtin_decl_implicit (BUILT_IN_FPUTC));
1619 tree const fn_fwrite = (unlocked
1620 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1621 : builtin_decl_implicit (BUILT_IN_FWRITE));
1623 /* If the return value is used, don't do the transformation. */
1624 if (gimple_call_lhs (stmt))
1625 return false;
1627 /* Get the length of the string passed to fputs. If the length
1628 can't be determined, punt. */
1629 tree len = get_maxval_strlen (arg0, 0);
1630 if (!len
1631 || TREE_CODE (len) != INTEGER_CST)
1632 return false;
1634 switch (compare_tree_int (len, 1))
1636 case -1: /* length is 0, delete the call entirely . */
1637 replace_call_with_value (gsi, integer_zero_node);
1638 return true;
1640 case 0: /* length is 1, call fputc. */
1642 const char *p = c_getstr (arg0);
1643 if (p != NULL)
1645 if (!fn_fputc)
1646 return false;
1648 gimple *repl = gimple_build_call (fn_fputc, 2,
1649 build_int_cst
1650 (integer_type_node, p[0]), arg1);
1651 replace_call_with_call_and_fold (gsi, repl);
1652 return true;
1655 /* FALLTHROUGH */
1656 case 1: /* length is greater than 1, call fwrite. */
1658 /* If optimizing for size keep fputs. */
1659 if (optimize_function_for_size_p (cfun))
1660 return false;
1661 /* New argument list transforming fputs(string, stream) to
1662 fwrite(string, 1, len, stream). */
1663 if (!fn_fwrite)
1664 return false;
1666 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1667 size_one_node, len, arg1);
1668 replace_call_with_call_and_fold (gsi, repl);
1669 return true;
1671 default:
1672 gcc_unreachable ();
1674 return false;
1677 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1678 DEST, SRC, LEN, and SIZE are the arguments to the call.
1679 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1680 code of the builtin. If MAXLEN is not NULL, it is maximum length
1681 passed as third argument. */
1683 static bool
1684 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1685 tree dest, tree src, tree len, tree size,
1686 enum built_in_function fcode)
1688 gimple *stmt = gsi_stmt (*gsi);
1689 location_t loc = gimple_location (stmt);
1690 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1691 tree fn;
1693 /* If SRC and DEST are the same (and not volatile), return DEST
1694 (resp. DEST+LEN for __mempcpy_chk). */
1695 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1697 if (fcode != BUILT_IN_MEMPCPY_CHK)
1699 replace_call_with_value (gsi, dest);
1700 return true;
1702 else
1704 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1705 temp = force_gimple_operand_gsi (gsi, temp,
1706 false, NULL_TREE, true,
1707 GSI_SAME_STMT);
1708 replace_call_with_value (gsi, temp);
1709 return true;
1713 if (! tree_fits_uhwi_p (size))
1714 return false;
1716 tree maxlen = get_maxval_strlen (len, 2);
1717 if (! integer_all_onesp (size))
1719 if (! tree_fits_uhwi_p (len))
1721 /* If LEN is not constant, try MAXLEN too.
1722 For MAXLEN only allow optimizing into non-_ocs function
1723 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1724 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1726 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1728 /* (void) __mempcpy_chk () can be optimized into
1729 (void) __memcpy_chk (). */
1730 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1731 if (!fn)
1732 return false;
1734 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1735 replace_call_with_call_and_fold (gsi, repl);
1736 return true;
1738 return false;
1741 else
1742 maxlen = len;
1744 if (tree_int_cst_lt (size, maxlen))
1745 return false;
1748 fn = NULL_TREE;
1749 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1750 mem{cpy,pcpy,move,set} is available. */
1751 switch (fcode)
1753 case BUILT_IN_MEMCPY_CHK:
1754 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1755 break;
1756 case BUILT_IN_MEMPCPY_CHK:
1757 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1758 break;
1759 case BUILT_IN_MEMMOVE_CHK:
1760 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1761 break;
1762 case BUILT_IN_MEMSET_CHK:
1763 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1764 break;
1765 default:
1766 break;
1769 if (!fn)
1770 return false;
1772 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1773 replace_call_with_call_and_fold (gsi, repl);
1774 return true;
1777 /* Fold a call to the __st[rp]cpy_chk builtin.
1778 DEST, SRC, and SIZE are the arguments to the call.
1779 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1780 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1781 strings passed as second argument. */
1783 static bool
1784 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1785 tree dest,
1786 tree src, tree size,
1787 enum built_in_function fcode)
1789 gimple *stmt = gsi_stmt (*gsi);
1790 location_t loc = gimple_location (stmt);
1791 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1792 tree len, fn;
1794 /* If SRC and DEST are the same (and not volatile), return DEST. */
1795 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1797 replace_call_with_value (gsi, dest);
1798 return true;
1801 if (! tree_fits_uhwi_p (size))
1802 return false;
1804 tree maxlen = get_maxval_strlen (src, 1);
1805 if (! integer_all_onesp (size))
1807 len = c_strlen (src, 1);
1808 if (! len || ! tree_fits_uhwi_p (len))
1810 /* If LEN is not constant, try MAXLEN too.
1811 For MAXLEN only allow optimizing into non-_ocs function
1812 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1813 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1815 if (fcode == BUILT_IN_STPCPY_CHK)
1817 if (! ignore)
1818 return false;
1820 /* If return value of __stpcpy_chk is ignored,
1821 optimize into __strcpy_chk. */
1822 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1823 if (!fn)
1824 return false;
1826 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1827 replace_call_with_call_and_fold (gsi, repl);
1828 return true;
1831 if (! len || TREE_SIDE_EFFECTS (len))
1832 return false;
1834 /* If c_strlen returned something, but not a constant,
1835 transform __strcpy_chk into __memcpy_chk. */
1836 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1837 if (!fn)
1838 return false;
1840 len = fold_convert_loc (loc, size_type_node, len);
1841 len = size_binop_loc (loc, PLUS_EXPR, len,
1842 build_int_cst (size_type_node, 1));
1843 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1844 true, GSI_SAME_STMT);
1845 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1846 replace_call_with_call_and_fold (gsi, repl);
1847 return true;
1850 else
1851 maxlen = len;
1853 if (! tree_int_cst_lt (maxlen, size))
1854 return false;
1857 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1858 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1859 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1860 if (!fn)
1861 return false;
1863 gimple *repl = gimple_build_call (fn, 2, dest, src);
1864 replace_call_with_call_and_fold (gsi, repl);
1865 return true;
1868 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1869 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1870 length passed as third argument. IGNORE is true if return value can be
1871 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1873 static bool
1874 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1875 tree dest, tree src,
1876 tree len, tree size,
1877 enum built_in_function fcode)
1879 gimple *stmt = gsi_stmt (*gsi);
1880 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1881 tree fn;
1883 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1885 /* If return value of __stpncpy_chk is ignored,
1886 optimize into __strncpy_chk. */
1887 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1888 if (fn)
1890 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1896 if (! tree_fits_uhwi_p (size))
1897 return false;
1899 tree maxlen = get_maxval_strlen (len, 2);
1900 if (! integer_all_onesp (size))
1902 if (! tree_fits_uhwi_p (len))
1904 /* If LEN is not constant, try MAXLEN too.
1905 For MAXLEN only allow optimizing into non-_ocs function
1906 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1907 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1908 return false;
1910 else
1911 maxlen = len;
1913 if (tree_int_cst_lt (size, maxlen))
1914 return false;
1917 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1918 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1919 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1920 if (!fn)
1921 return false;
1923 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1924 replace_call_with_call_and_fold (gsi, repl);
1925 return true;
1928 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1929 Return NULL_TREE if no simplification can be made. */
1931 static bool
1932 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1934 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1935 location_t loc = gimple_location (stmt);
1936 tree dest = gimple_call_arg (stmt, 0);
1937 tree src = gimple_call_arg (stmt, 1);
1938 tree fn, len, lenp1;
1940 /* If the result is unused, replace stpcpy with strcpy. */
1941 if (gimple_call_lhs (stmt) == NULL_TREE)
1943 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1944 if (!fn)
1945 return false;
1946 gimple_call_set_fndecl (stmt, fn);
1947 fold_stmt (gsi);
1948 return true;
1951 len = c_strlen (src, 1);
1952 if (!len
1953 || TREE_CODE (len) != INTEGER_CST)
1954 return false;
1956 if (optimize_function_for_size_p (cfun)
1957 /* If length is zero it's small enough. */
1958 && !integer_zerop (len))
1959 return false;
1961 /* If the source has a known length replace stpcpy with memcpy. */
1962 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1963 if (!fn)
1964 return false;
1966 gimple_seq stmts = NULL;
1967 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1968 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1969 tem, build_int_cst (size_type_node, 1));
1970 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1971 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1972 gimple_set_vuse (repl, gimple_vuse (stmt));
1973 gimple_set_vdef (repl, gimple_vdef (stmt));
1974 if (gimple_vdef (repl)
1975 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1976 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1977 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1978 /* Replace the result with dest + len. */
1979 stmts = NULL;
1980 tem = gimple_convert (&stmts, loc, sizetype, len);
1981 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1982 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1983 POINTER_PLUS_EXPR, dest, tem);
1984 gsi_replace (gsi, ret, false);
1985 /* Finally fold the memcpy call. */
1986 gimple_stmt_iterator gsi2 = *gsi;
1987 gsi_prev (&gsi2);
1988 fold_stmt (&gsi2);
1989 return true;
1992 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1993 NULL_TREE if a normal call should be emitted rather than expanding
1994 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1995 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1996 passed as second argument. */
1998 static bool
1999 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2000 enum built_in_function fcode)
2002 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2003 tree dest, size, len, fn, fmt, flag;
2004 const char *fmt_str;
2006 /* Verify the required arguments in the original call. */
2007 if (gimple_call_num_args (stmt) < 5)
2008 return false;
2010 dest = gimple_call_arg (stmt, 0);
2011 len = gimple_call_arg (stmt, 1);
2012 flag = gimple_call_arg (stmt, 2);
2013 size = gimple_call_arg (stmt, 3);
2014 fmt = gimple_call_arg (stmt, 4);
2016 if (! tree_fits_uhwi_p (size))
2017 return false;
2019 if (! integer_all_onesp (size))
2021 tree maxlen = get_maxval_strlen (len, 2);
2022 if (! tree_fits_uhwi_p (len))
2024 /* If LEN is not constant, try MAXLEN too.
2025 For MAXLEN only allow optimizing into non-_ocs function
2026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2027 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2028 return false;
2030 else
2031 maxlen = len;
2033 if (tree_int_cst_lt (size, maxlen))
2034 return false;
2037 if (!init_target_chars ())
2038 return false;
2040 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2041 or if format doesn't contain % chars or is "%s". */
2042 if (! integer_zerop (flag))
2044 fmt_str = c_getstr (fmt);
2045 if (fmt_str == NULL)
2046 return false;
2047 if (strchr (fmt_str, target_percent) != NULL
2048 && strcmp (fmt_str, target_percent_s))
2049 return false;
2052 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2053 available. */
2054 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2055 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2056 if (!fn)
2057 return false;
2059 /* Replace the called function and the first 5 argument by 3 retaining
2060 trailing varargs. */
2061 gimple_call_set_fndecl (stmt, fn);
2062 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2063 gimple_call_set_arg (stmt, 0, dest);
2064 gimple_call_set_arg (stmt, 1, len);
2065 gimple_call_set_arg (stmt, 2, fmt);
2066 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2067 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2068 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2069 fold_stmt (gsi);
2070 return true;
2073 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2074 Return NULL_TREE if a normal call should be emitted rather than
2075 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2076 or BUILT_IN_VSPRINTF_CHK. */
2078 static bool
2079 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2080 enum built_in_function fcode)
2082 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2083 tree dest, size, len, fn, fmt, flag;
2084 const char *fmt_str;
2085 unsigned nargs = gimple_call_num_args (stmt);
2087 /* Verify the required arguments in the original call. */
2088 if (nargs < 4)
2089 return false;
2090 dest = gimple_call_arg (stmt, 0);
2091 flag = gimple_call_arg (stmt, 1);
2092 size = gimple_call_arg (stmt, 2);
2093 fmt = gimple_call_arg (stmt, 3);
2095 if (! tree_fits_uhwi_p (size))
2096 return false;
2098 len = NULL_TREE;
2100 if (!init_target_chars ())
2101 return false;
2103 /* Check whether the format is a literal string constant. */
2104 fmt_str = c_getstr (fmt);
2105 if (fmt_str != NULL)
2107 /* If the format doesn't contain % args or %%, we know the size. */
2108 if (strchr (fmt_str, target_percent) == 0)
2110 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2111 len = build_int_cstu (size_type_node, strlen (fmt_str));
2113 /* If the format is "%s" and first ... argument is a string literal,
2114 we know the size too. */
2115 else if (fcode == BUILT_IN_SPRINTF_CHK
2116 && strcmp (fmt_str, target_percent_s) == 0)
2118 tree arg;
2120 if (nargs == 5)
2122 arg = gimple_call_arg (stmt, 4);
2123 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2125 len = c_strlen (arg, 1);
2126 if (! len || ! tree_fits_uhwi_p (len))
2127 len = NULL_TREE;
2133 if (! integer_all_onesp (size))
2135 if (! len || ! tree_int_cst_lt (len, size))
2136 return false;
2139 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2140 or if format doesn't contain % chars or is "%s". */
2141 if (! integer_zerop (flag))
2143 if (fmt_str == NULL)
2144 return false;
2145 if (strchr (fmt_str, target_percent) != NULL
2146 && strcmp (fmt_str, target_percent_s))
2147 return false;
2150 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2151 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2152 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2153 if (!fn)
2154 return false;
2156 /* Replace the called function and the first 4 argument by 2 retaining
2157 trailing varargs. */
2158 gimple_call_set_fndecl (stmt, fn);
2159 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2160 gimple_call_set_arg (stmt, 0, dest);
2161 gimple_call_set_arg (stmt, 1, fmt);
2162 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2163 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2164 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2165 fold_stmt (gsi);
2166 return true;
2169 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2170 ORIG may be null if this is a 2-argument call. We don't attempt to
2171 simplify calls with more than 3 arguments.
2173 Return NULL_TREE if no simplification was possible, otherwise return the
2174 simplified form of the call as a tree. If IGNORED is true, it means that
2175 the caller does not use the returned value of the function. */
2177 static bool
2178 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2180 gimple *stmt = gsi_stmt (*gsi);
2181 tree dest = gimple_call_arg (stmt, 0);
2182 tree fmt = gimple_call_arg (stmt, 1);
2183 tree orig = NULL_TREE;
2184 const char *fmt_str = NULL;
2186 /* Verify the required arguments in the original call. We deal with two
2187 types of sprintf() calls: 'sprintf (str, fmt)' and
2188 'sprintf (dest, "%s", orig)'. */
2189 if (gimple_call_num_args (stmt) > 3)
2190 return false;
2192 if (gimple_call_num_args (stmt) == 3)
2193 orig = gimple_call_arg (stmt, 2);
2195 /* Check whether the format is a literal string constant. */
2196 fmt_str = c_getstr (fmt);
2197 if (fmt_str == NULL)
2198 return false;
2200 if (!init_target_chars ())
2201 return false;
2203 /* If the format doesn't contain % args or %%, use strcpy. */
2204 if (strchr (fmt_str, target_percent) == NULL)
2206 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2208 if (!fn)
2209 return false;
2211 /* Don't optimize sprintf (buf, "abc", ptr++). */
2212 if (orig)
2213 return false;
2215 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2216 'format' is known to contain no % formats. */
2217 gimple_seq stmts = NULL;
2218 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2219 gimple_seq_add_stmt_without_update (&stmts, repl);
2220 if (gimple_call_lhs (stmt))
2222 repl = gimple_build_assign (gimple_call_lhs (stmt),
2223 build_int_cst (integer_type_node,
2224 strlen (fmt_str)));
2225 gimple_seq_add_stmt_without_update (&stmts, repl);
2226 gsi_replace_with_seq_vops (gsi, stmts);
2227 /* gsi now points at the assignment to the lhs, get a
2228 stmt iterator to the memcpy call.
2229 ??? We can't use gsi_for_stmt as that doesn't work when the
2230 CFG isn't built yet. */
2231 gimple_stmt_iterator gsi2 = *gsi;
2232 gsi_prev (&gsi2);
2233 fold_stmt (&gsi2);
2235 else
2237 gsi_replace_with_seq_vops (gsi, stmts);
2238 fold_stmt (gsi);
2240 return true;
2243 /* If the format is "%s", use strcpy if the result isn't used. */
2244 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2246 tree fn;
2247 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2249 if (!fn)
2250 return false;
2252 /* Don't crash on sprintf (str1, "%s"). */
2253 if (!orig)
2254 return false;
2256 tree orig_len = NULL_TREE;
2257 if (gimple_call_lhs (stmt))
2259 orig_len = get_maxval_strlen (orig, 0);
2260 if (!orig_len)
2261 return false;
2264 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2265 gimple_seq stmts = NULL;
2266 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2267 gimple_seq_add_stmt_without_update (&stmts, repl);
2268 if (gimple_call_lhs (stmt))
2270 if (!useless_type_conversion_p (integer_type_node,
2271 TREE_TYPE (orig_len)))
2272 orig_len = fold_convert (integer_type_node, orig_len);
2273 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2274 gimple_seq_add_stmt_without_update (&stmts, repl);
2275 gsi_replace_with_seq_vops (gsi, stmts);
2276 /* gsi now points at the assignment to the lhs, get a
2277 stmt iterator to the memcpy call.
2278 ??? We can't use gsi_for_stmt as that doesn't work when the
2279 CFG isn't built yet. */
2280 gimple_stmt_iterator gsi2 = *gsi;
2281 gsi_prev (&gsi2);
2282 fold_stmt (&gsi2);
2284 else
2286 gsi_replace_with_seq_vops (gsi, stmts);
2287 fold_stmt (gsi);
2289 return true;
2291 return false;
2294 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2295 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2296 attempt to simplify calls with more than 4 arguments.
2298 Return NULL_TREE if no simplification was possible, otherwise return the
2299 simplified form of the call as a tree. If IGNORED is true, it means that
2300 the caller does not use the returned value of the function. */
2302 static bool
2303 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2305 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2306 tree dest = gimple_call_arg (stmt, 0);
2307 tree destsize = gimple_call_arg (stmt, 1);
2308 tree fmt = gimple_call_arg (stmt, 2);
2309 tree orig = NULL_TREE;
2310 const char *fmt_str = NULL;
2312 if (gimple_call_num_args (stmt) > 4)
2313 return false;
2315 if (gimple_call_num_args (stmt) == 4)
2316 orig = gimple_call_arg (stmt, 3);
2318 if (!tree_fits_uhwi_p (destsize))
2319 return false;
2320 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2322 /* Check whether the format is a literal string constant. */
2323 fmt_str = c_getstr (fmt);
2324 if (fmt_str == NULL)
2325 return false;
2327 if (!init_target_chars ())
2328 return false;
2330 /* If the format doesn't contain % args or %%, use strcpy. */
2331 if (strchr (fmt_str, target_percent) == NULL)
2333 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2334 if (!fn)
2335 return false;
2337 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2338 if (orig)
2339 return false;
2341 /* We could expand this as
2342 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2343 or to
2344 memcpy (str, fmt_with_nul_at_cstm1, cst);
2345 but in the former case that might increase code size
2346 and in the latter case grow .rodata section too much.
2347 So punt for now. */
2348 size_t len = strlen (fmt_str);
2349 if (len >= destlen)
2350 return false;
2352 gimple_seq stmts = NULL;
2353 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2354 gimple_seq_add_stmt_without_update (&stmts, repl);
2355 if (gimple_call_lhs (stmt))
2357 repl = gimple_build_assign (gimple_call_lhs (stmt),
2358 build_int_cst (integer_type_node, len));
2359 gimple_seq_add_stmt_without_update (&stmts, repl);
2360 gsi_replace_with_seq_vops (gsi, stmts);
2361 /* gsi now points at the assignment to the lhs, get a
2362 stmt iterator to the memcpy call.
2363 ??? We can't use gsi_for_stmt as that doesn't work when the
2364 CFG isn't built yet. */
2365 gimple_stmt_iterator gsi2 = *gsi;
2366 gsi_prev (&gsi2);
2367 fold_stmt (&gsi2);
2369 else
2371 gsi_replace_with_seq_vops (gsi, stmts);
2372 fold_stmt (gsi);
2374 return true;
2377 /* If the format is "%s", use strcpy if the result isn't used. */
2378 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2380 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2381 if (!fn)
2382 return false;
2384 /* Don't crash on snprintf (str1, cst, "%s"). */
2385 if (!orig)
2386 return false;
2388 tree orig_len = get_maxval_strlen (orig, 0);
2389 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2390 return false;
2392 /* We could expand this as
2393 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2394 or to
2395 memcpy (str1, str2_with_nul_at_cstm1, cst);
2396 but in the former case that might increase code size
2397 and in the latter case grow .rodata section too much.
2398 So punt for now. */
2399 if (compare_tree_int (orig_len, destlen) >= 0)
2400 return false;
2402 /* Convert snprintf (str1, cst, "%s", str2) into
2403 strcpy (str1, str2) if strlen (str2) < cst. */
2404 gimple_seq stmts = NULL;
2405 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2406 gimple_seq_add_stmt_without_update (&stmts, repl);
2407 if (gimple_call_lhs (stmt))
2409 if (!useless_type_conversion_p (integer_type_node,
2410 TREE_TYPE (orig_len)))
2411 orig_len = fold_convert (integer_type_node, orig_len);
2412 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2413 gimple_seq_add_stmt_without_update (&stmts, repl);
2414 gsi_replace_with_seq_vops (gsi, stmts);
2415 /* gsi now points at the assignment to the lhs, get a
2416 stmt iterator to the memcpy call.
2417 ??? We can't use gsi_for_stmt as that doesn't work when the
2418 CFG isn't built yet. */
2419 gimple_stmt_iterator gsi2 = *gsi;
2420 gsi_prev (&gsi2);
2421 fold_stmt (&gsi2);
2423 else
2425 gsi_replace_with_seq_vops (gsi, stmts);
2426 fold_stmt (gsi);
2428 return true;
2430 return false;
2433 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2434 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2435 more than 3 arguments, and ARG may be null in the 2-argument case.
2437 Return NULL_TREE if no simplification was possible, otherwise return the
2438 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2439 code of the function to be simplified. */
2441 static bool
2442 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2443 tree fp, tree fmt, tree arg,
2444 enum built_in_function fcode)
2446 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2447 tree fn_fputc, fn_fputs;
2448 const char *fmt_str = NULL;
2450 /* If the return value is used, don't do the transformation. */
2451 if (gimple_call_lhs (stmt) != NULL_TREE)
2452 return false;
2454 /* Check whether the format is a literal string constant. */
2455 fmt_str = c_getstr (fmt);
2456 if (fmt_str == NULL)
2457 return false;
2459 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2461 /* If we're using an unlocked function, assume the other
2462 unlocked functions exist explicitly. */
2463 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2464 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2466 else
2468 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2469 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2472 if (!init_target_chars ())
2473 return false;
2475 /* If the format doesn't contain % args or %%, use strcpy. */
2476 if (strchr (fmt_str, target_percent) == NULL)
2478 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2479 && arg)
2480 return false;
2482 /* If the format specifier was "", fprintf does nothing. */
2483 if (fmt_str[0] == '\0')
2485 replace_call_with_value (gsi, NULL_TREE);
2486 return true;
2489 /* When "string" doesn't contain %, replace all cases of
2490 fprintf (fp, string) with fputs (string, fp). The fputs
2491 builtin will take care of special cases like length == 1. */
2492 if (fn_fputs)
2494 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2495 replace_call_with_call_and_fold (gsi, repl);
2496 return true;
2500 /* The other optimizations can be done only on the non-va_list variants. */
2501 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2502 return false;
2504 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2505 else if (strcmp (fmt_str, target_percent_s) == 0)
2507 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2508 return false;
2509 if (fn_fputs)
2511 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2512 replace_call_with_call_and_fold (gsi, repl);
2513 return true;
2517 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2518 else if (strcmp (fmt_str, target_percent_c) == 0)
2520 if (!arg
2521 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2522 return false;
2523 if (fn_fputc)
2525 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2526 replace_call_with_call_and_fold (gsi, repl);
2527 return true;
2531 return false;
2534 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2535 FMT and ARG are the arguments to the call; we don't fold cases with
2536 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2538 Return NULL_TREE if no simplification was possible, otherwise return the
2539 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2540 code of the function to be simplified. */
2542 static bool
2543 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2544 tree arg, enum built_in_function fcode)
2546 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2547 tree fn_putchar, fn_puts, newarg;
2548 const char *fmt_str = NULL;
2550 /* If the return value is used, don't do the transformation. */
2551 if (gimple_call_lhs (stmt) != NULL_TREE)
2552 return false;
2554 /* Check whether the format is a literal string constant. */
2555 fmt_str = c_getstr (fmt);
2556 if (fmt_str == NULL)
2557 return false;
2559 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2561 /* If we're using an unlocked function, assume the other
2562 unlocked functions exist explicitly. */
2563 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2564 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2566 else
2568 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2569 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2572 if (!init_target_chars ())
2573 return false;
2575 if (strcmp (fmt_str, target_percent_s) == 0
2576 || strchr (fmt_str, target_percent) == NULL)
2578 const char *str;
2580 if (strcmp (fmt_str, target_percent_s) == 0)
2582 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2583 return false;
2585 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2586 return false;
2588 str = c_getstr (arg);
2589 if (str == NULL)
2590 return false;
2592 else
2594 /* The format specifier doesn't contain any '%' characters. */
2595 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2596 && arg)
2597 return false;
2598 str = fmt_str;
2601 /* If the string was "", printf does nothing. */
2602 if (str[0] == '\0')
2604 replace_call_with_value (gsi, NULL_TREE);
2605 return true;
2608 /* If the string has length of 1, call putchar. */
2609 if (str[1] == '\0')
2611 /* Given printf("c"), (where c is any one character,)
2612 convert "c"[0] to an int and pass that to the replacement
2613 function. */
2614 newarg = build_int_cst (integer_type_node, str[0]);
2615 if (fn_putchar)
2617 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2618 replace_call_with_call_and_fold (gsi, repl);
2619 return true;
2622 else
2624 /* If the string was "string\n", call puts("string"). */
2625 size_t len = strlen (str);
2626 if ((unsigned char)str[len - 1] == target_newline
2627 && (size_t) (int) len == len
2628 && (int) len > 0)
2630 char *newstr;
2631 tree offset_node, string_cst;
2633 /* Create a NUL-terminated string that's one char shorter
2634 than the original, stripping off the trailing '\n'. */
2635 newarg = build_string_literal (len, str);
2636 string_cst = string_constant (newarg, &offset_node);
2637 gcc_checking_assert (string_cst
2638 && (TREE_STRING_LENGTH (string_cst)
2639 == (int) len)
2640 && integer_zerop (offset_node)
2641 && (unsigned char)
2642 TREE_STRING_POINTER (string_cst)[len - 1]
2643 == target_newline);
2644 /* build_string_literal creates a new STRING_CST,
2645 modify it in place to avoid double copying. */
2646 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2647 newstr[len - 1] = '\0';
2648 if (fn_puts)
2650 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2651 replace_call_with_call_and_fold (gsi, repl);
2652 return true;
2655 else
2656 /* We'd like to arrange to call fputs(string,stdout) here,
2657 but we need stdout and don't have a way to get it yet. */
2658 return false;
2662 /* The other optimizations can be done only on the non-va_list variants. */
2663 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2664 return false;
2666 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2667 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2669 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2670 return false;
2671 if (fn_puts)
2673 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2674 replace_call_with_call_and_fold (gsi, repl);
2675 return true;
2679 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2680 else if (strcmp (fmt_str, target_percent_c) == 0)
2682 if (!arg || ! useless_type_conversion_p (integer_type_node,
2683 TREE_TYPE (arg)))
2684 return false;
2685 if (fn_putchar)
2687 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2688 replace_call_with_call_and_fold (gsi, repl);
2689 return true;
2693 return false;
2698 /* Fold a call to __builtin_strlen with known length LEN. */
2700 static bool
2701 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2703 gimple *stmt = gsi_stmt (*gsi);
2704 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2705 if (!len)
2706 return false;
2707 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2708 replace_call_with_value (gsi, len);
2709 return true;
2712 /* Fold a call to __builtin_acc_on_device. */
2714 static bool
2715 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2717 /* Defer folding until we know which compiler we're in. */
2718 if (symtab->state != EXPANSION)
2719 return false;
2721 unsigned val_host = GOMP_DEVICE_HOST;
2722 unsigned val_dev = GOMP_DEVICE_NONE;
2724 #ifdef ACCEL_COMPILER
2725 val_host = GOMP_DEVICE_NOT_HOST;
2726 val_dev = ACCEL_COMPILER_acc_device;
2727 #endif
2729 location_t loc = gimple_location (gsi_stmt (*gsi));
2731 tree host_eq = make_ssa_name (boolean_type_node);
2732 gimple *host_ass = gimple_build_assign
2733 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2734 gimple_set_location (host_ass, loc);
2735 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2737 tree dev_eq = make_ssa_name (boolean_type_node);
2738 gimple *dev_ass = gimple_build_assign
2739 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2740 gimple_set_location (dev_ass, loc);
2741 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2743 tree result = make_ssa_name (boolean_type_node);
2744 gimple *result_ass = gimple_build_assign
2745 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2746 gimple_set_location (result_ass, loc);
2747 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2749 replace_call_with_value (gsi, result);
2751 return true;
2754 /* Fold the non-target builtin at *GSI and return whether any simplification
2755 was made. */
2757 static bool
2758 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2760 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2761 tree callee = gimple_call_fndecl (stmt);
2763 /* Give up for always_inline inline builtins until they are
2764 inlined. */
2765 if (avoid_folding_inline_builtin (callee))
2766 return false;
2768 unsigned n = gimple_call_num_args (stmt);
2769 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2770 switch (fcode)
2772 case BUILT_IN_BZERO:
2773 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2774 gimple_call_arg (stmt, 1));
2775 case BUILT_IN_MEMSET:
2776 return gimple_fold_builtin_memset (gsi,
2777 gimple_call_arg (stmt, 1),
2778 gimple_call_arg (stmt, 2));
2779 case BUILT_IN_BCOPY:
2780 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2781 gimple_call_arg (stmt, 0), 3);
2782 case BUILT_IN_MEMCPY:
2783 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2784 gimple_call_arg (stmt, 1), 0);
2785 case BUILT_IN_MEMPCPY:
2786 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2787 gimple_call_arg (stmt, 1), 1);
2788 case BUILT_IN_MEMMOVE:
2789 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2790 gimple_call_arg (stmt, 1), 3);
2791 case BUILT_IN_SPRINTF_CHK:
2792 case BUILT_IN_VSPRINTF_CHK:
2793 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2794 case BUILT_IN_STRCAT_CHK:
2795 return gimple_fold_builtin_strcat_chk (gsi);
2796 case BUILT_IN_STRNCAT_CHK:
2797 return gimple_fold_builtin_strncat_chk (gsi);
2798 case BUILT_IN_STRLEN:
2799 return gimple_fold_builtin_strlen (gsi);
2800 case BUILT_IN_STRCPY:
2801 return gimple_fold_builtin_strcpy (gsi,
2802 gimple_call_arg (stmt, 0),
2803 gimple_call_arg (stmt, 1));
2804 case BUILT_IN_STRNCPY:
2805 return gimple_fold_builtin_strncpy (gsi,
2806 gimple_call_arg (stmt, 0),
2807 gimple_call_arg (stmt, 1),
2808 gimple_call_arg (stmt, 2));
2809 case BUILT_IN_STRCAT:
2810 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2811 gimple_call_arg (stmt, 1));
2812 case BUILT_IN_STRNCAT:
2813 return gimple_fold_builtin_strncat (gsi);
2814 case BUILT_IN_FPUTS:
2815 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2816 gimple_call_arg (stmt, 1), false);
2817 case BUILT_IN_FPUTS_UNLOCKED:
2818 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2819 gimple_call_arg (stmt, 1), true);
2820 case BUILT_IN_MEMCPY_CHK:
2821 case BUILT_IN_MEMPCPY_CHK:
2822 case BUILT_IN_MEMMOVE_CHK:
2823 case BUILT_IN_MEMSET_CHK:
2824 return gimple_fold_builtin_memory_chk (gsi,
2825 gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1),
2827 gimple_call_arg (stmt, 2),
2828 gimple_call_arg (stmt, 3),
2829 fcode);
2830 case BUILT_IN_STPCPY:
2831 return gimple_fold_builtin_stpcpy (gsi);
2832 case BUILT_IN_STRCPY_CHK:
2833 case BUILT_IN_STPCPY_CHK:
2834 return gimple_fold_builtin_stxcpy_chk (gsi,
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2838 fcode);
2839 case BUILT_IN_STRNCPY_CHK:
2840 case BUILT_IN_STPNCPY_CHK:
2841 return gimple_fold_builtin_stxncpy_chk (gsi,
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
2845 gimple_call_arg (stmt, 3),
2846 fcode);
2847 case BUILT_IN_SNPRINTF_CHK:
2848 case BUILT_IN_VSNPRINTF_CHK:
2849 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2850 case BUILT_IN_SNPRINTF:
2851 return gimple_fold_builtin_snprintf (gsi);
2852 case BUILT_IN_SPRINTF:
2853 return gimple_fold_builtin_sprintf (gsi);
2854 case BUILT_IN_FPRINTF:
2855 case BUILT_IN_FPRINTF_UNLOCKED:
2856 case BUILT_IN_VFPRINTF:
2857 if (n == 2 || n == 3)
2858 return gimple_fold_builtin_fprintf (gsi,
2859 gimple_call_arg (stmt, 0),
2860 gimple_call_arg (stmt, 1),
2861 n == 3
2862 ? gimple_call_arg (stmt, 2)
2863 : NULL_TREE,
2864 fcode);
2865 break;
2866 case BUILT_IN_FPRINTF_CHK:
2867 case BUILT_IN_VFPRINTF_CHK:
2868 if (n == 3 || n == 4)
2869 return gimple_fold_builtin_fprintf (gsi,
2870 gimple_call_arg (stmt, 0),
2871 gimple_call_arg (stmt, 2),
2872 n == 4
2873 ? gimple_call_arg (stmt, 3)
2874 : NULL_TREE,
2875 fcode);
2876 break;
2877 case BUILT_IN_PRINTF:
2878 case BUILT_IN_PRINTF_UNLOCKED:
2879 case BUILT_IN_VPRINTF:
2880 if (n == 1 || n == 2)
2881 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2882 n == 2
2883 ? gimple_call_arg (stmt, 1)
2884 : NULL_TREE, fcode);
2885 break;
2886 case BUILT_IN_PRINTF_CHK:
2887 case BUILT_IN_VPRINTF_CHK:
2888 if (n == 2 || n == 3)
2889 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2890 n == 3
2891 ? gimple_call_arg (stmt, 2)
2892 : NULL_TREE, fcode);
2893 case BUILT_IN_ACC_ON_DEVICE:
2894 return gimple_fold_builtin_acc_on_device (gsi,
2895 gimple_call_arg (stmt, 0));
2896 default:;
2899 /* Try the generic builtin folder. */
2900 bool ignore = (gimple_call_lhs (stmt) == NULL);
2901 tree result = fold_call_stmt (stmt, ignore);
2902 if (result)
2904 if (ignore)
2905 STRIP_NOPS (result);
2906 else
2907 result = fold_convert (gimple_call_return_type (stmt), result);
2908 if (!update_call_from_tree (gsi, result))
2909 gimplify_and_update_call_from_tree (gsi, result);
2910 return true;
2913 return false;
2916 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2917 doesn't fit into TYPE. The test for overflow should be regardless of
2918 -fwrapv, and even for unsigned types. */
2920 bool
2921 arith_overflowed_p (enum tree_code code, const_tree type,
2922 const_tree arg0, const_tree arg1)
2924 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2925 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2926 widest2_int_cst;
2927 widest2_int warg0 = widest2_int_cst (arg0);
2928 widest2_int warg1 = widest2_int_cst (arg1);
2929 widest2_int wres;
2930 switch (code)
2932 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2933 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2934 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2935 default: gcc_unreachable ();
2937 signop sign = TYPE_SIGN (type);
2938 if (sign == UNSIGNED && wi::neg_p (wres))
2939 return true;
2940 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2943 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2944 The statement may be replaced by another statement, e.g., if the call
2945 simplifies to a constant value. Return true if any changes were made.
2946 It is assumed that the operands have been previously folded. */
2948 static bool
2949 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2951 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2952 tree callee;
2953 bool changed = false;
2954 unsigned i;
2956 /* Fold *& in call arguments. */
2957 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2958 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2960 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2961 if (tmp)
2963 gimple_call_set_arg (stmt, i, tmp);
2964 changed = true;
2968 /* Check for virtual calls that became direct calls. */
2969 callee = gimple_call_fn (stmt);
2970 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2972 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2974 if (dump_file && virtual_method_call_p (callee)
2975 && !possible_polymorphic_call_target_p
2976 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2977 (OBJ_TYPE_REF_EXPR (callee)))))
2979 fprintf (dump_file,
2980 "Type inheritance inconsistent devirtualization of ");
2981 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2982 fprintf (dump_file, " to ");
2983 print_generic_expr (dump_file, callee, TDF_SLIM);
2984 fprintf (dump_file, "\n");
2987 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2988 changed = true;
/* Otherwise attempt full devirtualization via polymorphic call target
   analysis; skipped when folding in place since this path may insert
   or replace statements. */
2990 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2992 bool final;
2993 vec <cgraph_node *>targets
2994 = possible_polymorphic_call_targets (callee, stmt, &final);
2995 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2997 tree lhs = gimple_call_lhs (stmt);
2998 if (dump_enabled_p ())
3000 location_t loc = gimple_location_safe (stmt);
3001 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3002 "folding virtual function call to %s\n",
3003 targets.length () == 1
3004 ? targets[0]->name ()
3005 : "__builtin_unreachable");
3007 if (targets.length () == 1)
3009 gimple_call_set_fndecl (stmt, targets[0]->decl);
3010 changed = true;
3011 /* If the call becomes noreturn, remove the lhs. */
3012 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3014 if (TREE_CODE (lhs) == SSA_NAME)
3016 tree var = create_tmp_var (TREE_TYPE (lhs));
3017 tree def = get_or_create_ssa_default_def (cfun, var);
3018 gimple *new_stmt = gimple_build_assign (lhs, def);
3019 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3021 gimple_call_set_lhs (stmt, NULL_TREE);
3023 maybe_remove_unused_call_args (cfun, stmt);
/* No possible targets: the call site is unreachable, replace the call
   with __builtin_unreachable. */
3025 else
3027 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3028 gimple *new_stmt = gimple_build_call (fndecl, 0);
3029 gimple_set_location (new_stmt, gimple_location (stmt));
3030 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3032 tree var = create_tmp_var (TREE_TYPE (lhs));
3033 tree def = get_or_create_ssa_default_def (cfun, var);
3035 /* To satisfy condition for
3036 cgraph_update_edges_for_call_stmt_node,
3037 we need to preserve GIMPLE_CALL statement
3038 at position of GSI iterator. */
3039 update_call_from_tree (gsi, def);
3040 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3042 else
3044 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3045 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3046 gsi_replace (gsi, new_stmt, false);
3048 return true;
3054 /* Check for indirect calls that became direct calls, and then
3055 no longer require a static chain. */
3056 if (gimple_call_chain (stmt))
3058 tree fn = gimple_call_fndecl (stmt);
3059 if (fn && !DECL_STATIC_CHAIN (fn))
3061 gimple_call_set_chain (stmt, NULL);
3062 changed = true;
3064 else
3066 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3067 if (tmp)
3069 gimple_call_set_chain (stmt, tmp);
3070 changed = true;
/* Everything below may replace the statement itself, which in-place
   folding must not do. */
3075 if (inplace)
3076 return changed;
3078 /* Check for builtins that CCP can handle using information not
3079 available in the generic fold routines. */
3080 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3082 if (gimple_fold_builtin (gsi))
3083 changed = true;
3085 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3087 changed |= targetm.gimple_fold_builtin (gsi);
3089 else if (gimple_call_internal_p (stmt))
/* Constant-fold internal function calls: __builtin_expect, the UBSan
   object-size and arithmetic checks, and the {ADD,SUB,MUL}_OVERFLOW
   builtins. */
3091 enum tree_code subcode = ERROR_MARK;
3092 tree result = NULL_TREE;
3093 bool cplx_result = false;
3094 tree overflow = NULL_TREE;
3095 switch (gimple_call_internal_fn (stmt))
3097 case IFN_BUILTIN_EXPECT:
3098 result = fold_builtin_expect (gimple_location (stmt),
3099 gimple_call_arg (stmt, 0),
3100 gimple_call_arg (stmt, 1),
3101 gimple_call_arg (stmt, 2));
3102 break;
3103 case IFN_UBSAN_OBJECT_SIZE:
3104 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3105 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3106 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3107 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3108 gimple_call_arg (stmt, 2))))
3110 gsi_replace (gsi, gimple_build_nop (), false);
3111 unlink_stmt_vdef (stmt);
3112 release_defs (stmt);
3113 return true;
3115 break;
3116 case IFN_UBSAN_CHECK_ADD:
3117 subcode = PLUS_EXPR;
3118 break;
3119 case IFN_UBSAN_CHECK_SUB:
3120 subcode = MINUS_EXPR;
3121 break;
3122 case IFN_UBSAN_CHECK_MUL:
3123 subcode = MULT_EXPR;
3124 break;
3125 case IFN_ADD_OVERFLOW:
3126 subcode = PLUS_EXPR;
3127 cplx_result = true;
3128 break;
3129 case IFN_SUB_OVERFLOW:
3130 subcode = MINUS_EXPR;
3131 cplx_result = true;
3132 break;
3133 case IFN_MUL_OVERFLOW:
3134 subcode = MULT_EXPR;
3135 cplx_result = true;
3136 break;
3137 default:
3138 break;
/* For the arithmetic checks, fold the cases with an identity operand
   (x+0, x*0, x*1, x-x) or two constant operands. */
3140 if (subcode != ERROR_MARK)
3142 tree arg0 = gimple_call_arg (stmt, 0);
3143 tree arg1 = gimple_call_arg (stmt, 1);
3144 tree type = TREE_TYPE (arg0);
3145 if (cplx_result)
3147 tree lhs = gimple_call_lhs (stmt);
3148 if (lhs == NULL_TREE)
3149 type = NULL_TREE;
3150 else
3151 type = TREE_TYPE (TREE_TYPE (lhs));
3153 if (type == NULL_TREE)
3155 /* x = y + 0; x = y - 0; x = y * 0; */
3156 else if (integer_zerop (arg1))
3157 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3158 /* x = 0 + y; x = 0 * y; */
3159 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3160 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3161 /* x = y - y; */
3162 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3163 result = integer_zero_node;
3164 /* x = y * 1; x = 1 * y; */
3165 else if (subcode == MULT_EXPR && integer_onep (arg1))
3166 result = arg0;
3167 else if (subcode == MULT_EXPR && integer_onep (arg0))
3168 result = arg1;
3169 else if (TREE_CODE (arg0) == INTEGER_CST
3170 && TREE_CODE (arg1) == INTEGER_CST)
3172 if (cplx_result)
3173 result = int_const_binop (subcode, fold_convert (type, arg0),
3174 fold_convert (type, arg1));
3175 else
3176 result = int_const_binop (subcode, arg0, arg1);
3177 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3179 if (cplx_result)
3180 overflow = build_one_cst (type);
3181 else
3182 result = NULL_TREE;
3185 if (result)
3187 if (result == integer_zero_node)
3188 result = build_zero_cst (type);
3189 else if (cplx_result && TREE_TYPE (result) != type)
3191 if (TREE_CODE (result) == INTEGER_CST)
3193 if (arith_overflowed_p (PLUS_EXPR, type, result,
3194 integer_zero_node))
3195 overflow = build_one_cst (type);
3197 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3198 && TYPE_UNSIGNED (type))
3199 || (TYPE_PRECISION (type)
3200 < (TYPE_PRECISION (TREE_TYPE (result))
3201 + (TYPE_UNSIGNED (TREE_TYPE (result))
3202 && !TYPE_UNSIGNED (type)))))
3203 result = NULL_TREE;
3204 if (result)
3205 result = fold_convert (type, result);
/* Substitute the folded RESULT into the call; for the *_OVERFLOW
   internal functions build the complex {value, overflow-flag} pair
   expected by the lhs. */
3210 if (result)
3212 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3213 result = drop_tree_overflow (result);
3214 if (cplx_result)
3216 if (overflow == NULL_TREE)
3217 overflow = build_zero_cst (TREE_TYPE (result));
3218 tree ctype = build_complex_type (TREE_TYPE (result));
3219 if (TREE_CODE (result) == INTEGER_CST
3220 && TREE_CODE (overflow) == INTEGER_CST)
3221 result = build_complex (ctype, result, overflow);
3222 else
3223 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3224 ctype, result, overflow);
3226 if (!update_call_from_tree (gsi, result))
3227 gimplify_and_update_call_from_tree (gsi, result);
3228 changed = true;
3232 return changed;
3236 /* Return true whether NAME has a use on STMT. */
3238 static bool
3239 has_use_on_stmt (tree name, gimple *stmt)
3241 imm_use_iterator iter;
3242 use_operand_p use_p;
3243 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3244 if (USE_STMT (use_p) == stmt)
3245 return true;
3246 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.
   On failure nothing is changed and false is returned; the caller
   is then responsible for discarding *SEQ.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt)))
    return false;

  /* GIMPLE_COND: rewrite the condition in place from the simplified
     comparison, SSA name or constant.  */
  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: the condition is statically known.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Anything else needs a temporary materialized in *SEQ.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  /* GIMPLE_ASSIGN with a tree-code result: replace the RHS.  When
     folding in place only allow the replacement if it does not grow
     the number of operands.  */
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  /* Builtin call simplified to a call of the same builtin: just
     update the arguments in place.  */
  else if (rcode.is_fn_code ()
	   && gimple_call_builtin_p (stmt, rcode))
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      gcc_assert (gimple_seq_empty_p (*seq));
      return true;
    }
  /* Everything else: build the result into *SEQ and replace the whole
     statement, preserving virtual operands.  Only valid when not
     folding in place.  */
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at a tree operand that is rewritten in place; return true
   if any change was made.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any handled component refs
     down to the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  /* The address is invariant here, so the base must be
	     computable at compile time.  */
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the constant offset
	     operand of the MEM_REF.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE the statement at *GSI may only
   be changed in place, never replaced or grown; VALUEIZE is used by
   gimple_simplify to look up values of SSA names.  Returns true if
   anything changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires flipping
		     the comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in call arguments and lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Fall through to the (empty) default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  Only assigns and conds may be
     simplified when folding in place.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs may only be folded to memory if the constraint
	       allows memory and not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*ref by folding the referenced object.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.
   Always returns NULL_TREE so gimple_simplify never looks through
   SSA name definitions.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3777 /* Valueization callback that ends up following single-use SSA edges only. */
3779 tree
3780 follow_single_use_edges (tree val)
3782 if (TREE_CODE (val) == SSA_NAME
3783 && !has_single_use (val))
3784 return NULL_TREE;
3785 return val;
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.
   This variant does not follow SSA edges when valueizing.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
/* Like fold_stmt above, but uses the caller-provided VALUEIZE callback
   to look up values of SSA names during simplification.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must never replace the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3824 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3825 if EXPR is null or we don't know how.
3826 If non-null, the result always has boolean type. */
3828 static tree
3829 canonicalize_bool (tree expr, bool invert)
3831 if (!expr)
3832 return NULL_TREE;
3833 else if (invert)
3835 if (integer_nonzerop (expr))
3836 return boolean_false_node;
3837 else if (integer_zerop (expr))
3838 return boolean_true_node;
3839 else if (TREE_CODE (expr) == SSA_NAME)
3840 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3841 build_int_cst (TREE_TYPE (expr), 0));
3842 else if (COMPARISON_CLASS_P (expr))
3843 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3844 boolean_type_node,
3845 TREE_OPERAND (expr, 0),
3846 TREE_OPERAND (expr, 1));
3847 else
3848 return NULL_TREE;
3850 else
3852 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3853 return expr;
3854 if (integer_nonzerop (expr))
3855 return boolean_true_node;
3856 else if (integer_zerop (expr))
3857 return boolean_false_node;
3858 else if (TREE_CODE (expr) == SSA_NAME)
3859 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3860 build_int_cst (TREE_TYPE (expr), 0));
3861 else if (COMPARISON_CLASS_P (expr))
3862 return fold_build2 (TREE_CODE (expr),
3863 boolean_type_node,
3864 TREE_OPERAND (expr, 0),
3865 TREE_OPERAND (expr, 1));
3866 else
3867 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name is equivalent to (name != 0) and to (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same truth value as name's
	     defining comparison.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): inverted truth value.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3929 /* Check to see if two boolean expressions OP1 and OP2 are logically
3930 equivalent. */
3932 static bool
3933 same_bool_result_p (const_tree op1, const_tree op2)
3935 /* Simple cases first. */
3936 if (operand_equal_p (op1, op2, 0))
3937 return true;
3939 /* Check the cases where at least one of the operands is a comparison.
3940 These are a bit smarter than operand_equal_p in that they apply some
3941 identifies on SSA_NAMEs. */
3942 if (COMPARISON_CLASS_P (op2)
3943 && same_bool_comparison_p (op1, TREE_CODE (op2),
3944 TREE_OPERAND (op2, 0),
3945 TREE_OPERAND (op2, 1)))
3946 return true;
3947 if (COMPARISON_CLASS_P (op1)
3948 && same_bool_comparison_p (op2, TREE_CODE (op1),
3949 TREE_OPERAND (op1, 0),
3950 TREE_OPERAND (op1, 1)))
3951 return true;
3953 /* Default case. */
3954 return false;
3957 /* Forward declarations for some mutually recursive functions. */
3959 static tree
3960 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3961 enum tree_code code2, tree op2a, tree op2b);
3962 static tree
3963 and_var_with_comparison (tree var, bool invert,
3964 enum tree_code code2, tree op2a, tree op2b);
3965 static tree
3966 and_var_with_comparison_1 (gimple *stmt,
3967 enum tree_code code2, tree op2a, tree op2b);
3968 static tree
3969 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3970 enum tree_code code2, tree op2a, tree op2b);
3971 static tree
3972 or_var_with_comparison (tree var, bool invert,
3973 enum tree_code code2, tree op2a, tree op2b);
3974 static tree
3975 or_var_with_comparison_1 (gimple *stmt,
3976 enum tree_code code2, tree op2a, tree op2b);
3978 /* Helper function for and_comparisons_1: try to simplify the AND of the
3979 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3980 If INVERT is true, invert the value of the VAR before doing the AND.
3981 Return NULL_EXPR if we can't simplify this to a single expression. */
3983 static tree
3984 and_var_with_comparison (tree var, bool invert,
3985 enum tree_code code2, tree op2a, tree op2b)
3987 tree t;
3988 gimple *stmt = SSA_NAME_DEF_STMT (var);
3990 /* We can only deal with variables whose definitions are assignments. */
3991 if (!is_gimple_assign (stmt))
3992 return NULL_TREE;
3994 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3995 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3996 Then we only have to consider the simpler non-inverted cases. */
3997 if (invert)
3998 t = or_var_with_comparison_1 (stmt,
3999 invert_tree_comparison (code2, false),
4000 op2a, op2b);
4001 else
4002 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4003 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a for true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a for false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4169 /* Try to simplify the AND of two comparisons defined by
4170 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4171 If this can be done without constructing an intermediate value,
4172 return the resulting tree; otherwise NULL_TREE is returned.
4173 This function is deliberately asymmetric as it recurses on SSA_DEFs
4174 in the first comparison but not the second. */
4176 static tree
4177 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4178 enum tree_code code2, tree op2a, tree op2b)
4180 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4182 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4183 if (operand_equal_p (op1a, op2a, 0)
4184 && operand_equal_p (op1b, op2b, 0))
4186 /* Result will be either NULL_TREE, or a combined comparison. */
4187 tree t = combine_comparisons (UNKNOWN_LOCATION,
4188 TRUTH_ANDIF_EXPR, code1, code2,
4189 truth_type, op1a, op1b);
4190 if (t)
4191 return t;
4194 /* Likewise the swapped case of the above. */
4195 if (operand_equal_p (op1a, op2b, 0)
4196 && operand_equal_p (op1b, op2a, 0))
4198 /* Result will be either NULL_TREE, or a combined comparison. */
4199 tree t = combine_comparisons (UNKNOWN_LOCATION,
4200 TRUTH_ANDIF_EXPR, code1,
4201 swap_tree_comparison (code2),
4202 truth_type, op1a, op1b);
4203 if (t)
4204 return t;
4207 /* If both comparisons are of the same value against constants, we might
4208 be able to merge them. */
4209 if (operand_equal_p (op1a, op2a, 0)
4210 && TREE_CODE (op1b) == INTEGER_CST
4211 && TREE_CODE (op2b) == INTEGER_CST)
4213 int cmp = tree_int_cst_compare (op1b, op2b);
4215 /* If we have (op1a == op1b), we should either be able to
4216 return that or FALSE, depending on whether the constant op1b
4217 also satisfies the other comparison against op2b. */
4218 if (code1 == EQ_EXPR)
4220 bool done = true;
4221 bool val;
4222 switch (code2)
4224 case EQ_EXPR: val = (cmp == 0); break;
4225 case NE_EXPR: val = (cmp != 0); break;
4226 case LT_EXPR: val = (cmp < 0); break;
4227 case GT_EXPR: val = (cmp > 0); break;
4228 case LE_EXPR: val = (cmp <= 0); break;
4229 case GE_EXPR: val = (cmp >= 0); break;
4230 default: done = false;
4232 if (done)
4234 if (val)
4235 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4236 else
4237 return boolean_false_node;
4240 /* Likewise if the second comparison is an == comparison. */
4241 else if (code2 == EQ_EXPR)
4243 bool done = true;
4244 bool val;
4245 switch (code1)
4247 case EQ_EXPR: val = (cmp == 0); break;
4248 case NE_EXPR: val = (cmp != 0); break;
4249 case LT_EXPR: val = (cmp > 0); break;
4250 case GT_EXPR: val = (cmp < 0); break;
4251 case LE_EXPR: val = (cmp >= 0); break;
4252 case GE_EXPR: val = (cmp <= 0); break;
4253 default: done = false;
4255 if (done)
4257 if (val)
4258 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4259 else
4260 return boolean_false_node;
4264 /* Same business with inequality tests. */
4265 else if (code1 == NE_EXPR)
4267 bool val;
4268 switch (code2)
4270 case EQ_EXPR: val = (cmp != 0); break;
4271 case NE_EXPR: val = (cmp == 0); break;
4272 case LT_EXPR: val = (cmp >= 0); break;
4273 case GT_EXPR: val = (cmp <= 0); break;
4274 case LE_EXPR: val = (cmp > 0); break;
4275 case GE_EXPR: val = (cmp < 0); break;
4276 default:
4277 val = false;
4279 if (val)
4280 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4282 else if (code2 == NE_EXPR)
4284 bool val;
4285 switch (code1)
4287 case EQ_EXPR: val = (cmp == 0); break;
4288 case NE_EXPR: val = (cmp != 0); break;
4289 case LT_EXPR: val = (cmp <= 0); break;
4290 case GT_EXPR: val = (cmp >= 0); break;
4291 case LE_EXPR: val = (cmp < 0); break;
4292 case GE_EXPR: val = (cmp > 0); break;
4293 default:
4294 val = false;
4296 if (val)
4297 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4300 /* Chose the more restrictive of two < or <= comparisons. */
4301 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4302 && (code2 == LT_EXPR || code2 == LE_EXPR))
4304 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4306 else
4307 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4310 /* Likewise chose the more restrictive of two > or >= comparisons. */
4311 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4312 && (code2 == GT_EXPR || code2 == GE_EXPR))
4314 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4315 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4316 else
4317 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4320 /* Check for singleton ranges. */
4321 else if (cmp == 0
4322 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4323 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4324 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4326 /* Check for disjoint ranges. */
4327 else if (cmp <= 0
4328 && (code1 == LT_EXPR || code1 == LE_EXPR)
4329 && (code2 == GT_EXPR || code2 == GE_EXPR))
4330 return boolean_false_node;
4331 else if (cmp >= 0
4332 && (code1 == GT_EXPR || code1 == GE_EXPR)
4333 && (code2 == LT_EXPR || code2 == LE_EXPR))
4334 return boolean_false_node;
4337 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4338 NAME's definition is a truth value. See if there are any simplifications
4339 that can be done against the NAME's definition. */
4340 if (TREE_CODE (op1a) == SSA_NAME
4341 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4342 && (integer_zerop (op1b) || integer_onep (op1b)))
4344 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4345 || (code1 == NE_EXPR && integer_onep (op1b)));
4346 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4347 switch (gimple_code (stmt))
4349 case GIMPLE_ASSIGN:
4350 /* Try to simplify by copy-propagating the definition. */
4351 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4353 case GIMPLE_PHI:
4354 /* If every argument to the PHI produces the same result when
4355 ANDed with the second comparison, we win.
4356 Do not do this unless the type is bool since we need a bool
4357 result here anyway. */
4358 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4360 tree result = NULL_TREE;
4361 unsigned i;
4362 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4364 tree arg = gimple_phi_arg_def (stmt, i);
4366 /* If this PHI has itself as an argument, ignore it.
4367 If all the other args produce the same result,
4368 we're still OK. */
4369 if (arg == gimple_phi_result (stmt))
4370 continue;
4371 else if (TREE_CODE (arg) == INTEGER_CST)
4373 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4375 if (!result)
4376 result = boolean_false_node;
4377 else if (!integer_zerop (result))
4378 return NULL_TREE;
4380 else if (!result)
4381 result = fold_build2 (code2, boolean_type_node,
4382 op2a, op2b);
4383 else if (!same_bool_comparison_p (result,
4384 code2, op2a, op2b))
4385 return NULL_TREE;
4387 else if (TREE_CODE (arg) == SSA_NAME
4388 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4390 tree temp;
4391 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4392 /* In simple cases we can look through PHI nodes,
4393 but we have to be careful with loops.
4394 See PR49073. */
4395 if (! dom_info_available_p (CDI_DOMINATORS)
4396 || gimple_bb (def_stmt) == gimple_bb (stmt)
4397 || dominated_by_p (CDI_DOMINATORS,
4398 gimple_bb (def_stmt),
4399 gimple_bb (stmt)))
4400 return NULL_TREE;
4401 temp = and_var_with_comparison (arg, invert, code2,
4402 op2a, op2b);
4403 if (!temp)
4404 return NULL_TREE;
4405 else if (!result)
4406 result = temp;
4407 else if (!same_bool_result_p (result, temp))
4408 return NULL_TREE;
4410 else
4411 return NULL_TREE;
4413 return result;
4416 default:
4417 break;
4420 return NULL_TREE;
4423 /* Try to simplify the AND of two comparisons, specified by
4424 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4425 If this can be simplified to a single expression (without requiring
4426 introducing more SSA variables to hold intermediate values),
4427 return the resulting tree. Otherwise return NULL_TREE.
4428 If the result expression is non-null, it has boolean type. */
4430 tree
4431 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4432 enum tree_code code2, tree op2a, tree op2b)
4434 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4435 if (t)
4436 return t;
4437 else
4438 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4441 /* Helper function for or_comparisons_1: try to simplify the OR of the
4442 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4443 If INVERT is true, invert the value of VAR before doing the OR.
4444 Return NULL_EXPR if we can't simplify this to a single expression. */
4446 static tree
4447 or_var_with_comparison (tree var, bool invert,
4448 enum tree_code code2, tree op2a, tree op2b)
4450 tree t;
4451 gimple *stmt = SSA_NAME_DEF_STMT (var);
4453 /* We can only deal with variables whose definitions are assignments. */
4454 if (!is_gimple_assign (stmt))
4455 return NULL_TREE;
4457 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4458 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4459 Then we only have to consider the simpler non-inverted cases. */
4460 if (invert)
4461 t = and_var_with_comparison_1 (stmt,
4462 invert_tree_comparison (code2, false),
4463 op2a, op2b);
4464 else
4465 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4466 return canonicalize_bool (t, invert);
4469 /* Try to simplify the OR of the ssa variable defined by the assignment
4470 STMT with the comparison specified by (OP2A CODE2 OP2B).
4471 Return NULL_EXPR if we can't simplify this to a single expression. */
4473 static tree
4474 or_var_with_comparison_1 (gimple *stmt,
4475 enum tree_code code2, tree op2a, tree op2b)
4477 tree var = gimple_assign_lhs (stmt);
4478 tree true_test_var = NULL_TREE;
4479 tree false_test_var = NULL_TREE;
4480 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4482 /* Check for identities like (var OR (var != 0)) => true . */
4483 if (TREE_CODE (op2a) == SSA_NAME
4484 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4486 if ((code2 == NE_EXPR && integer_zerop (op2b))
4487 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4489 true_test_var = op2a;
4490 if (var == true_test_var)
4491 return var;
4493 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4494 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4496 false_test_var = op2a;
4497 if (var == false_test_var)
4498 return boolean_true_node;
4502 /* If the definition is a comparison, recurse on it. */
4503 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4505 tree t = or_comparisons_1 (innercode,
4506 gimple_assign_rhs1 (stmt),
4507 gimple_assign_rhs2 (stmt),
4508 code2,
4509 op2a,
4510 op2b);
4511 if (t)
4512 return t;
4515 /* If the definition is an AND or OR expression, we may be able to
4516 simplify by reassociating. */
4517 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4518 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4520 tree inner1 = gimple_assign_rhs1 (stmt);
4521 tree inner2 = gimple_assign_rhs2 (stmt);
4522 gimple *s;
4523 tree t;
4524 tree partial = NULL_TREE;
4525 bool is_or = (innercode == BIT_IOR_EXPR);
4527 /* Check for boolean identities that don't require recursive examination
4528 of inner1/inner2:
4529 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4530 inner1 OR (inner1 AND inner2) => inner1
4531 !inner1 OR (inner1 OR inner2) => true
4532 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4534 if (inner1 == true_test_var)
4535 return (is_or ? var : inner1);
4536 else if (inner2 == true_test_var)
4537 return (is_or ? var : inner2);
4538 else if (inner1 == false_test_var)
4539 return (is_or
4540 ? boolean_true_node
4541 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4542 else if (inner2 == false_test_var)
4543 return (is_or
4544 ? boolean_true_node
4545 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4547 /* Next, redistribute/reassociate the OR across the inner tests.
4548 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4549 if (TREE_CODE (inner1) == SSA_NAME
4550 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4551 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4552 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4553 gimple_assign_rhs1 (s),
4554 gimple_assign_rhs2 (s),
4555 code2, op2a, op2b)))
4557 /* Handle the OR case, where we are reassociating:
4558 (inner1 OR inner2) OR (op2a code2 op2b)
4559 => (t OR inner2)
4560 If the partial result t is a constant, we win. Otherwise
4561 continue on to try reassociating with the other inner test. */
4562 if (is_or)
4564 if (integer_onep (t))
4565 return boolean_true_node;
4566 else if (integer_zerop (t))
4567 return inner2;
4570 /* Handle the AND case, where we are redistributing:
4571 (inner1 AND inner2) OR (op2a code2 op2b)
4572 => (t AND (inner2 OR (op2a code op2b))) */
4573 else if (integer_zerop (t))
4574 return boolean_false_node;
4576 /* Save partial result for later. */
4577 partial = t;
4580 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4581 if (TREE_CODE (inner2) == SSA_NAME
4582 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4583 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4584 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4585 gimple_assign_rhs1 (s),
4586 gimple_assign_rhs2 (s),
4587 code2, op2a, op2b)))
4589 /* Handle the OR case, where we are reassociating:
4590 (inner1 OR inner2) OR (op2a code2 op2b)
4591 => (inner1 OR t)
4592 => (t OR partial) */
4593 if (is_or)
4595 if (integer_zerop (t))
4596 return inner1;
4597 else if (integer_onep (t))
4598 return boolean_true_node;
4599 /* If both are the same, we can apply the identity
4600 (x OR x) == x. */
4601 else if (partial && same_bool_result_p (t, partial))
4602 return t;
4605 /* Handle the AND case, where we are redistributing:
4606 (inner1 AND inner2) OR (op2a code2 op2b)
4607 => (t AND (inner1 OR (op2a code2 op2b)))
4608 => (t AND partial) */
4609 else
4611 if (integer_zerop (t))
4612 return boolean_false_node;
4613 else if (partial)
4615 /* We already got a simplification for the other
4616 operand to the redistributed AND expression. The
4617 interesting case is when at least one is true.
4618 Or, if both are the same, we can apply the identity
4619 (x AND x) == x. */
4620 if (integer_onep (partial))
4621 return t;
4622 else if (integer_onep (t))
4623 return partial;
4624 else if (same_bool_result_p (t, partial))
4625 return t;
4630 return NULL_TREE;
4633 /* Try to simplify the OR of two comparisons defined by
4634 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4635 If this can be done without constructing an intermediate value,
4636 return the resulting tree; otherwise NULL_TREE is returned.
4637 This function is deliberately asymmetric as it recurses on SSA_DEFs
4638 in the first comparison but not the second. */
4640 static tree
4641 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4642 enum tree_code code2, tree op2a, tree op2b)
4644 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4646 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4647 if (operand_equal_p (op1a, op2a, 0)
4648 && operand_equal_p (op1b, op2b, 0))
4650 /* Result will be either NULL_TREE, or a combined comparison. */
4651 tree t = combine_comparisons (UNKNOWN_LOCATION,
4652 TRUTH_ORIF_EXPR, code1, code2,
4653 truth_type, op1a, op1b);
4654 if (t)
4655 return t;
4658 /* Likewise the swapped case of the above. */
4659 if (operand_equal_p (op1a, op2b, 0)
4660 && operand_equal_p (op1b, op2a, 0))
4662 /* Result will be either NULL_TREE, or a combined comparison. */
4663 tree t = combine_comparisons (UNKNOWN_LOCATION,
4664 TRUTH_ORIF_EXPR, code1,
4665 swap_tree_comparison (code2),
4666 truth_type, op1a, op1b);
4667 if (t)
4668 return t;
4671 /* If both comparisons are of the same value against constants, we might
4672 be able to merge them. */
4673 if (operand_equal_p (op1a, op2a, 0)
4674 && TREE_CODE (op1b) == INTEGER_CST
4675 && TREE_CODE (op2b) == INTEGER_CST)
4677 int cmp = tree_int_cst_compare (op1b, op2b);
4679 /* If we have (op1a != op1b), we should either be able to
4680 return that or TRUE, depending on whether the constant op1b
4681 also satisfies the other comparison against op2b. */
4682 if (code1 == NE_EXPR)
4684 bool done = true;
4685 bool val;
4686 switch (code2)
4688 case EQ_EXPR: val = (cmp == 0); break;
4689 case NE_EXPR: val = (cmp != 0); break;
4690 case LT_EXPR: val = (cmp < 0); break;
4691 case GT_EXPR: val = (cmp > 0); break;
4692 case LE_EXPR: val = (cmp <= 0); break;
4693 case GE_EXPR: val = (cmp >= 0); break;
4694 default: done = false;
4696 if (done)
4698 if (val)
4699 return boolean_true_node;
4700 else
4701 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4704 /* Likewise if the second comparison is a != comparison. */
4705 else if (code2 == NE_EXPR)
4707 bool done = true;
4708 bool val;
4709 switch (code1)
4711 case EQ_EXPR: val = (cmp == 0); break;
4712 case NE_EXPR: val = (cmp != 0); break;
4713 case LT_EXPR: val = (cmp > 0); break;
4714 case GT_EXPR: val = (cmp < 0); break;
4715 case LE_EXPR: val = (cmp >= 0); break;
4716 case GE_EXPR: val = (cmp <= 0); break;
4717 default: done = false;
4719 if (done)
4721 if (val)
4722 return boolean_true_node;
4723 else
4724 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4728 /* See if an equality test is redundant with the other comparison. */
4729 else if (code1 == EQ_EXPR)
4731 bool val;
4732 switch (code2)
4734 case EQ_EXPR: val = (cmp == 0); break;
4735 case NE_EXPR: val = (cmp != 0); break;
4736 case LT_EXPR: val = (cmp < 0); break;
4737 case GT_EXPR: val = (cmp > 0); break;
4738 case LE_EXPR: val = (cmp <= 0); break;
4739 case GE_EXPR: val = (cmp >= 0); break;
4740 default:
4741 val = false;
4743 if (val)
4744 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4746 else if (code2 == EQ_EXPR)
4748 bool val;
4749 switch (code1)
4751 case EQ_EXPR: val = (cmp == 0); break;
4752 case NE_EXPR: val = (cmp != 0); break;
4753 case LT_EXPR: val = (cmp > 0); break;
4754 case GT_EXPR: val = (cmp < 0); break;
4755 case LE_EXPR: val = (cmp >= 0); break;
4756 case GE_EXPR: val = (cmp <= 0); break;
4757 default:
4758 val = false;
4760 if (val)
4761 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4764 /* Chose the less restrictive of two < or <= comparisons. */
4765 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4766 && (code2 == LT_EXPR || code2 == LE_EXPR))
4768 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4769 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4770 else
4771 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4774 /* Likewise chose the less restrictive of two > or >= comparisons. */
4775 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4776 && (code2 == GT_EXPR || code2 == GE_EXPR))
4778 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4779 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4780 else
4781 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4784 /* Check for singleton ranges. */
4785 else if (cmp == 0
4786 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4787 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4788 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4790 /* Check for less/greater pairs that don't restrict the range at all. */
4791 else if (cmp >= 0
4792 && (code1 == LT_EXPR || code1 == LE_EXPR)
4793 && (code2 == GT_EXPR || code2 == GE_EXPR))
4794 return boolean_true_node;
4795 else if (cmp <= 0
4796 && (code1 == GT_EXPR || code1 == GE_EXPR)
4797 && (code2 == LT_EXPR || code2 == LE_EXPR))
4798 return boolean_true_node;
4801 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4802 NAME's definition is a truth value. See if there are any simplifications
4803 that can be done against the NAME's definition. */
4804 if (TREE_CODE (op1a) == SSA_NAME
4805 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4806 && (integer_zerop (op1b) || integer_onep (op1b)))
4808 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4809 || (code1 == NE_EXPR && integer_onep (op1b)));
4810 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4811 switch (gimple_code (stmt))
4813 case GIMPLE_ASSIGN:
4814 /* Try to simplify by copy-propagating the definition. */
4815 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4817 case GIMPLE_PHI:
4818 /* If every argument to the PHI produces the same result when
4819 ORed with the second comparison, we win.
4820 Do not do this unless the type is bool since we need a bool
4821 result here anyway. */
4822 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4824 tree result = NULL_TREE;
4825 unsigned i;
4826 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4828 tree arg = gimple_phi_arg_def (stmt, i);
4830 /* If this PHI has itself as an argument, ignore it.
4831 If all the other args produce the same result,
4832 we're still OK. */
4833 if (arg == gimple_phi_result (stmt))
4834 continue;
4835 else if (TREE_CODE (arg) == INTEGER_CST)
4837 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4839 if (!result)
4840 result = boolean_true_node;
4841 else if (!integer_onep (result))
4842 return NULL_TREE;
4844 else if (!result)
4845 result = fold_build2 (code2, boolean_type_node,
4846 op2a, op2b);
4847 else if (!same_bool_comparison_p (result,
4848 code2, op2a, op2b))
4849 return NULL_TREE;
4851 else if (TREE_CODE (arg) == SSA_NAME
4852 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4854 tree temp;
4855 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4856 /* In simple cases we can look through PHI nodes,
4857 but we have to be careful with loops.
4858 See PR49073. */
4859 if (! dom_info_available_p (CDI_DOMINATORS)
4860 || gimple_bb (def_stmt) == gimple_bb (stmt)
4861 || dominated_by_p (CDI_DOMINATORS,
4862 gimple_bb (def_stmt),
4863 gimple_bb (stmt)))
4864 return NULL_TREE;
4865 temp = or_var_with_comparison (arg, invert, code2,
4866 op2a, op2b);
4867 if (!temp)
4868 return NULL_TREE;
4869 else if (!result)
4870 result = temp;
4871 else if (!same_bool_result_p (result, temp))
4872 return NULL_TREE;
4874 else
4875 return NULL_TREE;
4877 return result;
4880 default:
4881 break;
4884 return NULL_TREE;
4887 /* Try to simplify the OR of two comparisons, specified by
4888 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4889 If this can be simplified to a single expression (without requiring
4890 introducing more SSA variables to hold intermediate values),
4891 return the resulting tree. Otherwise return NULL_TREE.
4892 If the result expression is non-null, it has boolean type. */
4894 tree
4895 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4896 enum tree_code code2, tree op2a, tree op2b)
4898 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4899 if (t)
4900 return t;
4901 else
4902 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4906 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4908 Either NULL_TREE, a simplified but non-constant or a constant
4909 is returned.
4911 ??? This should go into a gimple-fold-inline.h file to be eventually
4912 privatized with the single valueize function used in the various TUs
4913 to avoid the indirect function call overhead. */
4915 tree
4916 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4917 tree (*gvalueize) (tree))
4919 code_helper rcode;
4920 tree ops[3] = {};
4921 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4922 edges if there are intermediate VARYING defs. For this reason
4923 do not follow SSA edges here even though SCCVN can technically
4924 just deal fine with that. */
4925 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4927 tree res = NULL_TREE;
4928 if (rcode.is_tree_code ()
4929 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4930 || ((tree_code) rcode) == ADDR_EXPR)
4931 && is_gimple_val (ops[0]))
4932 res = ops[0];
4933 else if (mprts_hook)
4934 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4935 if (res)
4937 if (dump_file && dump_flags & TDF_DETAILS)
4939 fprintf (dump_file, "Match-and-simplified ");
4940 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4941 fprintf (dump_file, " to ");
4942 print_generic_expr (dump_file, res, 0);
4943 fprintf (dump_file, "\n");
4945 return res;
4949 location_t loc = gimple_location (stmt);
4950 switch (gimple_code (stmt))
4952 case GIMPLE_ASSIGN:
4954 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4956 switch (get_gimple_rhs_class (subcode))
4958 case GIMPLE_SINGLE_RHS:
4960 tree rhs = gimple_assign_rhs1 (stmt);
4961 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4963 if (TREE_CODE (rhs) == SSA_NAME)
4965 /* If the RHS is an SSA_NAME, return its known constant value,
4966 if any. */
4967 return (*valueize) (rhs);
4969 /* Handle propagating invariant addresses into address
4970 operations. */
4971 else if (TREE_CODE (rhs) == ADDR_EXPR
4972 && !is_gimple_min_invariant (rhs))
4974 HOST_WIDE_INT offset = 0;
4975 tree base;
4976 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4977 &offset,
4978 valueize);
4979 if (base
4980 && (CONSTANT_CLASS_P (base)
4981 || decl_address_invariant_p (base)))
4982 return build_invariant_address (TREE_TYPE (rhs),
4983 base, offset);
4985 else if (TREE_CODE (rhs) == CONSTRUCTOR
4986 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4987 && (CONSTRUCTOR_NELTS (rhs)
4988 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4990 unsigned i;
4991 tree val, *vec;
4993 vec = XALLOCAVEC (tree,
4994 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4995 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4997 val = (*valueize) (val);
4998 if (TREE_CODE (val) == INTEGER_CST
4999 || TREE_CODE (val) == REAL_CST
5000 || TREE_CODE (val) == FIXED_CST)
5001 vec[i] = val;
5002 else
5003 return NULL_TREE;
5006 return build_vector (TREE_TYPE (rhs), vec);
5008 if (subcode == OBJ_TYPE_REF)
5010 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5011 /* If callee is constant, we can fold away the wrapper. */
5012 if (is_gimple_min_invariant (val))
5013 return val;
5016 if (kind == tcc_reference)
5018 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5019 || TREE_CODE (rhs) == REALPART_EXPR
5020 || TREE_CODE (rhs) == IMAGPART_EXPR)
5021 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5023 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5024 return fold_unary_loc (EXPR_LOCATION (rhs),
5025 TREE_CODE (rhs),
5026 TREE_TYPE (rhs), val);
5028 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5029 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5031 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5032 return fold_ternary_loc (EXPR_LOCATION (rhs),
5033 TREE_CODE (rhs),
5034 TREE_TYPE (rhs), val,
5035 TREE_OPERAND (rhs, 1),
5036 TREE_OPERAND (rhs, 2));
5038 else if (TREE_CODE (rhs) == MEM_REF
5039 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5041 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5042 if (TREE_CODE (val) == ADDR_EXPR
5043 && is_gimple_min_invariant (val))
5045 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5046 unshare_expr (val),
5047 TREE_OPERAND (rhs, 1));
5048 if (tem)
5049 rhs = tem;
5052 return fold_const_aggregate_ref_1 (rhs, valueize);
5054 else if (kind == tcc_declaration)
5055 return get_symbol_constant_value (rhs);
5056 return rhs;
5059 case GIMPLE_UNARY_RHS:
5060 return NULL_TREE;
5062 case GIMPLE_BINARY_RHS:
5063 /* Translate &x + CST into an invariant form suitable for
5064 further propagation. */
5065 if (subcode == POINTER_PLUS_EXPR)
5067 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5068 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5069 if (TREE_CODE (op0) == ADDR_EXPR
5070 && TREE_CODE (op1) == INTEGER_CST)
5072 tree off = fold_convert (ptr_type_node, op1);
5073 return build_fold_addr_expr_loc
5074 (loc,
5075 fold_build2 (MEM_REF,
5076 TREE_TYPE (TREE_TYPE (op0)),
5077 unshare_expr (op0), off));
5080 /* Canonicalize bool != 0 and bool == 0 appearing after
5081 valueization. While gimple_simplify handles this
5082 it can get confused by the ~X == 1 -> X == 0 transform
5083 which we cant reduce to a SSA name or a constant
5084 (and we have no way to tell gimple_simplify to not
5085 consider those transforms in the first place). */
5086 else if (subcode == EQ_EXPR
5087 || subcode == NE_EXPR)
5089 tree lhs = gimple_assign_lhs (stmt);
5090 tree op0 = gimple_assign_rhs1 (stmt);
5091 if (useless_type_conversion_p (TREE_TYPE (lhs),
5092 TREE_TYPE (op0)))
5094 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5095 op0 = (*valueize) (op0);
5096 if (TREE_CODE (op0) == INTEGER_CST)
5097 std::swap (op0, op1);
5098 if (TREE_CODE (op1) == INTEGER_CST
5099 && ((subcode == NE_EXPR && integer_zerop (op1))
5100 || (subcode == EQ_EXPR && integer_onep (op1))))
5101 return op0;
5104 return NULL_TREE;
5106 case GIMPLE_TERNARY_RHS:
5108 /* Handle ternary operators that can appear in GIMPLE form. */
5109 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5110 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5111 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5112 return fold_ternary_loc (loc, subcode,
5113 gimple_expr_type (stmt), op0, op1, op2);
5116 default:
5117 gcc_unreachable ();
5121 case GIMPLE_CALL:
5123 tree fn;
5124 gcall *call_stmt = as_a <gcall *> (stmt);
5126 if (gimple_call_internal_p (stmt))
5128 enum tree_code subcode = ERROR_MARK;
5129 switch (gimple_call_internal_fn (stmt))
5131 case IFN_UBSAN_CHECK_ADD:
5132 subcode = PLUS_EXPR;
5133 break;
5134 case IFN_UBSAN_CHECK_SUB:
5135 subcode = MINUS_EXPR;
5136 break;
5137 case IFN_UBSAN_CHECK_MUL:
5138 subcode = MULT_EXPR;
5139 break;
5140 default:
5141 return NULL_TREE;
5143 tree arg0 = gimple_call_arg (stmt, 0);
5144 tree arg1 = gimple_call_arg (stmt, 1);
5145 tree op0 = (*valueize) (arg0);
5146 tree op1 = (*valueize) (arg1);
5148 if (TREE_CODE (op0) != INTEGER_CST
5149 || TREE_CODE (op1) != INTEGER_CST)
5151 switch (subcode)
5153 case MULT_EXPR:
5154 /* x * 0 = 0 * x = 0 without overflow. */
5155 if (integer_zerop (op0) || integer_zerop (op1))
5156 return build_zero_cst (TREE_TYPE (arg0));
5157 break;
5158 case MINUS_EXPR:
5159 /* y - y = 0 without overflow. */
5160 if (operand_equal_p (op0, op1, 0))
5161 return build_zero_cst (TREE_TYPE (arg0));
5162 break;
5163 default:
5164 break;
5167 tree res
5168 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5169 if (res
5170 && TREE_CODE (res) == INTEGER_CST
5171 && !TREE_OVERFLOW (res))
5172 return res;
5173 return NULL_TREE;
5176 fn = (*valueize) (gimple_call_fn (stmt));
5177 if (TREE_CODE (fn) == ADDR_EXPR
5178 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5179 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5180 && gimple_builtin_call_types_compatible_p (stmt,
5181 TREE_OPERAND (fn, 0)))
5183 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5184 tree retval;
5185 unsigned i;
5186 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5187 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5188 retval = fold_builtin_call_array (loc,
5189 gimple_call_return_type (call_stmt),
5190 fn, gimple_call_num_args (stmt), args);
5191 if (retval)
5193 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5194 STRIP_NOPS (retval);
5195 retval = fold_convert (gimple_call_return_type (call_stmt),
5196 retval);
5198 return retval;
5200 return NULL_TREE;
5203 default:
5204 return NULL_TREE;
5208 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5209 Returns NULL_TREE if folding to a constant is not possible, otherwise
5210 returns a constant according to is_gimple_min_invariant. */
5212 tree
5213 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5215 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5216 if (res && is_gimple_min_invariant (res))
5217 return res;
5218 return NULL_TREE;
5222 /* The following set of functions are supposed to fold references using
5223 their constant initializers. */
5225 /* See if we can find constructor defining value of BASE.
5226 When we know the consructor with constant offset (such as
5227 base is array[40] and we do know constructor of array), then
5228 BIT_OFFSET is adjusted accordingly.
5230 As a special case, return error_mark_node when constructor
5231 is not explicitly available, but it is known to be zero
5232 such as 'static const int a;'. */
5233 static tree
5234 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5235 tree (*valueize)(tree))
5237 HOST_WIDE_INT bit_offset2, size, max_size;
5238 if (TREE_CODE (base) == MEM_REF)
5240 if (!integer_zerop (TREE_OPERAND (base, 1)))
5242 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5243 return NULL_TREE;
5244 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5245 * BITS_PER_UNIT);
5248 if (valueize
5249 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5250 base = valueize (TREE_OPERAND (base, 0));
5251 if (!base || TREE_CODE (base) != ADDR_EXPR)
5252 return NULL_TREE;
5253 base = TREE_OPERAND (base, 0);
5256 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5257 DECL_INITIAL. If BASE is a nested reference into another
5258 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5259 the inner reference. */
5260 switch (TREE_CODE (base))
5262 case VAR_DECL:
5263 case CONST_DECL:
5265 tree init = ctor_for_folding (base);
5267 /* Our semantic is exact opposite of ctor_for_folding;
5268 NULL means unknown, while error_mark_node is 0. */
5269 if (init == error_mark_node)
5270 return NULL_TREE;
5271 if (!init)
5272 return error_mark_node;
5273 return init;
5276 case ARRAY_REF:
5277 case COMPONENT_REF:
5278 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5279 if (max_size == -1 || size != max_size)
5280 return NULL_TREE;
5281 *bit_offset += bit_offset2;
5282 return get_base_constructor (base, bit_offset, valueize);
5284 case STRING_CST:
5285 case CONSTRUCTOR:
5286 return base;
5288 default:
5289 return NULL_TREE;
5293 /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
5294 SIZE (in bits) to the memory at bit OFFSET.  FROM_DECL is the declaration
the constructor ultimately comes from and is passed down for diagnostics /
canonicalization.  Returns the folded value, NULL_TREE when the access
cannot be folded, or a zero constant for memory the constructor leaves
implicit.  */
5296 static tree
5297 fold_array_ctor_reference (tree type, tree ctor,
5298 unsigned HOST_WIDE_INT offset,
5299 unsigned HOST_WIDE_INT size,
5300 tree from_decl)
5302 unsigned HOST_WIDE_INT cnt;
5303 tree cfield, cval;
5304 offset_int low_bound;
5305 offset_int elt_size;
5306 offset_int index, max_index;
5307 offset_int access_index;
5308 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5309 HOST_WIDE_INT inner_offset;
5311 /* Compute low bound and elt size.  */
5312 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5313 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5314 if (domain_type && TYPE_MIN_VALUE (domain_type))
5316 /* Static constructors for variably sized objects make no sense.  */
5317 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5318 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5319 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5321 else
5322 low_bound = 0;
5323 /* Static constructors for variably sized objects make no sense.  */
5324 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5325 == INTEGER_CST);
5326 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5328 /* We can handle only constantly sized accesses that are known to not
5329 be larger than size of array element.  */
5330 if (!TYPE_SIZE_UNIT (type)
5331 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5332 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5333 || elt_size == 0)
5334 return NULL_TREE;
5336 /* Compute the array index we look for.  */
5337 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5338 elt_size);
5339 access_index += low_bound;
/* Reduce the index to the precision/signedness of the array's index
   type so comparisons against constructor indices are consistent.  */
5340 if (index_type)
5341 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5342 TYPE_SIGN (index_type));
5344 /* And offset within the access.  */
5345 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5347 /* See if the array field is large enough to span whole access.  We do not
5348 care to fold accesses spanning multiple array indexes.  */
5349 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5350 return NULL_TREE;
/* Start one before the first valid index; an elt with a NULL index
   below then increments to the proper first index.  */
5352 index = low_bound - 1;
5353 if (index_type)
5354 index = wi::ext (index, TYPE_PRECISION (index_type),
5355 TYPE_SIGN (index_type));
5357 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5359 /* Array constructor might explicitly set index, or specify range
5360 or leave index NULL meaning that it is next index after previous
5361 one.  */
5362 if (cfield)
5364 if (TREE_CODE (cfield) == INTEGER_CST)
5365 max_index = index = wi::to_offset (cfield);
5366 else
5368 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5369 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5370 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5373 else
5375 index += 1;
5376 if (index_type)
5377 index = wi::ext (index, TYPE_PRECISION (index_type),
5378 TYPE_SIGN (index_type));
5379 max_index = index;
5382 /* Do we have match?  Recurse into the element's value for the
remaining INNER_OFFSET/SIZE.  */
5383 if (wi::cmpu (access_index, index) >= 0
5384 && wi::cmpu (access_index, max_index) <= 0)
5385 return fold_ctor_reference (type, cval, inner_offset, size,
5386 from_decl);
5388 /* When memory is not explicitly mentioned in constructor,
5389 it is 0 (or out of range).  */
5390 return build_zero_cst (type);
5393 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5394 Fold reference of type TYPE and size SIZE (in bits) to the memory at
bit OFFSET.  FROM_DECL is the declaration the constructor originates
from.  Returns the folded value, NULL_TREE when the access spans
multiple fields or only partially overlaps one, or a zero constant for
memory the constructor leaves implicit.  */
5396 static tree
5397 fold_nonarray_ctor_reference (tree type, tree ctor,
5398 unsigned HOST_WIDE_INT offset,
5399 unsigned HOST_WIDE_INT size,
5400 tree from_decl)
5402 unsigned HOST_WIDE_INT cnt;
5403 tree cfield, cval;
5405 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5406 cval)
5408 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5409 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5410 tree field_size = DECL_SIZE (cfield);
5411 offset_int bitoffset;
5412 offset_int bitoffset_end, access_end;
5414 /* Variable sized objects in static constructors makes no sense,
5415 but field_size can be NULL for flexible array members.  */
5416 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5417 && TREE_CODE (byte_offset) == INTEGER_CST
5418 && (field_size != NULL_TREE
5419 ? TREE_CODE (field_size) == INTEGER_CST
5420 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE))
5422 /* Compute bit offset of the field.  */
5423 bitoffset = (wi::to_offset (field_offset)
5424 + wi::lshift (wi::to_offset (byte_offset),
5425 LOG2_BITS_PER_UNIT));
5426 /* Compute bit offset where the field ends.  */
5427 if (field_size != NULL_TREE)
5428 bitoffset_end = bitoffset + wi::to_offset (field_size);
5429 else
/* Flexible array member: no known end offset.  */
5430 bitoffset_end = 0;
5432 access_end = offset_int (offset) + size;
5434 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5435 [BITOFFSET, BITOFFSET_END)?  */
5436 if (wi::cmps (access_end, bitoffset) > 0
5437 && (field_size == NULL_TREE
5438 || wi::lts_p (offset, bitoffset_end)))
5440 offset_int inner_offset = offset_int (offset) - bitoffset;
5441 /* We do have overlap.  Now see if field is large enough to
5442 cover the access.  Give up for accesses spanning multiple
5443 fields.  */
5444 if (wi::cmps (access_end, bitoffset_end) > 0)
5445 return NULL_TREE;
5446 if (wi::lts_p (offset, bitoffset))
5447 return NULL_TREE;
/* Access is fully contained in this field; recurse into its value.  */
5448 return fold_ctor_reference (type, cval,
5449 inner_offset.to_uhwi (), size,
5450 from_decl);
5453 /* When memory is not explicitly mentioned in constructor, it is 0.  */
5454 return build_zero_cst (type);
5457 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5458 (in bits) to the memory at bit OFFSET.  FROM_DECL is the declaration
CTOR initializes.  Dispatches to the array or non-array constructor
folders, or converts/reinterprets scalar constants directly.  Returns
the folded constant or NULL_TREE.  */
5460 tree
5461 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5462 unsigned HOST_WIDE_INT size, tree from_decl)
5464 tree ret;
5466 /* We found the field with exact match.  */
5467 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5468 && !offset)
5469 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5471 /* We are at the end of walk, see if we can view convert the
5472 result.  */
5473 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5474 /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
5475 && !compare_tree_int (TYPE_SIZE (type), size)
5476 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5478 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5479 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5480 if (ret)
5481 STRIP_USELESS_TYPE_CONVERSION (ret);
5482 return ret;
5484 /* For constants and byte-aligned/sized reads try to go through
5485 native_encode/interpret.  */
5486 if (CONSTANT_CLASS_P (ctor)
5487 && BITS_PER_UNIT == 8
5488 && offset % BITS_PER_UNIT == 0
5489 && size % BITS_PER_UNIT == 0
5490 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5492 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
/* Encode the constant's target representation into BUF and
   reinterpret the requested window as TYPE.  */
5493 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5494 offset / BITS_PER_UNIT) > 0)
5495 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5497 if (TREE_CODE (ctor) == CONSTRUCTOR)
5500 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5501 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5502 return fold_array_ctor_reference (type, ctor, offset, size,
5503 from_decl);
5504 else
5505 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5506 from_decl);
5509 return NULL_TREE;
5512 /* Return the tree representing the element referenced by T if T is an
5513 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
5514 names using VALUEIZE.  Return NULL_TREE otherwise.  VALUEIZE may be
NULL, in which case variable array indices are not resolved.  */
5516 tree
5517 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5519 tree ctor, idx, base;
5520 HOST_WIDE_INT offset, size, max_size;
5521 tree tem;
/* Never fold through volatile accesses.  */
5523 if (TREE_THIS_VOLATILE (t))
5524 return NULL_TREE;
5526 if (DECL_P (t))
5527 return get_symbol_constant_value (t);
5529 tem = fold_read_from_constant_string (t);
5530 if (tem)
5531 return tem;
5533 switch (TREE_CODE (t))
5535 case ARRAY_REF:
5536 case ARRAY_RANGE_REF:
5537 /* Constant indexes are handled well by get_base_constructor.
5538 Only special case variable offsets.
5539 FIXME: This code can't handle nested references with variable indexes
5540 (they will be handled only by iteration of ccp).  Perhaps we can bring
5541 get_ref_base_and_extent here and make it use a valueize callback.  */
5542 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5543 && valueize
5544 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5545 && TREE_CODE (idx) == INTEGER_CST)
5547 tree low_bound, unit_size;
5549 /* If the resulting bit-offset is constant, track it.  */
5550 if ((low_bound = array_ref_low_bound (t),
5551 TREE_CODE (low_bound) == INTEGER_CST)
5552 && (unit_size = array_ref_element_size (t),
5553 tree_fits_uhwi_p (unit_size)))
5555 offset_int woffset
5556 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5557 TYPE_PRECISION (TREE_TYPE (idx)));
5559 if (wi::fits_shwi_p (woffset))
5561 offset = woffset.to_shwi ();
5562 /* TODO: This code seems wrong, multiply then check
5563 to see if it fits.  */
5564 offset *= tree_to_uhwi (unit_size);
5565 offset *= BITS_PER_UNIT;
5567 base = TREE_OPERAND (t, 0);
5568 ctor = get_base_constructor (base, &offset, valueize);
5569 /* Empty constructor.  Always fold to 0.  */
5570 if (ctor == error_mark_node)
5571 return build_zero_cst (TREE_TYPE (t));
5572 /* Out of bound array access.  Value is undefined,
5573 but don't fold.  */
5574 if (offset < 0)
5575 return NULL_TREE;
5576 /* We can not determine ctor.  */
5577 if (!ctor)
5578 return NULL_TREE;
5579 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5580 tree_to_uhwi (unit_size)
5581 * BITS_PER_UNIT,
5582 base);
5586 /* Fallthru.  */
5588 case COMPONENT_REF:
5589 case BIT_FIELD_REF:
5590 case TARGET_MEM_REF:
5591 case MEM_REF:
5592 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5593 ctor = get_base_constructor (base, &offset, valueize);
5595 /* Empty constructor.  Always fold to 0.  */
5596 if (ctor == error_mark_node)
5597 return build_zero_cst (TREE_TYPE (t));
5598 /* We do not know precise address.  */
5599 if (max_size == -1 || max_size != size)
5600 return NULL_TREE;
5601 /* We can not determine ctor.  */
5602 if (!ctor)
5603 return NULL_TREE;
5605 /* Out of bound array access.  Value is undefined, but don't fold.  */
5606 if (offset < 0)
5607 return NULL_TREE;
5609 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5610 base);
5612 case REALPART_EXPR:
5613 case IMAGPART_EXPR:
/* Fold the complex-valued operand first, then extract the part.  */
5615 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5616 if (c && TREE_CODE (c) == COMPLEX_CST)
5617 return fold_build1_loc (EXPR_LOCATION (t),
5618 TREE_CODE (t), TREE_TYPE (t), c);
5619 break;
5622 default:
5623 break;
5626 return NULL_TREE;
5629 tree
5630 fold_const_aggregate_ref (tree t)
5632 return fold_const_aggregate_ref_1 (t, NULL);
5635 /* Lookup virtual method with index TOKEN in a virtual table V
5636 at OFFSET (in bytes).
5637 Set CAN_REFER if non-NULL to false if method
5638 is not referable or if the virtual table is ill-formed (such as rewritten
5639 by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
5641 tree
5642 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5643 tree v,
5644 unsigned HOST_WIDE_INT offset,
5645 bool *can_refer)
5647 tree vtable = v, init, fn;
5648 unsigned HOST_WIDE_INT size;
5649 unsigned HOST_WIDE_INT elt_size, access_index;
5650 tree domain_type;
5652 if (can_refer)
5653 *can_refer = true;
5655 /* First of all double check we have virtual table.  */
5656 if (TREE_CODE (v) != VAR_DECL
5657 || !DECL_VIRTUAL_P (v))
5659 /* Pass down that we lost track of the target.  */
5660 if (can_refer)
5661 *can_refer = false;
5662 return NULL_TREE;
5665 init = ctor_for_folding (v);
5667 /* The virtual tables should always be born with constructors
5668 and we always should assume that they are available for
5669 folding.  At the moment we do not stream them in all cases,
5670 but it should never happen that ctor seem unreachable.  */
5671 gcc_assert (init);
5672 if (init == error_mark_node)
/* error_mark_node from ctor_for_folding means the initializer is
   known to be unavailable; this only happens in LTO mode.  */
5674 gcc_assert (in_lto_p);
5675 /* Pass down that we lost track of the target.  */
5676 if (can_refer)
5677 *can_refer = false;
5678 return NULL_TREE;
5680 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5681 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
/* Convert OFFSET to bits and add the slot for TOKEN.  */
5682 offset *= BITS_PER_UNIT;
5683 offset += token * size;
5685 /* Lookup the value in the constructor that is assumed to be array.
5686 This is equivalent to
5687 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5688 offset, size, NULL);
5689 but in a constant time.  We expect that frontend produced a simple
5690 array without indexed initializers.  */
5692 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5693 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5694 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5695 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5697 access_index = offset / BITS_PER_UNIT / elt_size;
5698 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5700 /* This code makes an assumption that there are no
5701 indexed fields produced by C++ FE, so we can directly index the array.  */
5702 if (access_index < CONSTRUCTOR_NELTS (init))
5704 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5705 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5706 STRIP_NOPS (fn);
5708 else
5709 fn = NULL;
5711 /* For type inconsistent program we may end up looking up virtual method
5712 in virtual table that does not contain TOKEN entries.  We may overrun
5713 the virtual table and pick up a constant or RTTI info pointer.
5714 In any case the call is undefined.  */
5715 if (!fn
5716 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5717 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5718 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5719 else
5721 fn = TREE_OPERAND (fn, 0);
5723 /* When cgraph node is missing and function is not public, we cannot
5724 devirtualize.  This can happen in WHOPR when the actual method
5725 ends up in other partition, because we found devirtualization
5726 possibility too late.  */
5727 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5729 if (can_refer)
/* Report the target even though it is unreferable so the caller
   can still use it for diagnostics.  */
5731 *can_refer = false;
5732 return fn;
5734 return NULL_TREE;
5738 /* Make sure we create a cgraph node for functions we'll reference.
5739 They can be non-existent if the reference comes from an entry
5740 of an external vtable for example.  */
5741 cgraph_node::get_create (fn);
5743 return fn;
5746 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5747 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5748 KNOWN_BINFO carries the binfo describing the true type of
5749 OBJ_TYPE_REF_OBJECT(REF).
5750 Set CAN_REFER if non-NULL to false if method
5751 is not referable or if the virtual table is ill-formed (such as rewriten
5752 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5754 tree
5755 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5756 bool *can_refer)
5758 unsigned HOST_WIDE_INT offset;
5759 tree v;
5761 v = BINFO_VTABLE (known_binfo);
5762 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5763 if (!v)
5764 return NULL_TREE;
5766 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5768 if (can_refer)
5769 *can_refer = false;
5770 return NULL_TREE;
5772 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5775 /* Return true iff VAL is a gimple expression that is known to be
5776 non-negative.  Restricted to floating-point inputs.  A false result
means "unknown", not "negative".  */
5778 bool
5779 gimple_val_nonnegative_real_p (tree val)
5781 gimple *def_stmt;
5783 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5785 /* Use existing logic for non-gimple trees.  */
5786 if (tree_expr_nonnegative_p (val))
5787 return true;
5789 if (TREE_CODE (val) != SSA_NAME)
5790 return false;
5792 /* Currently we look only at the immediately defining statement
5793 to make this determination, since recursion on defining
5794 statements of operands can lead to quadratic behavior in the
5795 worst case.  This is expected to catch almost all occurrences
5796 in practice.  It would be possible to implement limited-depth
5797 recursion if important cases are lost.  Alternatively, passes
5798 that need this information (such as the pow/powi lowering code
5799 in the cse_sincos pass) could be revised to provide it through
5800 dataflow propagation.  */
5802 def_stmt = SSA_NAME_DEF_STMT (val);
5804 if (is_gimple_assign (def_stmt))
5806 tree op0, op1;
5808 /* See fold-const.c:tree_expr_nonnegative_p for additional
5809 cases that could be handled with recursion.  */
5811 switch (gimple_assign_rhs_code (def_stmt))
5813 case ABS_EXPR:
5814 /* Always true for floating-point operands.  */
5815 return true;
5817 case MULT_EXPR:
5818 /* True if the two operands are identical (since we are
5819 restricted to floating-point inputs).  */
5820 op0 = gimple_assign_rhs1 (def_stmt);
5821 op1 = gimple_assign_rhs2 (def_stmt);
5823 if (op0 == op1
5824 || operand_equal_p (op0, op1, 0))
5825 return true;
/* Fall through to the default "unknown" answer.  */
5827 default:
5828 return false;
5831 else if (is_gimple_call (def_stmt))
5833 tree fndecl = gimple_call_fndecl (def_stmt);
5834 if (fndecl
5835 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5837 tree arg1;
5839 switch (DECL_FUNCTION_CODE (fndecl))
/* These builtins mathematically return non-negative values.  */
5841 CASE_FLT_FN (BUILT_IN_ACOS):
5842 CASE_FLT_FN (BUILT_IN_ACOSH):
5843 CASE_FLT_FN (BUILT_IN_CABS):
5844 CASE_FLT_FN (BUILT_IN_COSH):
5845 CASE_FLT_FN (BUILT_IN_ERFC):
5846 CASE_FLT_FN (BUILT_IN_EXP):
5847 CASE_FLT_FN (BUILT_IN_EXP10):
5848 CASE_FLT_FN (BUILT_IN_EXP2):
5849 CASE_FLT_FN (BUILT_IN_FABS):
5850 CASE_FLT_FN (BUILT_IN_FDIM):
5851 CASE_FLT_FN (BUILT_IN_HYPOT):
5852 CASE_FLT_FN (BUILT_IN_POW10):
5853 return true;
5855 CASE_FLT_FN (BUILT_IN_SQRT):
5856 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5857 nonnegative inputs.  */
5858 if (!HONOR_SIGNED_ZEROS (val))
5859 return true;
5861 break;
5863 CASE_FLT_FN (BUILT_IN_POWI):
5864 /* True if the second argument is an even integer.  */
5865 arg1 = gimple_call_arg (def_stmt, 1);
5867 if (TREE_CODE (arg1) == INTEGER_CST
5868 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5869 return true;
5871 break;
5873 CASE_FLT_FN (BUILT_IN_POW):
5874 /* True if the second argument is an even integer-valued
5875 real.  */
5876 arg1 = gimple_call_arg (def_stmt, 1);
5878 if (TREE_CODE (arg1) == REAL_CST)
5880 REAL_VALUE_TYPE c;
5881 HOST_WIDE_INT n;
5883 c = TREE_REAL_CST (arg1);
5884 n = real_to_integer (&c);
/* Exponent must be even AND exactly representable so the
   round-trip through integer did not change the value.  */
5886 if ((n & 1) == 0)
5888 REAL_VALUE_TYPE cint;
5889 real_from_integer (&cint, VOIDmode, n, SIGNED);
5890 if (real_identical (&c, &cint))
5891 return true;
5895 break;
5897 default:
5898 return false;
5903 return false;
5906 /* Given a pointer value OP0, return a simplified version of an
5907 indirection through OP0, or NULL_TREE if no simplification is
5908 possible.  Note that the resulting type may be different from
5909 the type pointed to in the sense that it is still compatible
5910 from the langhooks point of view.  */
5912 tree
5913 gimple_fold_indirect_ref (tree t)
5915 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5916 tree sub = t;
5917 tree subtype;
5919 STRIP_NOPS (sub);
5920 subtype = TREE_TYPE (sub);
5921 if (!POINTER_TYPE_P (subtype))
5922 return NULL_TREE;
5924 if (TREE_CODE (sub) == ADDR_EXPR)
5926 tree op = TREE_OPERAND (sub, 0);
5927 tree optype = TREE_TYPE (op);
5928 /* *&p => p  */
5929 if (useless_type_conversion_p (type, optype))
5930 return op;
5932 /* *(foo *)&fooarray => fooarray[0]  */
5933 if (TREE_CODE (optype) == ARRAY_TYPE
5934 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5935 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5937 tree type_domain = TYPE_DOMAIN (optype);
5938 tree min_val = size_zero_node;
5939 if (type_domain && TYPE_MIN_VALUE (type_domain))
5940 min_val = TYPE_MIN_VALUE (type_domain);
5941 if (TREE_CODE (min_val) == INTEGER_CST)
5942 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5944 /* *(foo *)&complexfoo => __real__ complexfoo  */
5945 else if (TREE_CODE (optype) == COMPLEX_TYPE
5946 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5947 return fold_build1 (REALPART_EXPR, type, op);
5948 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...>  */
5949 else if (TREE_CODE (optype) == VECTOR_TYPE
5950 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5952 tree part_width = TYPE_SIZE (type);
5953 tree index = bitsize_int (0);
5954 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5958 /* *(p + CST) -> ...  */
5959 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5960 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5962 tree addr = TREE_OPERAND (sub, 0);
5963 tree off = TREE_OPERAND (sub, 1);
5964 tree addrtype;
5966 STRIP_NOPS (addr);
5967 addrtype = TREE_TYPE (addr);
5969 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...>  */
5970 if (TREE_CODE (addr) == ADDR_EXPR
5971 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5972 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5973 && tree_fits_uhwi_p (off))
5975 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5976 tree part_width = TYPE_SIZE (type);
5977 unsigned HOST_WIDE_INT part_widthi
5978 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5979 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5980 tree index = bitsize_int (indexi);
/* Only fold when the extracted element lies within the vector.  */
5981 if (offset / part_widthi
5982 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5983 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5984 part_width, index);
5987 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo  */
5988 if (TREE_CODE (addr) == ADDR_EXPR
5989 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5990 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5992 tree size = TYPE_SIZE_UNIT (type);
/* Offset equal to the component size selects the imaginary part.  */
5993 if (tree_int_cst_equal (size, off))
5994 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5997 /* *(p + CST) -> MEM_REF <p, CST>.  */
5998 if (TREE_CODE (addr) != ADDR_EXPR
5999 || DECL_P (TREE_OPERAND (addr, 0)))
6000 return fold_build2 (MEM_REF, type,
6001 addr,
6002 wide_int_to_tree (ptype, off));
6005 /* *(foo *)fooarrptr => (*fooarrptr)[0]  */
6006 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6007 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6008 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6010 tree type_domain;
6011 tree min_val = size_zero_node;
6012 tree osub = sub;
/* Recursively simplify the inner indirection first.  */
6013 sub = gimple_fold_indirect_ref (sub);
6014 if (! sub)
6015 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6016 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6017 if (type_domain && TYPE_MIN_VALUE (type_domain))
6018 min_val = TYPE_MIN_VALUE (type_domain);
6019 if (TREE_CODE (min_val) == INTEGER_CST)
6020 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6023 return NULL_TREE;
6026 /* Return true if CODE is an operation that when operating on signed
6027 integer types involves undefined behavior on overflow and the
6028 operation can be expressed with unsigned arithmetic. */
6030 bool
6031 arith_code_with_undefined_signed_overflow (tree_code code)
6033 switch (code)
6035 case PLUS_EXPR:
6036 case MINUS_EXPR:
6037 case MULT_EXPR:
6038 case NEGATE_EXPR:
6039 case POINTER_PLUS_EXPR:
6040 return true;
6041 default:
6042 return false;
6046 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6047 operation that can be transformed to unsigned arithmetic by converting
6048 its operand, carrying out the operation in the corresponding unsigned
6049 type and converting the result back to the original type.
6051 Returns a sequence of statements that replace STMT and also contain
6052 a modified form of STMT itself.  STMT is mutated in place: its
operands are converted to the unsigned type, its LHS is replaced by a
fresh SSA name, and a final conversion back to the original LHS is
appended.  */
6054 gimple_seq
6055 rewrite_to_defined_overflow (gimple *stmt)
6057 if (dump_file && (dump_flags & TDF_DETAILS))
6059 fprintf (dump_file, "rewriting stmt with undefined signed "
6060 "overflow ");
6061 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6064 tree lhs = gimple_assign_lhs (stmt);
6065 tree type = unsigned_type_for (TREE_TYPE (lhs));
6066 gimple_seq stmts = NULL;
/* Convert every operand (ops 1..n-1; op 0 is the LHS) to the unsigned
   type, collecting any conversion statements that generates.  */
6067 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6069 gimple_seq stmts2 = NULL;
6070 gimple_set_op (stmt, i,
6071 force_gimple_operand (fold_convert (type,
6072 gimple_op (stmt, i)),
6073 &stmts2, true, NULL_TREE));
6074 gimple_seq_add_seq (&stmts, stmts2);
6076 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
/* POINTER_PLUS_EXPR has no meaning once both operands are unsigned
   integers; use plain PLUS_EXPR instead.  */
6077 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6078 gimple_assign_set_rhs_code (stmt, PLUS_EXPR)
6079 gimple_seq_add_stmt (&stmts, stmt);
6080 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6081 gimple_seq_add_stmt (&stmts, cvt);
6083 return stmts;
6087 /* The valueization hook we use for the gimple_build API simplification.
6088 This makes us match fold_buildN behavior by only combining with
6089 statements in the sequence(s) we are currently building. */
6091 static tree
6092 gimple_build_valueize (tree op)
6094 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6095 return op;
6096 return NULL_TREE;
6099 /* Build the expression CODE OP0 of type TYPE with location LOC,
6100 simplifying it first if possible. Returns the built
6101 expression value and appends statements possibly defining it
6102 to SEQ. */
6104 tree
6105 gimple_build (gimple_seq *seq, location_t loc,
6106 enum tree_code code, tree type, tree op0)
6108 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6109 if (!res)
6111 if (gimple_in_ssa_p (cfun))
6112 res = make_ssa_name (type);
6113 else
6114 res = create_tmp_reg (type);
6115 gimple *stmt;
6116 if (code == REALPART_EXPR
6117 || code == IMAGPART_EXPR
6118 || code == VIEW_CONVERT_EXPR)
6119 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6120 else
6121 stmt = gimple_build_assign (res, code, op0);
6122 gimple_set_location (stmt, loc);
6123 gimple_seq_add_stmt_without_update (seq, stmt);
6125 return res;
6128 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6129 simplifying it first if possible. Returns the built
6130 expression value and appends statements possibly defining it
6131 to SEQ. */
6133 tree
6134 gimple_build (gimple_seq *seq, location_t loc,
6135 enum tree_code code, tree type, tree op0, tree op1)
6137 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6138 if (!res)
6140 if (gimple_in_ssa_p (cfun))
6141 res = make_ssa_name (type);
6142 else
6143 res = create_tmp_reg (type);
6144 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6145 gimple_set_location (stmt, loc);
6146 gimple_seq_add_stmt_without_update (seq, stmt);
6148 return res;
6151 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6152 simplifying it first if possible. Returns the built
6153 expression value and appends statements possibly defining it
6154 to SEQ. */
6156 tree
6157 gimple_build (gimple_seq *seq, location_t loc,
6158 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6160 tree res = gimple_simplify (code, type, op0, op1, op2,
6161 seq, gimple_build_valueize);
6162 if (!res)
6164 if (gimple_in_ssa_p (cfun))
6165 res = make_ssa_name (type);
6166 else
6167 res = create_tmp_reg (type);
6168 gimple *stmt;
6169 if (code == BIT_FIELD_REF)
6170 stmt = gimple_build_assign (res, code,
6171 build3 (code, type, op0, op1, op2));
6172 else
6173 stmt = gimple_build_assign (res, code, op0, op1, op2);
6174 gimple_set_location (stmt, loc);
6175 gimple_seq_add_stmt_without_update (seq, stmt);
6177 return res;
6180 /* Build the call FN (ARG0) with a result of type TYPE
6181 (or no result if TYPE is void) with location LOC,
6182 simplifying it first if possible. Returns the built
6183 expression value (or NULL_TREE if TYPE is void) and appends
6184 statements possibly defining it to SEQ. */
6186 tree
6187 gimple_build (gimple_seq *seq, location_t loc,
6188 enum built_in_function fn, tree type, tree arg0)
6190 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6191 if (!res)
6193 tree decl = builtin_decl_implicit (fn);
6194 gimple *stmt = gimple_build_call (decl, 1, arg0);
6195 if (!VOID_TYPE_P (type))
6197 if (gimple_in_ssa_p (cfun))
6198 res = make_ssa_name (type);
6199 else
6200 res = create_tmp_reg (type);
6201 gimple_call_set_lhs (stmt, res);
6203 gimple_set_location (stmt, loc);
6204 gimple_seq_add_stmt_without_update (seq, stmt);
6206 return res;
6209 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6210 (or no result if TYPE is void) with location LOC,
6211 simplifying it first if possible. Returns the built
6212 expression value (or NULL_TREE if TYPE is void) and appends
6213 statements possibly defining it to SEQ. */
6215 tree
6216 gimple_build (gimple_seq *seq, location_t loc,
6217 enum built_in_function fn, tree type, tree arg0, tree arg1)
6219 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6220 if (!res)
6222 tree decl = builtin_decl_implicit (fn);
6223 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6224 if (!VOID_TYPE_P (type))
6226 if (gimple_in_ssa_p (cfun))
6227 res = make_ssa_name (type);
6228 else
6229 res = create_tmp_reg (type);
6230 gimple_call_set_lhs (stmt, res);
6232 gimple_set_location (stmt, loc);
6233 gimple_seq_add_stmt_without_update (seq, stmt);
6235 return res;
6238 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6239 (or no result if TYPE is void) with location LOC,
6240 simplifying it first if possible. Returns the built
6241 expression value (or NULL_TREE if TYPE is void) and appends
6242 statements possibly defining it to SEQ. */
6244 tree
6245 gimple_build (gimple_seq *seq, location_t loc,
6246 enum built_in_function fn, tree type,
6247 tree arg0, tree arg1, tree arg2)
6249 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6250 seq, gimple_build_valueize);
6251 if (!res)
6253 tree decl = builtin_decl_implicit (fn);
6254 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6255 if (!VOID_TYPE_P (type))
6257 if (gimple_in_ssa_p (cfun))
6258 res = make_ssa_name (type);
6259 else
6260 res = create_tmp_reg (type);
6261 gimple_call_set_lhs (stmt, res);
6263 gimple_set_location (stmt, loc);
6264 gimple_seq_add_stmt_without_update (seq, stmt);
6266 return res;
6269 /* Build the conversion (TYPE) OP with a result of type TYPE
6270 with location LOC if such conversion is necessary in GIMPLE,
6271 simplifying it first.
6272 Returns the built expression value and appends
6273 statements possibly defining it to SEQ.  */
6275 tree
6276 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
/* A useless conversion needs no statement; return OP unchanged.  */
6278 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6279 return op;
6280 return gimple_build (seq, loc, NOP_EXPR, type, op)