Clean up some minor white space issues in trans-decl.c and trans-expr.c
[official-gcc.git] / gcc / gimple-fold.c
blob70871cfcf27a6fcf5557a81644c7569d7e1d668b
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
81 static bool
82 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
84 varpool_node *vnode;
85 struct cgraph_node *node;
86 symtab_node *snode;
88 if (DECL_ABSTRACT_P (decl))
89 return false;
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (symtab->function_flags_ready)
102 return true;
103 snode = symtab_node::get (decl);
104 if (!snode || !snode->definition)
105 return false;
106 node = dyn_cast <cgraph_node *> (snode);
107 return !node || !node->global.inlined_to;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl)
116 && (vnode = varpool_node::get (from_decl)) != NULL
117 && vnode->definition)
118 || (flag_ltrans
119 && (vnode = varpool_node::get (from_decl)) != NULL
120 && vnode->in_other_partition))
121 return true;
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
129 return false;
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
146 if (!symtab->function_flags_ready)
147 return true;
149 snode = symtab_node::get (decl);
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
163 tree
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
191 if (!base)
192 return NULL_TREE;
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
197 return NULL_TREE;
198 if (TREE_CODE (base) == VAR_DECL)
199 TREE_ADDRESSABLE (base) = 1;
200 else if (TREE_CODE (base) == FUNCTION_DECL)
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
205 cgraph_node::get_create (base);
207 /* Fixup types in global initializers. */
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
217 return orig_cval;
220 /* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
223 tree
224 get_symbol_constant_value (tree sym)
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
229 if (val)
231 val = canonicalize_constructor_val (unshare_expr (val), sym);
232 if (val && is_gimple_min_invariant (val))
233 return val;
234 else
235 return NULL_TREE;
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
241 && is_gimple_reg_type (TREE_TYPE (sym)))
242 return build_zero_cst (TREE_TYPE (sym));
245 return NULL_TREE;
250 /* Subroutine of fold_stmt. We perform several simplifications of the
251 memory reference tree EXPR and make sure to re-gimplify them properly
252 after propagation of constant addresses. IS_LHS is true if the
253 reference is supposed to be an lvalue. */
255 static tree
256 maybe_fold_reference (tree expr, bool is_lhs)
258 tree result;
260 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
261 || TREE_CODE (expr) == REALPART_EXPR
262 || TREE_CODE (expr) == IMAGPART_EXPR)
263 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
264 return fold_unary_loc (EXPR_LOCATION (expr),
265 TREE_CODE (expr),
266 TREE_TYPE (expr),
267 TREE_OPERAND (expr, 0));
268 else if (TREE_CODE (expr) == BIT_FIELD_REF
269 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
270 return fold_ternary_loc (EXPR_LOCATION (expr),
271 TREE_CODE (expr),
272 TREE_TYPE (expr),
273 TREE_OPERAND (expr, 0),
274 TREE_OPERAND (expr, 1),
275 TREE_OPERAND (expr, 2));
277 if (!is_lhs
278 && (result = fold_const_aggregate_ref (expr))
279 && is_gimple_min_invariant (result))
280 return result;
282 return NULL_TREE;
286 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
287 replacement rhs for the statement or NULL_TREE if no simplification
288 could be made. It is assumed that the operands have been previously
289 folded. */
291 static tree
292 fold_gimple_assign (gimple_stmt_iterator *si)
294 gimple *stmt = gsi_stmt (*si);
295 enum tree_code subcode = gimple_assign_rhs_code (stmt);
296 location_t loc = gimple_location (stmt);
298 tree result = NULL_TREE;
300 switch (get_gimple_rhs_class (subcode))
302 case GIMPLE_SINGLE_RHS:
304 tree rhs = gimple_assign_rhs1 (stmt);
306 if (TREE_CLOBBER_P (rhs))
307 return NULL_TREE;
309 if (REFERENCE_CLASS_P (rhs))
310 return maybe_fold_reference (rhs, false);
312 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
314 tree val = OBJ_TYPE_REF_EXPR (rhs);
315 if (is_gimple_min_invariant (val))
316 return val;
317 else if (flag_devirtualize && virtual_method_call_p (rhs))
319 bool final;
320 vec <cgraph_node *>targets
321 = possible_polymorphic_call_targets (rhs, stmt, &final);
322 if (final && targets.length () <= 1 && dbg_cnt (devirt))
324 if (dump_enabled_p ())
326 location_t loc = gimple_location_safe (stmt);
327 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
328 "resolving virtual function address "
329 "reference to function %s\n",
330 targets.length () == 1
331 ? targets[0]->name ()
332 : "NULL");
334 if (targets.length () == 1)
336 val = fold_convert (TREE_TYPE (val),
337 build_fold_addr_expr_loc
338 (loc, targets[0]->decl));
339 STRIP_USELESS_TYPE_CONVERSION (val);
341 else
342 /* We can not use __builtin_unreachable here because it
343 can not have address taken. */
344 val = build_int_cst (TREE_TYPE (val), 0);
345 return val;
350 else if (TREE_CODE (rhs) == ADDR_EXPR)
352 tree ref = TREE_OPERAND (rhs, 0);
353 tree tem = maybe_fold_reference (ref, true);
354 if (tem
355 && TREE_CODE (tem) == MEM_REF
356 && integer_zerop (TREE_OPERAND (tem, 1)))
357 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
358 else if (tem)
359 result = fold_convert (TREE_TYPE (rhs),
360 build_fold_addr_expr_loc (loc, tem));
361 else if (TREE_CODE (ref) == MEM_REF
362 && integer_zerop (TREE_OPERAND (ref, 1)))
363 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
365 if (result)
367 /* Strip away useless type conversions. Both the
368 NON_LVALUE_EXPR that may have been added by fold, and
369 "useless" type conversions that might now be apparent
370 due to propagation. */
371 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (result != rhs && valid_gimple_rhs_p (result))
374 return result;
378 else if (TREE_CODE (rhs) == CONSTRUCTOR
379 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
381 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
382 unsigned i;
383 tree val;
385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
386 if (! CONSTANT_CLASS_P (val))
387 return NULL_TREE;
389 return build_vector_from_ctor (TREE_TYPE (rhs),
390 CONSTRUCTOR_ELTS (rhs));
393 else if (DECL_P (rhs))
394 return get_symbol_constant_value (rhs);
396 break;
398 case GIMPLE_UNARY_RHS:
399 break;
401 case GIMPLE_BINARY_RHS:
402 break;
404 case GIMPLE_TERNARY_RHS:
405 result = fold_ternary_loc (loc, subcode,
406 TREE_TYPE (gimple_assign_lhs (stmt)),
407 gimple_assign_rhs1 (stmt),
408 gimple_assign_rhs2 (stmt),
409 gimple_assign_rhs3 (stmt));
411 if (result)
413 STRIP_USELESS_TYPE_CONVERSION (result);
414 if (valid_gimple_rhs_p (result))
415 return result;
417 break;
419 case GIMPLE_INVALID_RHS:
420 gcc_unreachable ();
423 return NULL_TREE;
427 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
428 adjusting the replacement stmts location and virtual operands.
429 If the statement has a lhs the last stmt in the sequence is expected
430 to assign to that lhs. */
432 static void
433 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
435 gimple *stmt = gsi_stmt (*si_p);
437 if (gimple_has_location (stmt))
438 annotate_all_with_location (stmts, gimple_location (stmt));
440 /* First iterate over the replacement statements backward, assigning
441 virtual operands to their defining statements. */
442 gimple *laststore = NULL;
443 for (gimple_stmt_iterator i = gsi_last (stmts);
444 !gsi_end_p (i); gsi_prev (&i))
446 gimple *new_stmt = gsi_stmt (i);
447 if ((gimple_assign_single_p (new_stmt)
448 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
449 || (is_gimple_call (new_stmt)
450 && (gimple_call_flags (new_stmt)
451 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
453 tree vdef;
454 if (!laststore)
455 vdef = gimple_vdef (stmt);
456 else
457 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
458 gimple_set_vdef (new_stmt, vdef);
459 if (vdef && TREE_CODE (vdef) == SSA_NAME)
460 SSA_NAME_DEF_STMT (vdef) = new_stmt;
461 laststore = new_stmt;
465 /* Second iterate over the statements forward, assigning virtual
466 operands to their uses. */
467 tree reaching_vuse = gimple_vuse (stmt);
468 for (gimple_stmt_iterator i = gsi_start (stmts);
469 !gsi_end_p (i); gsi_next (&i))
471 gimple *new_stmt = gsi_stmt (i);
472 /* If the new statement possibly has a VUSE, update it with exact SSA
473 name we know will reach this one. */
474 if (gimple_has_mem_ops (new_stmt))
475 gimple_set_vuse (new_stmt, reaching_vuse);
476 gimple_set_modified (new_stmt, true);
477 if (gimple_vdef (new_stmt))
478 reaching_vuse = gimple_vdef (new_stmt);
481 /* If the new sequence does not do a store release the virtual
482 definition of the original statement. */
483 if (reaching_vuse
484 && reaching_vuse == gimple_vuse (stmt))
486 tree vdef = gimple_vdef (stmt);
487 if (vdef
488 && TREE_CODE (vdef) == SSA_NAME)
490 unlink_stmt_vdef (stmt);
491 release_ssa_name (vdef);
495 /* Finally replace the original statement with the sequence. */
496 gsi_replace_with_seq (si_p, stmts, false);
499 /* Convert EXPR into a GIMPLE value suitable for substitution on the
500 RHS of an assignment. Insert the necessary statements before
501 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
502 is replaced. If the call is expected to produces a result, then it
503 is replaced by an assignment of the new RHS to the result variable.
504 If the result is to be ignored, then the call is replaced by a
505 GIMPLE_NOP. A proper VDEF chain is retained by making the first
506 VUSE and the last VDEF of the whole sequence be the same as the replaced
507 statement and using new SSA names for stores in between. */
509 void
510 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
512 tree lhs;
513 gimple *stmt, *new_stmt;
514 gimple_stmt_iterator i;
515 gimple_seq stmts = NULL;
517 stmt = gsi_stmt (*si_p);
519 gcc_assert (is_gimple_call (stmt));
521 push_gimplify_context (gimple_in_ssa_p (cfun));
523 lhs = gimple_call_lhs (stmt);
524 if (lhs == NULL_TREE)
526 gimplify_and_add (expr, &stmts);
527 /* We can end up with folding a memcpy of an empty class assignment
528 which gets optimized away by C++ gimplification. */
529 if (gimple_seq_empty_p (stmts))
531 pop_gimplify_context (NULL);
532 if (gimple_in_ssa_p (cfun))
534 unlink_stmt_vdef (stmt);
535 release_defs (stmt);
537 gsi_replace (si_p, gimple_build_nop (), false);
538 return;
541 else
543 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
544 new_stmt = gimple_build_assign (lhs, tmp);
545 i = gsi_last (stmts);
546 gsi_insert_after_without_update (&i, new_stmt,
547 GSI_CONTINUE_LINKING);
550 pop_gimplify_context (NULL);
552 gsi_replace_with_seq_vops (si_p, stmts);
556 /* Replace the call at *GSI with the gimple value VAL. */
558 static void
559 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
561 gimple *stmt = gsi_stmt (*gsi);
562 tree lhs = gimple_call_lhs (stmt);
563 gimple *repl;
564 if (lhs)
566 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
567 val = fold_convert (TREE_TYPE (lhs), val);
568 repl = gimple_build_assign (lhs, val);
570 else
571 repl = gimple_build_nop ();
572 tree vdef = gimple_vdef (stmt);
573 if (vdef && TREE_CODE (vdef) == SSA_NAME)
575 unlink_stmt_vdef (stmt);
576 release_ssa_name (vdef);
578 gsi_replace (gsi, repl, false);
581 /* Replace the call at *GSI with the new call REPL and fold that
582 again. */
584 static void
585 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
587 gimple *stmt = gsi_stmt (*gsi);
588 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
589 gimple_set_location (repl, gimple_location (stmt));
590 if (gimple_vdef (stmt)
591 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
593 gimple_set_vdef (repl, gimple_vdef (stmt));
594 gimple_set_vuse (repl, gimple_vuse (stmt));
595 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
597 gsi_replace (gsi, repl, false);
598 fold_stmt (gsi);
601 /* Return true if VAR is a VAR_DECL or a component thereof. */
603 static bool
604 var_decl_component_p (tree var)
606 tree inner = var;
607 while (handled_component_p (inner))
608 inner = TREE_OPERAND (inner, 0);
609 return SSA_VAR_P (inner);
612 /* Fold function call to builtin mem{{,p}cpy,move}. Return
613 false if no simplification can be made.
614 If ENDP is 0, return DEST (like memcpy).
615 If ENDP is 1, return DEST+LEN (like mempcpy).
616 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
617 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
618 (memmove). */
620 static bool
621 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
622 tree dest, tree src, int endp)
624 gimple *stmt = gsi_stmt (*gsi);
625 tree lhs = gimple_call_lhs (stmt);
626 tree len = gimple_call_arg (stmt, 2);
627 tree destvar, srcvar;
628 location_t loc = gimple_location (stmt);
630 /* If the LEN parameter is zero, return DEST. */
631 if (integer_zerop (len))
633 gimple *repl;
634 if (gimple_call_lhs (stmt))
635 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
636 else
637 repl = gimple_build_nop ();
638 tree vdef = gimple_vdef (stmt);
639 if (vdef && TREE_CODE (vdef) == SSA_NAME)
641 unlink_stmt_vdef (stmt);
642 release_ssa_name (vdef);
644 gsi_replace (gsi, repl, false);
645 return true;
648 /* If SRC and DEST are the same (and not volatile), return
649 DEST{,+LEN,+LEN-1}. */
650 if (operand_equal_p (src, dest, 0))
652 unlink_stmt_vdef (stmt);
653 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
654 release_ssa_name (gimple_vdef (stmt));
655 if (!lhs)
657 gsi_replace (gsi, gimple_build_nop (), false);
658 return true;
660 goto done;
662 else
664 tree srctype, desttype;
665 unsigned int src_align, dest_align;
666 tree off0;
668 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
669 pointers as wide integer) and also may result in huge function
670 size because of inlined bounds copy. Thus don't inline for
671 functions we want to instrument. */
672 if (flag_check_pointer_bounds
673 && chkp_instrumentable_p (cfun->decl)
674 /* Even if data may contain pointers we can inline if copy
675 less than a pointer size. */
676 && (!tree_fits_uhwi_p (len)
677 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
678 return false;
680 /* Build accesses at offset zero with a ref-all character type. */
681 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
682 ptr_mode, true), 0);
684 /* If we can perform the copy efficiently with first doing all loads
685 and then all stores inline it that way. Currently efficiently
686 means that we can load all the memory into a single integer
687 register which is what MOVE_MAX gives us. */
688 src_align = get_pointer_alignment (src);
689 dest_align = get_pointer_alignment (dest);
690 if (tree_fits_uhwi_p (len)
691 && compare_tree_int (len, MOVE_MAX) <= 0
692 /* ??? Don't transform copies from strings with known length this
693 confuses the tree-ssa-strlen.c. This doesn't handle
694 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
695 reason. */
696 && !c_strlen (src, 2))
698 unsigned ilen = tree_to_uhwi (len);
699 if (exact_log2 (ilen) != -1)
701 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
702 if (type
703 && TYPE_MODE (type) != BLKmode
704 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
705 == ilen * 8)
706 /* If the destination pointer is not aligned we must be able
707 to emit an unaligned store. */
708 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
709 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
710 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
711 != CODE_FOR_nothing)))
713 tree srctype = type;
714 tree desttype = type;
715 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
716 srctype = build_aligned_type (type, src_align);
717 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
718 tree tem = fold_const_aggregate_ref (srcmem);
719 if (tem)
720 srcmem = tem;
721 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
722 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
723 src_align)
724 && (optab_handler (movmisalign_optab,
725 TYPE_MODE (type))
726 == CODE_FOR_nothing))
727 srcmem = NULL_TREE;
728 if (srcmem)
730 gimple *new_stmt;
731 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
733 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
734 if (gimple_in_ssa_p (cfun))
735 srcmem = make_ssa_name (TREE_TYPE (srcmem),
736 new_stmt);
737 else
738 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
739 gimple_assign_set_lhs (new_stmt, srcmem);
740 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
741 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
743 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
744 desttype = build_aligned_type (type, dest_align);
745 new_stmt
746 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
747 dest, off0),
748 srcmem);
749 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
750 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
751 if (gimple_vdef (new_stmt)
752 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
753 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
754 if (!lhs)
756 gsi_replace (gsi, new_stmt, false);
757 return true;
759 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
760 goto done;
766 if (endp == 3)
768 /* Both DEST and SRC must be pointer types.
769 ??? This is what old code did. Is the testing for pointer types
770 really mandatory?
772 If either SRC is readonly or length is 1, we can use memcpy. */
773 if (!dest_align || !src_align)
774 return false;
775 if (readonly_data_expr (src)
776 || (tree_fits_uhwi_p (len)
777 && (MIN (src_align, dest_align) / BITS_PER_UNIT
778 >= tree_to_uhwi (len))))
780 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
781 if (!fn)
782 return false;
783 gimple_call_set_fndecl (stmt, fn);
784 gimple_call_set_arg (stmt, 0, dest);
785 gimple_call_set_arg (stmt, 1, src);
786 fold_stmt (gsi);
787 return true;
790 /* If *src and *dest can't overlap, optimize into memcpy as well. */
791 if (TREE_CODE (src) == ADDR_EXPR
792 && TREE_CODE (dest) == ADDR_EXPR)
794 tree src_base, dest_base, fn;
795 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
796 HOST_WIDE_INT size = -1;
797 HOST_WIDE_INT maxsize = -1;
798 bool reverse;
800 srcvar = TREE_OPERAND (src, 0);
801 src_base = get_ref_base_and_extent (srcvar, &src_offset,
802 &size, &maxsize, &reverse);
803 destvar = TREE_OPERAND (dest, 0);
804 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
805 &size, &maxsize, &reverse);
806 if (tree_fits_uhwi_p (len))
807 maxsize = tree_to_uhwi (len);
808 else
809 maxsize = -1;
810 src_offset /= BITS_PER_UNIT;
811 dest_offset /= BITS_PER_UNIT;
812 if (SSA_VAR_P (src_base)
813 && SSA_VAR_P (dest_base))
815 if (operand_equal_p (src_base, dest_base, 0)
816 && ranges_overlap_p (src_offset, maxsize,
817 dest_offset, maxsize))
818 return false;
820 else if (TREE_CODE (src_base) == MEM_REF
821 && TREE_CODE (dest_base) == MEM_REF)
823 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
824 TREE_OPERAND (dest_base, 0), 0))
825 return false;
826 offset_int off = mem_ref_offset (src_base) + src_offset;
827 if (!wi::fits_shwi_p (off))
828 return false;
829 src_offset = off.to_shwi ();
831 off = mem_ref_offset (dest_base) + dest_offset;
832 if (!wi::fits_shwi_p (off))
833 return false;
834 dest_offset = off.to_shwi ();
835 if (ranges_overlap_p (src_offset, maxsize,
836 dest_offset, maxsize))
837 return false;
839 else
840 return false;
842 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
843 if (!fn)
844 return false;
845 gimple_call_set_fndecl (stmt, fn);
846 gimple_call_set_arg (stmt, 0, dest);
847 gimple_call_set_arg (stmt, 1, src);
848 fold_stmt (gsi);
849 return true;
852 /* If the destination and source do not alias optimize into
853 memcpy as well. */
854 if ((is_gimple_min_invariant (dest)
855 || TREE_CODE (dest) == SSA_NAME)
856 && (is_gimple_min_invariant (src)
857 || TREE_CODE (src) == SSA_NAME))
859 ao_ref destr, srcr;
860 ao_ref_init_from_ptr_and_size (&destr, dest, len);
861 ao_ref_init_from_ptr_and_size (&srcr, src, len);
862 if (!refs_may_alias_p_1 (&destr, &srcr, false))
864 tree fn;
865 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
866 if (!fn)
867 return false;
868 gimple_call_set_fndecl (stmt, fn);
869 gimple_call_set_arg (stmt, 0, dest);
870 gimple_call_set_arg (stmt, 1, src);
871 fold_stmt (gsi);
872 return true;
876 return false;
879 if (!tree_fits_shwi_p (len))
880 return false;
881 /* FIXME:
882 This logic lose for arguments like (type *)malloc (sizeof (type)),
883 since we strip the casts of up to VOID return value from malloc.
884 Perhaps we ought to inherit type from non-VOID argument here? */
885 STRIP_NOPS (src);
886 STRIP_NOPS (dest);
887 if (!POINTER_TYPE_P (TREE_TYPE (src))
888 || !POINTER_TYPE_P (TREE_TYPE (dest)))
889 return false;
890 /* In the following try to find a type that is most natural to be
891 used for the memcpy source and destination and that allows
892 the most optimization when memcpy is turned into a plain assignment
893 using that type. In theory we could always use a char[len] type
894 but that only gains us that the destination and source possibly
895 no longer will have their address taken. */
896 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
897 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
899 tree tem = TREE_OPERAND (src, 0);
900 STRIP_NOPS (tem);
901 if (tem != TREE_OPERAND (src, 0))
902 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
904 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
906 tree tem = TREE_OPERAND (dest, 0);
907 STRIP_NOPS (tem);
908 if (tem != TREE_OPERAND (dest, 0))
909 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
911 srctype = TREE_TYPE (TREE_TYPE (src));
912 if (TREE_CODE (srctype) == ARRAY_TYPE
913 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
915 srctype = TREE_TYPE (srctype);
916 STRIP_NOPS (src);
917 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
919 desttype = TREE_TYPE (TREE_TYPE (dest));
920 if (TREE_CODE (desttype) == ARRAY_TYPE
921 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
923 desttype = TREE_TYPE (desttype);
924 STRIP_NOPS (dest);
925 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
927 if (TREE_ADDRESSABLE (srctype)
928 || TREE_ADDRESSABLE (desttype))
929 return false;
931 /* Make sure we are not copying using a floating-point mode or
932 a type whose size possibly does not match its precision. */
933 if (FLOAT_MODE_P (TYPE_MODE (desttype))
934 || TREE_CODE (desttype) == BOOLEAN_TYPE
935 || TREE_CODE (desttype) == ENUMERAL_TYPE)
936 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
937 if (FLOAT_MODE_P (TYPE_MODE (srctype))
938 || TREE_CODE (srctype) == BOOLEAN_TYPE
939 || TREE_CODE (srctype) == ENUMERAL_TYPE)
940 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
941 if (!srctype)
942 srctype = desttype;
943 if (!desttype)
944 desttype = srctype;
945 if (!srctype)
946 return false;
948 src_align = get_pointer_alignment (src);
949 dest_align = get_pointer_alignment (dest);
950 if (dest_align < TYPE_ALIGN (desttype)
951 || src_align < TYPE_ALIGN (srctype))
952 return false;
954 destvar = dest;
955 STRIP_NOPS (destvar);
956 if (TREE_CODE (destvar) == ADDR_EXPR
957 && var_decl_component_p (TREE_OPERAND (destvar, 0))
958 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
959 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
960 else
961 destvar = NULL_TREE;
963 srcvar = src;
964 STRIP_NOPS (srcvar);
965 if (TREE_CODE (srcvar) == ADDR_EXPR
966 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
967 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
969 if (!destvar
970 || src_align >= TYPE_ALIGN (desttype))
971 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
972 srcvar, off0);
973 else if (!STRICT_ALIGNMENT)
975 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
976 src_align);
977 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
979 else
980 srcvar = NULL_TREE;
982 else
983 srcvar = NULL_TREE;
985 if (srcvar == NULL_TREE && destvar == NULL_TREE)
986 return false;
988 if (srcvar == NULL_TREE)
990 STRIP_NOPS (src);
991 if (src_align >= TYPE_ALIGN (desttype))
992 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
993 else
995 if (STRICT_ALIGNMENT)
996 return false;
997 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
998 src_align);
999 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1002 else if (destvar == NULL_TREE)
1004 STRIP_NOPS (dest);
1005 if (dest_align >= TYPE_ALIGN (srctype))
1006 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1007 else
1009 if (STRICT_ALIGNMENT)
1010 return false;
1011 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1012 dest_align);
1013 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1017 gimple *new_stmt;
1018 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1020 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1021 if (gimple_in_ssa_p (cfun))
1022 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1023 else
1024 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1025 gimple_assign_set_lhs (new_stmt, srcvar);
1026 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1027 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1029 new_stmt = gimple_build_assign (destvar, srcvar);
1030 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1031 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1032 if (gimple_vdef (new_stmt)
1033 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1034 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1035 if (!lhs)
1037 gsi_replace (gsi, new_stmt, false);
1038 return true;
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 done:
1044 gimple_seq stmts = NULL;
1045 if (endp == 0 || endp == 3)
1046 len = NULL_TREE;
1047 else if (endp == 2)
1048 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1049 ssize_int (1));
1050 if (endp == 2 || endp == 1)
1052 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1053 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1054 TREE_TYPE (dest), dest, len);
1057 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1058 gimple *repl = gimple_build_assign (lhs, dest);
1059 gsi_replace (gsi, repl, false);
1060 return true;
1063 /* Fold function call to builtin memset or bzero at *GSI setting the
1064 memory of size LEN to VAL. Return whether a simplification was made. */
1066 static bool
1067 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1069 gimple *stmt = gsi_stmt (*gsi);
1070 tree etype;
1071 unsigned HOST_WIDE_INT length, cval;
1073 /* If the LEN parameter is zero, return DEST. */
1074 if (integer_zerop (len))
1076 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1077 return true;
1080 if (! tree_fits_uhwi_p (len))
1081 return false;
1083 if (TREE_CODE (c) != INTEGER_CST)
1084 return false;
1086 tree dest = gimple_call_arg (stmt, 0);
1087 tree var = dest;
1088 if (TREE_CODE (var) != ADDR_EXPR)
1089 return false;
1091 var = TREE_OPERAND (var, 0);
1092 if (TREE_THIS_VOLATILE (var))
1093 return false;
1095 etype = TREE_TYPE (var);
1096 if (TREE_CODE (etype) == ARRAY_TYPE)
1097 etype = TREE_TYPE (etype);
1099 if (!INTEGRAL_TYPE_P (etype)
1100 && !POINTER_TYPE_P (etype))
1101 return NULL_TREE;
1103 if (! var_decl_component_p (var))
1104 return NULL_TREE;
1106 length = tree_to_uhwi (len);
1107 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1108 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1109 return NULL_TREE;
1111 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1112 return NULL_TREE;
1114 if (integer_zerop (c))
1115 cval = 0;
1116 else
1118 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1119 return NULL_TREE;
1121 cval = TREE_INT_CST_LOW (c);
1122 cval &= 0xff;
1123 cval |= cval << 8;
1124 cval |= cval << 16;
1125 cval |= (cval << 31) << 1;
1128 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1129 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1130 gimple_set_vuse (store, gimple_vuse (stmt));
1131 tree vdef = gimple_vdef (stmt);
1132 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1134 gimple_set_vdef (store, gimple_vdef (stmt));
1135 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1137 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1138 if (gimple_call_lhs (stmt))
1140 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1141 gsi_replace (gsi, asgn, false);
1143 else
1145 gimple_stmt_iterator gsi2 = *gsi;
1146 gsi_prev (gsi);
1147 gsi_remove (&gsi2, true);
1150 return true;
1154 /* Return the string length, maximum string length or maximum value of
1155 ARG in LENGTH.
1156 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1157 is not NULL and, for TYPE == 0, its value is not equal to the length
1158 we determine or if we are unable to determine the length or value,
1159 return false. VISITED is a bitmap of visited variables.
1160 TYPE is 0 if string length should be returned, 1 for maximum string
1161 length and 2 for maximum value ARG can have. */
1163 static bool
1164 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1166 tree var, val;
1167 gimple *def_stmt;
1169 if (TREE_CODE (arg) != SSA_NAME)
1171 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1172 if (TREE_CODE (arg) == ADDR_EXPR
1173 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1174 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1176 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1177 if (TREE_CODE (aop0) == INDIRECT_REF
1178 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1179 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1180 length, visited, type);
1183 if (type == 2)
1185 val = arg;
1186 if (TREE_CODE (val) != INTEGER_CST
1187 || tree_int_cst_sgn (val) < 0)
1188 return false;
1190 else
1191 val = c_strlen (arg, 1);
1192 if (!val)
1193 return false;
1195 if (*length)
1197 if (type > 0)
1199 if (TREE_CODE (*length) != INTEGER_CST
1200 || TREE_CODE (val) != INTEGER_CST)
1201 return false;
1203 if (tree_int_cst_lt (*length, val))
1204 *length = val;
1205 return true;
1207 else if (simple_cst_equal (val, *length) != 1)
1208 return false;
1211 *length = val;
1212 return true;
1215 /* If ARG is registered for SSA update we cannot look at its defining
1216 statement. */
1217 if (name_registered_for_update_p (arg))
1218 return false;
1220 /* If we were already here, break the infinite cycle. */
1221 if (!*visited)
1222 *visited = BITMAP_ALLOC (NULL);
1223 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1224 return true;
1226 var = arg;
1227 def_stmt = SSA_NAME_DEF_STMT (var);
1229 switch (gimple_code (def_stmt))
1231 case GIMPLE_ASSIGN:
1232 /* The RHS of the statement defining VAR must either have a
1233 constant length or come from another SSA_NAME with a constant
1234 length. */
1235 if (gimple_assign_single_p (def_stmt)
1236 || gimple_assign_unary_nop_p (def_stmt))
1238 tree rhs = gimple_assign_rhs1 (def_stmt);
1239 return get_maxval_strlen (rhs, length, visited, type);
1241 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1243 tree op2 = gimple_assign_rhs2 (def_stmt);
1244 tree op3 = gimple_assign_rhs3 (def_stmt);
1245 return get_maxval_strlen (op2, length, visited, type)
1246 && get_maxval_strlen (op3, length, visited, type);
1248 return false;
1250 case GIMPLE_PHI:
1252 /* All the arguments of the PHI node must have the same constant
1253 length. */
1254 unsigned i;
1256 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1258 tree arg = gimple_phi_arg (def_stmt, i)->def;
1260 /* If this PHI has itself as an argument, we cannot
1261 determine the string length of this argument. However,
1262 if we can find a constant string length for the other
1263 PHI args then we can still be sure that this is a
1264 constant string length. So be optimistic and just
1265 continue with the next argument. */
1266 if (arg == gimple_phi_result (def_stmt))
1267 continue;
1269 if (!get_maxval_strlen (arg, length, visited, type))
1270 return false;
1273 return true;
1275 default:
1276 return false;
1280 tree
1281 get_maxval_strlen (tree arg, int type)
1283 bitmap visited = NULL;
1284 tree len = NULL_TREE;
1285 if (!get_maxval_strlen (arg, &len, &visited, type))
1286 len = NULL_TREE;
1287 if (visited)
1288 BITMAP_FREE (visited);
1290 return len;
1294 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1295 If LEN is not NULL, it represents the length of the string to be
1296 copied. Return NULL_TREE if no simplification can be made. */
1298 static bool
1299 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1300 tree dest, tree src)
1302 location_t loc = gimple_location (gsi_stmt (*gsi));
1303 tree fn;
1305 /* If SRC and DEST are the same (and not volatile), return DEST. */
1306 if (operand_equal_p (src, dest, 0))
1308 replace_call_with_value (gsi, dest);
1309 return true;
1312 if (optimize_function_for_size_p (cfun))
1313 return false;
1315 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1316 if (!fn)
1317 return false;
1319 tree len = get_maxval_strlen (src, 0);
1320 if (!len)
1321 return false;
1323 len = fold_convert_loc (loc, size_type_node, len);
1324 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1325 len = force_gimple_operand_gsi (gsi, len, true,
1326 NULL_TREE, true, GSI_SAME_STMT);
1327 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1328 replace_call_with_call_and_fold (gsi, repl);
1329 return true;
1332 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1333 If SLEN is not NULL, it represents the length of the source string.
1334 Return NULL_TREE if no simplification can be made. */
1336 static bool
1337 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1338 tree dest, tree src, tree len)
1340 location_t loc = gimple_location (gsi_stmt (*gsi));
1341 tree fn;
1343 /* If the LEN parameter is zero, return DEST. */
1344 if (integer_zerop (len))
1346 replace_call_with_value (gsi, dest);
1347 return true;
1350 /* We can't compare slen with len as constants below if len is not a
1351 constant. */
1352 if (TREE_CODE (len) != INTEGER_CST)
1353 return false;
1355 /* Now, we must be passed a constant src ptr parameter. */
1356 tree slen = get_maxval_strlen (src, 0);
1357 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1358 return false;
1360 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1362 /* We do not support simplification of this case, though we do
1363 support it when expanding trees into RTL. */
1364 /* FIXME: generate a call to __builtin_memset. */
1365 if (tree_int_cst_lt (slen, len))
1366 return false;
1368 /* OK transform into builtin memcpy. */
1369 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1370 if (!fn)
1371 return false;
1373 len = fold_convert_loc (loc, size_type_node, len);
1374 len = force_gimple_operand_gsi (gsi, len, true,
1375 NULL_TREE, true, GSI_SAME_STMT);
1376 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1377 replace_call_with_call_and_fold (gsi, repl);
1378 return true;
1381 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1382 to the call.
1384 Return NULL_TREE if no simplification was possible, otherwise return the
1385 simplified form of the call as a tree.
1387 The simplified form may be a constant or other expression which
1388 computes the same value, but in a more efficient manner (including
1389 calls to other builtin functions).
1391 The call may contain arguments which need to be evaluated, but
1392 which are not useful to determine the result of the call. In
1393 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1394 COMPOUND_EXPR will be an argument which must be evaluated.
1395 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1396 COMPOUND_EXPR in the chain will contain the tree for the simplified
1397 form of the builtin function call. */
1399 static bool
1400 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1402 gimple *stmt = gsi_stmt (*gsi);
1403 location_t loc = gimple_location (stmt);
1405 const char *p = c_getstr (src);
1407 /* If the string length is zero, return the dst parameter. */
1408 if (p && *p == '\0')
1410 replace_call_with_value (gsi, dst);
1411 return true;
1414 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1415 return false;
1417 /* See if we can store by pieces into (dst + strlen(dst)). */
1418 tree newdst;
1419 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1420 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1422 if (!strlen_fn || !memcpy_fn)
1423 return false;
1425 /* If the length of the source string isn't computable don't
1426 split strcat into strlen and memcpy. */
1427 tree len = get_maxval_strlen (src, 0);
1428 if (! len)
1429 return false;
1431 /* Create strlen (dst). */
1432 gimple_seq stmts = NULL, stmts2;
1433 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1434 gimple_set_location (repl, loc);
1435 if (gimple_in_ssa_p (cfun))
1436 newdst = make_ssa_name (size_type_node);
1437 else
1438 newdst = create_tmp_reg (size_type_node);
1439 gimple_call_set_lhs (repl, newdst);
1440 gimple_seq_add_stmt_without_update (&stmts, repl);
1442 /* Create (dst p+ strlen (dst)). */
1443 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1444 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1445 gimple_seq_add_seq_without_update (&stmts, stmts2);
1447 len = fold_convert_loc (loc, size_type_node, len);
1448 len = size_binop_loc (loc, PLUS_EXPR, len,
1449 build_int_cst (size_type_node, 1));
1450 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1451 gimple_seq_add_seq_without_update (&stmts, stmts2);
1453 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1454 gimple_seq_add_stmt_without_update (&stmts, repl);
1455 if (gimple_call_lhs (stmt))
1457 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1458 gimple_seq_add_stmt_without_update (&stmts, repl);
1459 gsi_replace_with_seq_vops (gsi, stmts);
1460 /* gsi now points at the assignment to the lhs, get a
1461 stmt iterator to the memcpy call.
1462 ??? We can't use gsi_for_stmt as that doesn't work when the
1463 CFG isn't built yet. */
1464 gimple_stmt_iterator gsi2 = *gsi;
1465 gsi_prev (&gsi2);
1466 fold_stmt (&gsi2);
1468 else
1470 gsi_replace_with_seq_vops (gsi, stmts);
1471 fold_stmt (gsi);
1473 return true;
1476 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1477 are the arguments to the call. */
1479 static bool
1480 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1482 gimple *stmt = gsi_stmt (*gsi);
1483 tree dest = gimple_call_arg (stmt, 0);
1484 tree src = gimple_call_arg (stmt, 1);
1485 tree size = gimple_call_arg (stmt, 2);
1486 tree fn;
1487 const char *p;
1490 p = c_getstr (src);
1491 /* If the SRC parameter is "", return DEST. */
1492 if (p && *p == '\0')
1494 replace_call_with_value (gsi, dest);
1495 return true;
1498 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1499 return false;
1501 /* If __builtin_strcat_chk is used, assume strcat is available. */
1502 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1503 if (!fn)
1504 return false;
1506 gimple *repl = gimple_build_call (fn, 2, dest, src);
1507 replace_call_with_call_and_fold (gsi, repl);
1508 return true;
1511 /* Simplify a call to the strncat builtin. */
1513 static bool
1514 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1516 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1517 tree dst = gimple_call_arg (stmt, 0);
1518 tree src = gimple_call_arg (stmt, 1);
1519 tree len = gimple_call_arg (stmt, 2);
1521 const char *p = c_getstr (src);
1523 /* If the requested length is zero, or the src parameter string
1524 length is zero, return the dst parameter. */
1525 if (integer_zerop (len) || (p && *p == '\0'))
1527 replace_call_with_value (gsi, dst);
1528 return true;
1531 /* If the requested len is greater than or equal to the string
1532 length, call strcat. */
1533 if (TREE_CODE (len) == INTEGER_CST && p
1534 && compare_tree_int (len, strlen (p)) >= 0)
1536 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1538 /* If the replacement _DECL isn't initialized, don't do the
1539 transformation. */
1540 if (!fn)
1541 return false;
1543 gcall *repl = gimple_build_call (fn, 2, dst, src);
1544 replace_call_with_call_and_fold (gsi, repl);
1545 return true;
1548 return false;
1551 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1552 LEN, and SIZE. */
1554 static bool
1555 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1557 gimple *stmt = gsi_stmt (*gsi);
1558 tree dest = gimple_call_arg (stmt, 0);
1559 tree src = gimple_call_arg (stmt, 1);
1560 tree len = gimple_call_arg (stmt, 2);
1561 tree size = gimple_call_arg (stmt, 3);
1562 tree fn;
1563 const char *p;
1565 p = c_getstr (src);
1566 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1567 if ((p && *p == '\0')
1568 || integer_zerop (len))
1570 replace_call_with_value (gsi, dest);
1571 return true;
1574 if (! tree_fits_uhwi_p (size))
1575 return false;
1577 if (! integer_all_onesp (size))
1579 tree src_len = c_strlen (src, 1);
1580 if (src_len
1581 && tree_fits_uhwi_p (src_len)
1582 && tree_fits_uhwi_p (len)
1583 && ! tree_int_cst_lt (len, src_len))
1585 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1586 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1587 if (!fn)
1588 return false;
1590 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1591 replace_call_with_call_and_fold (gsi, repl);
1592 return true;
1594 return false;
1597 /* If __builtin_strncat_chk is used, assume strncat is available. */
1598 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1599 if (!fn)
1600 return false;
1602 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1603 replace_call_with_call_and_fold (gsi, repl);
1604 return true;
1607 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1608 to the call. IGNORE is true if the value returned
1609 by the builtin will be ignored. UNLOCKED is true is true if this
1610 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1611 the known length of the string. Return NULL_TREE if no simplification
1612 was possible. */
1614 static bool
1615 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1616 tree arg0, tree arg1,
1617 bool unlocked)
1619 gimple *stmt = gsi_stmt (*gsi);
1621 /* If we're using an unlocked function, assume the other unlocked
1622 functions exist explicitly. */
1623 tree const fn_fputc = (unlocked
1624 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1625 : builtin_decl_implicit (BUILT_IN_FPUTC));
1626 tree const fn_fwrite = (unlocked
1627 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1628 : builtin_decl_implicit (BUILT_IN_FWRITE));
1630 /* If the return value is used, don't do the transformation. */
1631 if (gimple_call_lhs (stmt))
1632 return false;
1634 /* Get the length of the string passed to fputs. If the length
1635 can't be determined, punt. */
1636 tree len = get_maxval_strlen (arg0, 0);
1637 if (!len
1638 || TREE_CODE (len) != INTEGER_CST)
1639 return false;
1641 switch (compare_tree_int (len, 1))
1643 case -1: /* length is 0, delete the call entirely . */
1644 replace_call_with_value (gsi, integer_zero_node);
1645 return true;
1647 case 0: /* length is 1, call fputc. */
1649 const char *p = c_getstr (arg0);
1650 if (p != NULL)
1652 if (!fn_fputc)
1653 return false;
1655 gimple *repl = gimple_build_call (fn_fputc, 2,
1656 build_int_cst
1657 (integer_type_node, p[0]), arg1);
1658 replace_call_with_call_and_fold (gsi, repl);
1659 return true;
1662 /* FALLTHROUGH */
1663 case 1: /* length is greater than 1, call fwrite. */
1665 /* If optimizing for size keep fputs. */
1666 if (optimize_function_for_size_p (cfun))
1667 return false;
1668 /* New argument list transforming fputs(string, stream) to
1669 fwrite(string, 1, len, stream). */
1670 if (!fn_fwrite)
1671 return false;
1673 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1674 size_one_node, len, arg1);
1675 replace_call_with_call_and_fold (gsi, repl);
1676 return true;
1678 default:
1679 gcc_unreachable ();
1681 return false;
1684 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1685 DEST, SRC, LEN, and SIZE are the arguments to the call.
1686 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1687 code of the builtin. If MAXLEN is not NULL, it is maximum length
1688 passed as third argument. */
1690 static bool
1691 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1692 tree dest, tree src, tree len, tree size,
1693 enum built_in_function fcode)
1695 gimple *stmt = gsi_stmt (*gsi);
1696 location_t loc = gimple_location (stmt);
1697 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1698 tree fn;
1700 /* If SRC and DEST are the same (and not volatile), return DEST
1701 (resp. DEST+LEN for __mempcpy_chk). */
1702 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1704 if (fcode != BUILT_IN_MEMPCPY_CHK)
1706 replace_call_with_value (gsi, dest);
1707 return true;
1709 else
1711 gimple_seq stmts = NULL;
1712 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1713 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1714 TREE_TYPE (dest), dest, len);
1715 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1716 replace_call_with_value (gsi, temp);
1717 return true;
1721 if (! tree_fits_uhwi_p (size))
1722 return false;
1724 tree maxlen = get_maxval_strlen (len, 2);
1725 if (! integer_all_onesp (size))
1727 if (! tree_fits_uhwi_p (len))
1729 /* If LEN is not constant, try MAXLEN too.
1730 For MAXLEN only allow optimizing into non-_ocs function
1731 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1732 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1734 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1736 /* (void) __mempcpy_chk () can be optimized into
1737 (void) __memcpy_chk (). */
1738 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1739 if (!fn)
1740 return false;
1742 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1743 replace_call_with_call_and_fold (gsi, repl);
1744 return true;
1746 return false;
1749 else
1750 maxlen = len;
1752 if (tree_int_cst_lt (size, maxlen))
1753 return false;
1756 fn = NULL_TREE;
1757 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1758 mem{cpy,pcpy,move,set} is available. */
1759 switch (fcode)
1761 case BUILT_IN_MEMCPY_CHK:
1762 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1763 break;
1764 case BUILT_IN_MEMPCPY_CHK:
1765 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1766 break;
1767 case BUILT_IN_MEMMOVE_CHK:
1768 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1769 break;
1770 case BUILT_IN_MEMSET_CHK:
1771 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1772 break;
1773 default:
1774 break;
1777 if (!fn)
1778 return false;
1780 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1781 replace_call_with_call_and_fold (gsi, repl);
1782 return true;
1785 /* Fold a call to the __st[rp]cpy_chk builtin.
1786 DEST, SRC, and SIZE are the arguments to the call.
1787 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1788 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1789 strings passed as second argument. */
1791 static bool
1792 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1793 tree dest,
1794 tree src, tree size,
1795 enum built_in_function fcode)
1797 gimple *stmt = gsi_stmt (*gsi);
1798 location_t loc = gimple_location (stmt);
1799 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1800 tree len, fn;
1802 /* If SRC and DEST are the same (and not volatile), return DEST. */
1803 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1805 replace_call_with_value (gsi, dest);
1806 return true;
1809 if (! tree_fits_uhwi_p (size))
1810 return false;
1812 tree maxlen = get_maxval_strlen (src, 1);
1813 if (! integer_all_onesp (size))
1815 len = c_strlen (src, 1);
1816 if (! len || ! tree_fits_uhwi_p (len))
1818 /* If LEN is not constant, try MAXLEN too.
1819 For MAXLEN only allow optimizing into non-_ocs function
1820 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1821 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1823 if (fcode == BUILT_IN_STPCPY_CHK)
1825 if (! ignore)
1826 return false;
1828 /* If return value of __stpcpy_chk is ignored,
1829 optimize into __strcpy_chk. */
1830 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1831 if (!fn)
1832 return false;
1834 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1835 replace_call_with_call_and_fold (gsi, repl);
1836 return true;
1839 if (! len || TREE_SIDE_EFFECTS (len))
1840 return false;
1842 /* If c_strlen returned something, but not a constant,
1843 transform __strcpy_chk into __memcpy_chk. */
1844 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1845 if (!fn)
1846 return false;
1848 gimple_seq stmts = NULL;
1849 len = gimple_convert (&stmts, loc, size_type_node, len);
1850 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1851 build_int_cst (size_type_node, 1));
1852 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1853 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1854 replace_call_with_call_and_fold (gsi, repl);
1855 return true;
1858 else
1859 maxlen = len;
1861 if (! tree_int_cst_lt (maxlen, size))
1862 return false;
1865 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1866 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1867 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1868 if (!fn)
1869 return false;
1871 gimple *repl = gimple_build_call (fn, 2, dest, src);
1872 replace_call_with_call_and_fold (gsi, repl);
1873 return true;
1876 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1877 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1878 length passed as third argument. IGNORE is true if return value can be
1879 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1881 static bool
1882 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1883 tree dest, tree src,
1884 tree len, tree size,
1885 enum built_in_function fcode)
1887 gimple *stmt = gsi_stmt (*gsi);
1888 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1889 tree fn;
1891 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1893 /* If return value of __stpncpy_chk is ignored,
1894 optimize into __strncpy_chk. */
1895 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1896 if (fn)
1898 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1899 replace_call_with_call_and_fold (gsi, repl);
1900 return true;
1904 if (! tree_fits_uhwi_p (size))
1905 return false;
1907 tree maxlen = get_maxval_strlen (len, 2);
1908 if (! integer_all_onesp (size))
1910 if (! tree_fits_uhwi_p (len))
1912 /* If LEN is not constant, try MAXLEN too.
1913 For MAXLEN only allow optimizing into non-_ocs function
1914 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1915 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1916 return false;
1918 else
1919 maxlen = len;
1921 if (tree_int_cst_lt (size, maxlen))
1922 return false;
1925 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1926 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1927 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1928 if (!fn)
1929 return false;
1931 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1932 replace_call_with_call_and_fold (gsi, repl);
1933 return true;
1936 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1937 Return NULL_TREE if no simplification can be made. */
1939 static bool
1940 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1942 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1943 location_t loc = gimple_location (stmt);
1944 tree dest = gimple_call_arg (stmt, 0);
1945 tree src = gimple_call_arg (stmt, 1);
1946 tree fn, len, lenp1;
1948 /* If the result is unused, replace stpcpy with strcpy. */
1949 if (gimple_call_lhs (stmt) == NULL_TREE)
1951 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1952 if (!fn)
1953 return false;
1954 gimple_call_set_fndecl (stmt, fn);
1955 fold_stmt (gsi);
1956 return true;
1959 len = c_strlen (src, 1);
1960 if (!len
1961 || TREE_CODE (len) != INTEGER_CST)
1962 return false;
1964 if (optimize_function_for_size_p (cfun)
1965 /* If length is zero it's small enough. */
1966 && !integer_zerop (len))
1967 return false;
1969 /* If the source has a known length replace stpcpy with memcpy. */
1970 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1971 if (!fn)
1972 return false;
1974 gimple_seq stmts = NULL;
1975 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1976 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1977 tem, build_int_cst (size_type_node, 1));
1978 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1979 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1980 gimple_set_vuse (repl, gimple_vuse (stmt));
1981 gimple_set_vdef (repl, gimple_vdef (stmt));
1982 if (gimple_vdef (repl)
1983 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1984 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1985 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1986 /* Replace the result with dest + len. */
1987 stmts = NULL;
1988 tem = gimple_convert (&stmts, loc, sizetype, len);
1989 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1990 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1991 POINTER_PLUS_EXPR, dest, tem);
1992 gsi_replace (gsi, ret, false);
1993 /* Finally fold the memcpy call. */
1994 gimple_stmt_iterator gsi2 = *gsi;
1995 gsi_prev (&gsi2);
1996 fold_stmt (&gsi2);
1997 return true;
2000 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2001 NULL_TREE if a normal call should be emitted rather than expanding
2002 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2003 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2004 passed as second argument. */
2006 static bool
2007 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2008 enum built_in_function fcode)
2010 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2011 tree dest, size, len, fn, fmt, flag;
2012 const char *fmt_str;
2014 /* Verify the required arguments in the original call. */
2015 if (gimple_call_num_args (stmt) < 5)
2016 return false;
2018 dest = gimple_call_arg (stmt, 0);
2019 len = gimple_call_arg (stmt, 1);
2020 flag = gimple_call_arg (stmt, 2);
2021 size = gimple_call_arg (stmt, 3);
2022 fmt = gimple_call_arg (stmt, 4);
2024 if (! tree_fits_uhwi_p (size))
2025 return false;
2027 if (! integer_all_onesp (size))
2029 tree maxlen = get_maxval_strlen (len, 2);
2030 if (! tree_fits_uhwi_p (len))
2032 /* If LEN is not constant, try MAXLEN too.
2033 For MAXLEN only allow optimizing into non-_ocs function
2034 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2035 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2036 return false;
2038 else
2039 maxlen = len;
2041 if (tree_int_cst_lt (size, maxlen))
2042 return false;
2045 if (!init_target_chars ())
2046 return false;
2048 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2049 or if format doesn't contain % chars or is "%s". */
2050 if (! integer_zerop (flag))
2052 fmt_str = c_getstr (fmt);
2053 if (fmt_str == NULL)
2054 return false;
2055 if (strchr (fmt_str, target_percent) != NULL
2056 && strcmp (fmt_str, target_percent_s))
2057 return false;
2060 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2061 available. */
2062 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2063 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2064 if (!fn)
2065 return false;
2067 /* Replace the called function and the first 5 argument by 3 retaining
2068 trailing varargs. */
2069 gimple_call_set_fndecl (stmt, fn);
2070 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2071 gimple_call_set_arg (stmt, 0, dest);
2072 gimple_call_set_arg (stmt, 1, len);
2073 gimple_call_set_arg (stmt, 2, fmt);
2074 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2075 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2076 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2077 fold_stmt (gsi);
2078 return true;
2081 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2082 Return NULL_TREE if a normal call should be emitted rather than
2083 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2084 or BUILT_IN_VSPRINTF_CHK. */
2086 static bool
2087 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2088 enum built_in_function fcode)
2090 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2091 tree dest, size, len, fn, fmt, flag;
2092 const char *fmt_str;
2093 unsigned nargs = gimple_call_num_args (stmt);
2095 /* Verify the required arguments in the original call. */
2096 if (nargs < 4)
2097 return false;
2098 dest = gimple_call_arg (stmt, 0);
2099 flag = gimple_call_arg (stmt, 1);
2100 size = gimple_call_arg (stmt, 2);
2101 fmt = gimple_call_arg (stmt, 3);
2103 if (! tree_fits_uhwi_p (size))
2104 return false;
2106 len = NULL_TREE;
2108 if (!init_target_chars ())
2109 return false;
2111 /* Check whether the format is a literal string constant. */
2112 fmt_str = c_getstr (fmt);
2113 if (fmt_str != NULL)
2115 /* If the format doesn't contain % args or %%, we know the size. */
2116 if (strchr (fmt_str, target_percent) == 0)
2118 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2119 len = build_int_cstu (size_type_node, strlen (fmt_str));
2121 /* If the format is "%s" and first ... argument is a string literal,
2122 we know the size too. */
2123 else if (fcode == BUILT_IN_SPRINTF_CHK
2124 && strcmp (fmt_str, target_percent_s) == 0)
2126 tree arg;
2128 if (nargs == 5)
2130 arg = gimple_call_arg (stmt, 4);
2131 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2133 len = c_strlen (arg, 1);
2134 if (! len || ! tree_fits_uhwi_p (len))
2135 len = NULL_TREE;
2141 if (! integer_all_onesp (size))
2143 if (! len || ! tree_int_cst_lt (len, size))
2144 return false;
2147 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2148 or if format doesn't contain % chars or is "%s". */
2149 if (! integer_zerop (flag))
2151 if (fmt_str == NULL)
2152 return false;
2153 if (strchr (fmt_str, target_percent) != NULL
2154 && strcmp (fmt_str, target_percent_s))
2155 return false;
2158 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2159 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2160 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2161 if (!fn)
2162 return false;
2164 /* Replace the called function and the first 4 argument by 2 retaining
2165 trailing varargs. */
2166 gimple_call_set_fndecl (stmt, fn);
2167 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2168 gimple_call_set_arg (stmt, 0, dest);
2169 gimple_call_set_arg (stmt, 1, fmt);
2170 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2171 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2172 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2173 fold_stmt (gsi);
2174 return true;
2177 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2178 ORIG may be null if this is a 2-argument call. We don't attempt to
2179 simplify calls with more than 3 arguments.
2181 Return NULL_TREE if no simplification was possible, otherwise return the
2182 simplified form of the call as a tree. If IGNORED is true, it means that
2183 the caller does not use the returned value of the function. */
2185 static bool
2186 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2188 gimple *stmt = gsi_stmt (*gsi);
2189 tree dest = gimple_call_arg (stmt, 0);
2190 tree fmt = gimple_call_arg (stmt, 1);
2191 tree orig = NULL_TREE;
2192 const char *fmt_str = NULL;
2194 /* Verify the required arguments in the original call. We deal with two
2195 types of sprintf() calls: 'sprintf (str, fmt)' and
2196 'sprintf (dest, "%s", orig)'. */
2197 if (gimple_call_num_args (stmt) > 3)
2198 return false;
2200 if (gimple_call_num_args (stmt) == 3)
2201 orig = gimple_call_arg (stmt, 2);
2203 /* Check whether the format is a literal string constant. */
2204 fmt_str = c_getstr (fmt);
2205 if (fmt_str == NULL)
2206 return false;
2208 if (!init_target_chars ())
2209 return false;
2211 /* If the format doesn't contain % args or %%, use strcpy. */
2212 if (strchr (fmt_str, target_percent) == NULL)
2214 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2216 if (!fn)
2217 return false;
2219 /* Don't optimize sprintf (buf, "abc", ptr++). */
2220 if (orig)
2221 return false;
2223 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2224 'format' is known to contain no % formats. */
2225 gimple_seq stmts = NULL;
2226 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2227 gimple_seq_add_stmt_without_update (&stmts, repl);
2228 if (gimple_call_lhs (stmt))
2230 repl = gimple_build_assign (gimple_call_lhs (stmt),
2231 build_int_cst (integer_type_node,
2232 strlen (fmt_str)));
2233 gimple_seq_add_stmt_without_update (&stmts, repl);
2234 gsi_replace_with_seq_vops (gsi, stmts);
2235 /* gsi now points at the assignment to the lhs, get a
2236 stmt iterator to the memcpy call.
2237 ??? We can't use gsi_for_stmt as that doesn't work when the
2238 CFG isn't built yet. */
2239 gimple_stmt_iterator gsi2 = *gsi;
2240 gsi_prev (&gsi2);
2241 fold_stmt (&gsi2);
2243 else
2245 gsi_replace_with_seq_vops (gsi, stmts);
2246 fold_stmt (gsi);
2248 return true;
2251 /* If the format is "%s", use strcpy if the result isn't used. */
2252 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2254 tree fn;
2255 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2257 if (!fn)
2258 return false;
2260 /* Don't crash on sprintf (str1, "%s"). */
2261 if (!orig)
2262 return false;
2264 tree orig_len = NULL_TREE;
2265 if (gimple_call_lhs (stmt))
2267 orig_len = get_maxval_strlen (orig, 0);
2268 if (!orig_len)
2269 return false;
2272 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2273 gimple_seq stmts = NULL;
2274 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2275 gimple_seq_add_stmt_without_update (&stmts, repl);
2276 if (gimple_call_lhs (stmt))
2278 if (!useless_type_conversion_p (integer_type_node,
2279 TREE_TYPE (orig_len)))
2280 orig_len = fold_convert (integer_type_node, orig_len);
2281 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2282 gimple_seq_add_stmt_without_update (&stmts, repl);
2283 gsi_replace_with_seq_vops (gsi, stmts);
2284 /* gsi now points at the assignment to the lhs, get a
2285 stmt iterator to the memcpy call.
2286 ??? We can't use gsi_for_stmt as that doesn't work when the
2287 CFG isn't built yet. */
2288 gimple_stmt_iterator gsi2 = *gsi;
2289 gsi_prev (&gsi2);
2290 fold_stmt (&gsi2);
2292 else
2294 gsi_replace_with_seq_vops (gsi, stmts);
2295 fold_stmt (gsi);
2297 return true;
2299 return false;
2302 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2303 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2304 attempt to simplify calls with more than 4 arguments.
2306 Return NULL_TREE if no simplification was possible, otherwise return the
2307 simplified form of the call as a tree. If IGNORED is true, it means that
2308 the caller does not use the returned value of the function. */
2310 static bool
2311 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2313 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2314 tree dest = gimple_call_arg (stmt, 0);
2315 tree destsize = gimple_call_arg (stmt, 1);
2316 tree fmt = gimple_call_arg (stmt, 2);
2317 tree orig = NULL_TREE;
2318 const char *fmt_str = NULL;
2320 if (gimple_call_num_args (stmt) > 4)
2321 return false;
2323 if (gimple_call_num_args (stmt) == 4)
2324 orig = gimple_call_arg (stmt, 3);
2326 if (!tree_fits_uhwi_p (destsize))
2327 return false;
2328 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2330 /* Check whether the format is a literal string constant. */
2331 fmt_str = c_getstr (fmt);
2332 if (fmt_str == NULL)
2333 return false;
2335 if (!init_target_chars ())
2336 return false;
2338 /* If the format doesn't contain % args or %%, use strcpy. */
2339 if (strchr (fmt_str, target_percent) == NULL)
2341 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2342 if (!fn)
2343 return false;
2345 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2346 if (orig)
2347 return false;
2349 /* We could expand this as
2350 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2351 or to
2352 memcpy (str, fmt_with_nul_at_cstm1, cst);
2353 but in the former case that might increase code size
2354 and in the latter case grow .rodata section too much.
2355 So punt for now. */
2356 size_t len = strlen (fmt_str);
2357 if (len >= destlen)
2358 return false;
2360 gimple_seq stmts = NULL;
2361 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2362 gimple_seq_add_stmt_without_update (&stmts, repl);
2363 if (gimple_call_lhs (stmt))
2365 repl = gimple_build_assign (gimple_call_lhs (stmt),
2366 build_int_cst (integer_type_node, len));
2367 gimple_seq_add_stmt_without_update (&stmts, repl);
2368 gsi_replace_with_seq_vops (gsi, stmts);
2369 /* gsi now points at the assignment to the lhs, get a
2370 stmt iterator to the memcpy call.
2371 ??? We can't use gsi_for_stmt as that doesn't work when the
2372 CFG isn't built yet. */
2373 gimple_stmt_iterator gsi2 = *gsi;
2374 gsi_prev (&gsi2);
2375 fold_stmt (&gsi2);
2377 else
2379 gsi_replace_with_seq_vops (gsi, stmts);
2380 fold_stmt (gsi);
2382 return true;
2385 /* If the format is "%s", use strcpy if the result isn't used. */
2386 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2388 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2389 if (!fn)
2390 return false;
2392 /* Don't crash on snprintf (str1, cst, "%s"). */
2393 if (!orig)
2394 return false;
2396 tree orig_len = get_maxval_strlen (orig, 0);
2397 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2398 return false;
2400 /* We could expand this as
2401 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2402 or to
2403 memcpy (str1, str2_with_nul_at_cstm1, cst);
2404 but in the former case that might increase code size
2405 and in the latter case grow .rodata section too much.
2406 So punt for now. */
2407 if (compare_tree_int (orig_len, destlen) >= 0)
2408 return false;
2410 /* Convert snprintf (str1, cst, "%s", str2) into
2411 strcpy (str1, str2) if strlen (str2) < cst. */
2412 gimple_seq stmts = NULL;
2413 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2414 gimple_seq_add_stmt_without_update (&stmts, repl);
2415 if (gimple_call_lhs (stmt))
2417 if (!useless_type_conversion_p (integer_type_node,
2418 TREE_TYPE (orig_len)))
2419 orig_len = fold_convert (integer_type_node, orig_len);
2420 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2421 gimple_seq_add_stmt_without_update (&stmts, repl);
2422 gsi_replace_with_seq_vops (gsi, stmts);
2423 /* gsi now points at the assignment to the lhs, get a
2424 stmt iterator to the memcpy call.
2425 ??? We can't use gsi_for_stmt as that doesn't work when the
2426 CFG isn't built yet. */
2427 gimple_stmt_iterator gsi2 = *gsi;
2428 gsi_prev (&gsi2);
2429 fold_stmt (&gsi2);
2431 else
2433 gsi_replace_with_seq_vops (gsi, stmts);
2434 fold_stmt (gsi);
2436 return true;
2438 return false;
2441 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2442 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2443 more than 3 arguments, and ARG may be null in the 2-argument case.
2445 Return NULL_TREE if no simplification was possible, otherwise return the
2446 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2447 code of the function to be simplified. */
2449 static bool
2450 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2451 tree fp, tree fmt, tree arg,
2452 enum built_in_function fcode)
2454 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2455 tree fn_fputc, fn_fputs;
2456 const char *fmt_str = NULL;
2458 /* If the return value is used, don't do the transformation. */
2459 if (gimple_call_lhs (stmt) != NULL_TREE)
2460 return false;
2462 /* Check whether the format is a literal string constant. */
2463 fmt_str = c_getstr (fmt);
2464 if (fmt_str == NULL)
2465 return false;
2467 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2469 /* If we're using an unlocked function, assume the other
2470 unlocked functions exist explicitly. */
2471 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2472 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2474 else
2476 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2477 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2480 if (!init_target_chars ())
2481 return false;
2483 /* If the format doesn't contain % args or %%, use strcpy. */
2484 if (strchr (fmt_str, target_percent) == NULL)
2486 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2487 && arg)
2488 return false;
2490 /* If the format specifier was "", fprintf does nothing. */
2491 if (fmt_str[0] == '\0')
2493 replace_call_with_value (gsi, NULL_TREE);
2494 return true;
2497 /* When "string" doesn't contain %, replace all cases of
2498 fprintf (fp, string) with fputs (string, fp). The fputs
2499 builtin will take care of special cases like length == 1. */
2500 if (fn_fputs)
2502 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2503 replace_call_with_call_and_fold (gsi, repl);
2504 return true;
2508 /* The other optimizations can be done only on the non-va_list variants. */
2509 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2510 return false;
2512 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2513 else if (strcmp (fmt_str, target_percent_s) == 0)
2515 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2516 return false;
2517 if (fn_fputs)
2519 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2520 replace_call_with_call_and_fold (gsi, repl);
2521 return true;
2525 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2526 else if (strcmp (fmt_str, target_percent_c) == 0)
2528 if (!arg
2529 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2530 return false;
2531 if (fn_fputc)
2533 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2534 replace_call_with_call_and_fold (gsi, repl);
2535 return true;
2539 return false;
2542 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2543 FMT and ARG are the arguments to the call; we don't fold cases with
2544 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2546 Return NULL_TREE if no simplification was possible, otherwise return the
2547 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2548 code of the function to be simplified. */
2550 static bool
2551 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2552 tree arg, enum built_in_function fcode)
2554 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2555 tree fn_putchar, fn_puts, newarg;
2556 const char *fmt_str = NULL;
2558 /* If the return value is used, don't do the transformation. */
2559 if (gimple_call_lhs (stmt) != NULL_TREE)
2560 return false;
2562 /* Check whether the format is a literal string constant. */
2563 fmt_str = c_getstr (fmt);
2564 if (fmt_str == NULL)
2565 return false;
2567 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2569 /* If we're using an unlocked function, assume the other
2570 unlocked functions exist explicitly. */
2571 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2572 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2574 else
2576 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2577 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2580 if (!init_target_chars ())
2581 return false;
2583 if (strcmp (fmt_str, target_percent_s) == 0
2584 || strchr (fmt_str, target_percent) == NULL)
2586 const char *str;
2588 if (strcmp (fmt_str, target_percent_s) == 0)
2590 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2591 return false;
2593 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2594 return false;
2596 str = c_getstr (arg);
2597 if (str == NULL)
2598 return false;
2600 else
2602 /* The format specifier doesn't contain any '%' characters. */
2603 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2604 && arg)
2605 return false;
2606 str = fmt_str;
2609 /* If the string was "", printf does nothing. */
2610 if (str[0] == '\0')
2612 replace_call_with_value (gsi, NULL_TREE);
2613 return true;
2616 /* If the string has length of 1, call putchar. */
2617 if (str[1] == '\0')
2619 /* Given printf("c"), (where c is any one character,)
2620 convert "c"[0] to an int and pass that to the replacement
2621 function. */
2622 newarg = build_int_cst (integer_type_node, str[0]);
2623 if (fn_putchar)
2625 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2626 replace_call_with_call_and_fold (gsi, repl);
2627 return true;
2630 else
2632 /* If the string was "string\n", call puts("string"). */
2633 size_t len = strlen (str);
2634 if ((unsigned char)str[len - 1] == target_newline
2635 && (size_t) (int) len == len
2636 && (int) len > 0)
2638 char *newstr;
2639 tree offset_node, string_cst;
2641 /* Create a NUL-terminated string that's one char shorter
2642 than the original, stripping off the trailing '\n'. */
2643 newarg = build_string_literal (len, str);
2644 string_cst = string_constant (newarg, &offset_node);
2645 gcc_checking_assert (string_cst
2646 && (TREE_STRING_LENGTH (string_cst)
2647 == (int) len)
2648 && integer_zerop (offset_node)
2649 && (unsigned char)
2650 TREE_STRING_POINTER (string_cst)[len - 1]
2651 == target_newline);
2652 /* build_string_literal creates a new STRING_CST,
2653 modify it in place to avoid double copying. */
2654 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2655 newstr[len - 1] = '\0';
2656 if (fn_puts)
2658 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2659 replace_call_with_call_and_fold (gsi, repl);
2660 return true;
2663 else
2664 /* We'd like to arrange to call fputs(string,stdout) here,
2665 but we need stdout and don't have a way to get it yet. */
2666 return false;
2670 /* The other optimizations can be done only on the non-va_list variants. */
2671 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2672 return false;
2674 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2675 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2677 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2678 return false;
2679 if (fn_puts)
2681 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2682 replace_call_with_call_and_fold (gsi, repl);
2683 return true;
2687 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2688 else if (strcmp (fmt_str, target_percent_c) == 0)
2690 if (!arg || ! useless_type_conversion_p (integer_type_node,
2691 TREE_TYPE (arg)))
2692 return false;
2693 if (fn_putchar)
2695 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2696 replace_call_with_call_and_fold (gsi, repl);
2697 return true;
2701 return false;
2706 /* Fold a call to __builtin_strlen with known length LEN. */
2708 static bool
2709 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2711 gimple *stmt = gsi_stmt (*gsi);
2712 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2713 if (!len)
2714 return false;
2715 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2716 replace_call_with_value (gsi, len);
2717 return true;
2720 /* Fold a call to __builtin_acc_on_device. */
2722 static bool
2723 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2725 /* Defer folding until we know which compiler we're in. */
2726 if (symtab->state != EXPANSION)
2727 return false;
2729 unsigned val_host = GOMP_DEVICE_HOST;
2730 unsigned val_dev = GOMP_DEVICE_NONE;
2732 #ifdef ACCEL_COMPILER
2733 val_host = GOMP_DEVICE_NOT_HOST;
2734 val_dev = ACCEL_COMPILER_acc_device;
2735 #endif
2737 location_t loc = gimple_location (gsi_stmt (*gsi));
2739 tree host_eq = make_ssa_name (boolean_type_node);
2740 gimple *host_ass = gimple_build_assign
2741 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2742 gimple_set_location (host_ass, loc);
2743 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2745 tree dev_eq = make_ssa_name (boolean_type_node);
2746 gimple *dev_ass = gimple_build_assign
2747 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2748 gimple_set_location (dev_ass, loc);
2749 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2751 tree result = make_ssa_name (boolean_type_node);
2752 gimple *result_ass = gimple_build_assign
2753 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2754 gimple_set_location (result_ass, loc);
2755 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2757 replace_call_with_value (gsi, result);
2759 return true;
2762 /* Fold the non-target builtin at *GSI and return whether any simplification
2763 was made. */
2765 static bool
2766 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2768 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2769 tree callee = gimple_call_fndecl (stmt);
2771 /* Give up for always_inline inline builtins until they are
2772 inlined. */
2773 if (avoid_folding_inline_builtin (callee))
2774 return false;
2776 unsigned n = gimple_call_num_args (stmt);
2777 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2778 switch (fcode)
2780 case BUILT_IN_BZERO:
2781 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2782 gimple_call_arg (stmt, 1));
2783 case BUILT_IN_MEMSET:
2784 return gimple_fold_builtin_memset (gsi,
2785 gimple_call_arg (stmt, 1),
2786 gimple_call_arg (stmt, 2));
2787 case BUILT_IN_BCOPY:
2788 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2789 gimple_call_arg (stmt, 0), 3);
2790 case BUILT_IN_MEMCPY:
2791 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2792 gimple_call_arg (stmt, 1), 0);
2793 case BUILT_IN_MEMPCPY:
2794 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2795 gimple_call_arg (stmt, 1), 1);
2796 case BUILT_IN_MEMMOVE:
2797 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2798 gimple_call_arg (stmt, 1), 3);
2799 case BUILT_IN_SPRINTF_CHK:
2800 case BUILT_IN_VSPRINTF_CHK:
2801 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2802 case BUILT_IN_STRCAT_CHK:
2803 return gimple_fold_builtin_strcat_chk (gsi);
2804 case BUILT_IN_STRNCAT_CHK:
2805 return gimple_fold_builtin_strncat_chk (gsi);
2806 case BUILT_IN_STRLEN:
2807 return gimple_fold_builtin_strlen (gsi);
2808 case BUILT_IN_STRCPY:
2809 return gimple_fold_builtin_strcpy (gsi,
2810 gimple_call_arg (stmt, 0),
2811 gimple_call_arg (stmt, 1));
2812 case BUILT_IN_STRNCPY:
2813 return gimple_fold_builtin_strncpy (gsi,
2814 gimple_call_arg (stmt, 0),
2815 gimple_call_arg (stmt, 1),
2816 gimple_call_arg (stmt, 2));
2817 case BUILT_IN_STRCAT:
2818 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2819 gimple_call_arg (stmt, 1));
2820 case BUILT_IN_STRNCAT:
2821 return gimple_fold_builtin_strncat (gsi);
2822 case BUILT_IN_FPUTS:
2823 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2824 gimple_call_arg (stmt, 1), false);
2825 case BUILT_IN_FPUTS_UNLOCKED:
2826 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2827 gimple_call_arg (stmt, 1), true);
2828 case BUILT_IN_MEMCPY_CHK:
2829 case BUILT_IN_MEMPCPY_CHK:
2830 case BUILT_IN_MEMMOVE_CHK:
2831 case BUILT_IN_MEMSET_CHK:
2832 return gimple_fold_builtin_memory_chk (gsi,
2833 gimple_call_arg (stmt, 0),
2834 gimple_call_arg (stmt, 1),
2835 gimple_call_arg (stmt, 2),
2836 gimple_call_arg (stmt, 3),
2837 fcode);
2838 case BUILT_IN_STPCPY:
2839 return gimple_fold_builtin_stpcpy (gsi);
2840 case BUILT_IN_STRCPY_CHK:
2841 case BUILT_IN_STPCPY_CHK:
2842 return gimple_fold_builtin_stxcpy_chk (gsi,
2843 gimple_call_arg (stmt, 0),
2844 gimple_call_arg (stmt, 1),
2845 gimple_call_arg (stmt, 2),
2846 fcode);
2847 case BUILT_IN_STRNCPY_CHK:
2848 case BUILT_IN_STPNCPY_CHK:
2849 return gimple_fold_builtin_stxncpy_chk (gsi,
2850 gimple_call_arg (stmt, 0),
2851 gimple_call_arg (stmt, 1),
2852 gimple_call_arg (stmt, 2),
2853 gimple_call_arg (stmt, 3),
2854 fcode);
2855 case BUILT_IN_SNPRINTF_CHK:
2856 case BUILT_IN_VSNPRINTF_CHK:
2857 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2858 case BUILT_IN_SNPRINTF:
2859 return gimple_fold_builtin_snprintf (gsi);
2860 case BUILT_IN_SPRINTF:
2861 return gimple_fold_builtin_sprintf (gsi);
2862 case BUILT_IN_FPRINTF:
2863 case BUILT_IN_FPRINTF_UNLOCKED:
2864 case BUILT_IN_VFPRINTF:
2865 if (n == 2 || n == 3)
2866 return gimple_fold_builtin_fprintf (gsi,
2867 gimple_call_arg (stmt, 0),
2868 gimple_call_arg (stmt, 1),
2869 n == 3
2870 ? gimple_call_arg (stmt, 2)
2871 : NULL_TREE,
2872 fcode);
2873 break;
2874 case BUILT_IN_FPRINTF_CHK:
2875 case BUILT_IN_VFPRINTF_CHK:
2876 if (n == 3 || n == 4)
2877 return gimple_fold_builtin_fprintf (gsi,
2878 gimple_call_arg (stmt, 0),
2879 gimple_call_arg (stmt, 2),
2880 n == 4
2881 ? gimple_call_arg (stmt, 3)
2882 : NULL_TREE,
2883 fcode);
2884 break;
2885 case BUILT_IN_PRINTF:
2886 case BUILT_IN_PRINTF_UNLOCKED:
2887 case BUILT_IN_VPRINTF:
2888 if (n == 1 || n == 2)
2889 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2890 n == 2
2891 ? gimple_call_arg (stmt, 1)
2892 : NULL_TREE, fcode);
2893 break;
2894 case BUILT_IN_PRINTF_CHK:
2895 case BUILT_IN_VPRINTF_CHK:
2896 if (n == 2 || n == 3)
2897 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2898 n == 3
2899 ? gimple_call_arg (stmt, 2)
2900 : NULL_TREE, fcode);
2901 break;
2902 case BUILT_IN_ACC_ON_DEVICE:
2903 return gimple_fold_builtin_acc_on_device (gsi,
2904 gimple_call_arg (stmt, 0));
2905 default:;
2908 /* Try the generic builtin folder. */
2909 bool ignore = (gimple_call_lhs (stmt) == NULL);
2910 tree result = fold_call_stmt (stmt, ignore);
2911 if (result)
2913 if (ignore)
2914 STRIP_NOPS (result);
2915 else
2916 result = fold_convert (gimple_call_return_type (stmt), result);
2917 if (!update_call_from_tree (gsi, result))
2918 gimplify_and_update_call_from_tree (gsi, result);
2919 return true;
2922 return false;
2925 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2926 function calls to constants, where possible. */
2928 static tree
2929 fold_internal_goacc_dim (const gimple *call)
2931 int axis = get_oacc_ifn_dim_arg (call);
2932 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2933 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2934 tree result = NULL_TREE;
2936 /* If the size is 1, or we only want the size and it is not dynamic,
2937 we know the answer. */
2938 if (size == 1 || (!is_pos && size))
2940 tree type = TREE_TYPE (gimple_call_lhs (call));
2941 result = build_int_cst (type, size - is_pos);
2944 return result;
2947 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2948 doesn't fit into TYPE. The test for overflow should be regardless of
2949 -fwrapv, and even for unsigned types. */
2951 bool
2952 arith_overflowed_p (enum tree_code code, const_tree type,
2953 const_tree arg0, const_tree arg1)
2955 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2956 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2957 widest2_int_cst;
2958 widest2_int warg0 = widest2_int_cst (arg0);
2959 widest2_int warg1 = widest2_int_cst (arg1);
2960 widest2_int wres;
2961 switch (code)
2963 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2964 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2965 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2966 default: gcc_unreachable ();
2968 signop sign = TYPE_SIGN (type);
2969 if (sign == UNSIGNED && wi::neg_p (wres))
2970 return true;
2971 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2974 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2975 The statement may be replaced by another statement, e.g., if the call
2976 simplifies to a constant value. Return true if any changes were made.
2977 It is assumed that the operands have been previously folded. */
2979 static bool
2980 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2982 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2983 tree callee;
2984 bool changed = false;
2985 unsigned i;
2987 /* Fold *& in call arguments. */
2988 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2989 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2991 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2992 if (tmp)
2994 gimple_call_set_arg (stmt, i, tmp);
2995 changed = true;
2999 /* Check for virtual calls that became direct calls. */
3000 callee = gimple_call_fn (stmt);
3001 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3003 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3005 if (dump_file && virtual_method_call_p (callee)
3006 && !possible_polymorphic_call_target_p
3007 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3008 (OBJ_TYPE_REF_EXPR (callee)))))
3010 fprintf (dump_file,
3011 "Type inheritance inconsistent devirtualization of ");
3012 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3013 fprintf (dump_file, " to ");
3014 print_generic_expr (dump_file, callee, TDF_SLIM);
3015 fprintf (dump_file, "\n");
3018 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3019 changed = true;
3021 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3023 bool final;
3024 vec <cgraph_node *>targets
3025 = possible_polymorphic_call_targets (callee, stmt, &final);
3026 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3028 tree lhs = gimple_call_lhs (stmt);
3029 if (dump_enabled_p ())
3031 location_t loc = gimple_location_safe (stmt);
3032 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3033 "folding virtual function call to %s\n",
3034 targets.length () == 1
3035 ? targets[0]->name ()
3036 : "__builtin_unreachable");
3038 if (targets.length () == 1)
3040 gimple_call_set_fndecl (stmt, targets[0]->decl);
3041 changed = true;
3042 /* If the call becomes noreturn, remove the lhs. */
3043 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3045 if (TREE_CODE (lhs) == SSA_NAME)
3047 tree var = create_tmp_var (TREE_TYPE (lhs));
3048 tree def = get_or_create_ssa_default_def (cfun, var);
3049 gimple *new_stmt = gimple_build_assign (lhs, def);
3050 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3052 gimple_call_set_lhs (stmt, NULL_TREE);
3054 maybe_remove_unused_call_args (cfun, stmt);
3056 else
3058 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3059 gimple *new_stmt = gimple_build_call (fndecl, 0);
3060 gimple_set_location (new_stmt, gimple_location (stmt));
3061 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3063 tree var = create_tmp_var (TREE_TYPE (lhs));
3064 tree def = get_or_create_ssa_default_def (cfun, var);
3066 /* To satisfy condition for
3067 cgraph_update_edges_for_call_stmt_node,
3068 we need to preserve GIMPLE_CALL statement
3069 at position of GSI iterator. */
3070 update_call_from_tree (gsi, def);
3071 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3073 else
3075 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3076 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3077 gsi_replace (gsi, new_stmt, false);
3079 return true;
3085 /* Check for indirect calls that became direct calls, and then
3086 no longer require a static chain. */
3087 if (gimple_call_chain (stmt))
3089 tree fn = gimple_call_fndecl (stmt);
3090 if (fn && !DECL_STATIC_CHAIN (fn))
3092 gimple_call_set_chain (stmt, NULL);
3093 changed = true;
3095 else
3097 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3098 if (tmp)
3100 gimple_call_set_chain (stmt, tmp);
3101 changed = true;
3106 if (inplace)
3107 return changed;
3109 /* Check for builtins that CCP can handle using information not
3110 available in the generic fold routines. */
3111 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3113 if (gimple_fold_builtin (gsi))
3114 changed = true;
3116 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3118 changed |= targetm.gimple_fold_builtin (gsi);
3120 else if (gimple_call_internal_p (stmt))
3122 enum tree_code subcode = ERROR_MARK;
3123 tree result = NULL_TREE;
3124 bool cplx_result = false;
3125 tree overflow = NULL_TREE;
3126 switch (gimple_call_internal_fn (stmt))
3128 case IFN_BUILTIN_EXPECT:
3129 result = fold_builtin_expect (gimple_location (stmt),
3130 gimple_call_arg (stmt, 0),
3131 gimple_call_arg (stmt, 1),
3132 gimple_call_arg (stmt, 2));
3133 break;
3134 case IFN_UBSAN_OBJECT_SIZE:
3135 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3136 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3137 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3138 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3139 gimple_call_arg (stmt, 2))))
3141 gsi_replace (gsi, gimple_build_nop (), false);
3142 unlink_stmt_vdef (stmt);
3143 release_defs (stmt);
3144 return true;
3146 break;
3147 case IFN_GOACC_DIM_SIZE:
3148 case IFN_GOACC_DIM_POS:
3149 result = fold_internal_goacc_dim (stmt);
3150 break;
3151 case IFN_UBSAN_CHECK_ADD:
3152 subcode = PLUS_EXPR;
3153 break;
3154 case IFN_UBSAN_CHECK_SUB:
3155 subcode = MINUS_EXPR;
3156 break;
3157 case IFN_UBSAN_CHECK_MUL:
3158 subcode = MULT_EXPR;
3159 break;
3160 case IFN_ADD_OVERFLOW:
3161 subcode = PLUS_EXPR;
3162 cplx_result = true;
3163 break;
3164 case IFN_SUB_OVERFLOW:
3165 subcode = MINUS_EXPR;
3166 cplx_result = true;
3167 break;
3168 case IFN_MUL_OVERFLOW:
3169 subcode = MULT_EXPR;
3170 cplx_result = true;
3171 break;
3172 default:
3173 break;
3175 if (subcode != ERROR_MARK)
3177 tree arg0 = gimple_call_arg (stmt, 0);
3178 tree arg1 = gimple_call_arg (stmt, 1);
3179 tree type = TREE_TYPE (arg0);
3180 if (cplx_result)
3182 tree lhs = gimple_call_lhs (stmt);
3183 if (lhs == NULL_TREE)
3184 type = NULL_TREE;
3185 else
3186 type = TREE_TYPE (TREE_TYPE (lhs));
3188 if (type == NULL_TREE)
3190 /* x = y + 0; x = y - 0; x = y * 0; */
3191 else if (integer_zerop (arg1))
3192 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3193 /* x = 0 + y; x = 0 * y; */
3194 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3195 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3196 /* x = y - y; */
3197 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3198 result = integer_zero_node;
3199 /* x = y * 1; x = 1 * y; */
3200 else if (subcode == MULT_EXPR && integer_onep (arg1))
3201 result = arg0;
3202 else if (subcode == MULT_EXPR && integer_onep (arg0))
3203 result = arg1;
3204 else if (TREE_CODE (arg0) == INTEGER_CST
3205 && TREE_CODE (arg1) == INTEGER_CST)
3207 if (cplx_result)
3208 result = int_const_binop (subcode, fold_convert (type, arg0),
3209 fold_convert (type, arg1));
3210 else
3211 result = int_const_binop (subcode, arg0, arg1);
3212 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3214 if (cplx_result)
3215 overflow = build_one_cst (type);
3216 else
3217 result = NULL_TREE;
3220 if (result)
3222 if (result == integer_zero_node)
3223 result = build_zero_cst (type);
3224 else if (cplx_result && TREE_TYPE (result) != type)
3226 if (TREE_CODE (result) == INTEGER_CST)
3228 if (arith_overflowed_p (PLUS_EXPR, type, result,
3229 integer_zero_node))
3230 overflow = build_one_cst (type);
3232 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3233 && TYPE_UNSIGNED (type))
3234 || (TYPE_PRECISION (type)
3235 < (TYPE_PRECISION (TREE_TYPE (result))
3236 + (TYPE_UNSIGNED (TREE_TYPE (result))
3237 && !TYPE_UNSIGNED (type)))))
3238 result = NULL_TREE;
3239 if (result)
3240 result = fold_convert (type, result);
3245 if (result)
3247 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3248 result = drop_tree_overflow (result);
3249 if (cplx_result)
3251 if (overflow == NULL_TREE)
3252 overflow = build_zero_cst (TREE_TYPE (result));
3253 tree ctype = build_complex_type (TREE_TYPE (result));
3254 if (TREE_CODE (result) == INTEGER_CST
3255 && TREE_CODE (overflow) == INTEGER_CST)
3256 result = build_complex (ctype, result, overflow);
3257 else
3258 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3259 ctype, result, overflow);
3261 if (!update_call_from_tree (gsi, result))
3262 gimplify_and_update_call_from_tree (gsi, result);
3263 changed = true;
3267 return changed;
3271 /* Return true whether NAME has a use on STMT. */
3273 static bool
3274 has_use_on_stmt (tree name, gimple *stmt)
3276 imm_use_iterator iter;
3277 use_operand_p use_p;
3278 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3279 if (USE_STMT (use_p) == stmt)
3280 return true;
3281 return false;
3284 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3285 gimple_simplify.
3287 Replaces *GSI with the simplification result in RCODE and OPS
3288 and the associated statements in *SEQ. Does the replacement
3289 according to INPLACE and returns true if the operation succeeded. */
3291 static bool
3292 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3293 code_helper rcode, tree *ops,
3294 gimple_seq *seq, bool inplace)
3296 gimple *stmt = gsi_stmt (*gsi);
3298 /* Play safe and do not allow abnormals to be mentioned in
3299 newly created statements. See also maybe_push_res_to_seq.
3300 As an exception allow such uses if there was a use of the
3301 same SSA name on the old stmt. */
3302 if ((TREE_CODE (ops[0]) == SSA_NAME
3303 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3304 && !has_use_on_stmt (ops[0], stmt))
3305 || (ops[1]
3306 && TREE_CODE (ops[1]) == SSA_NAME
3307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3308 && !has_use_on_stmt (ops[1], stmt))
3309 || (ops[2]
3310 && TREE_CODE (ops[2]) == SSA_NAME
3311 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3312 && !has_use_on_stmt (ops[2], stmt))
3313 || (COMPARISON_CLASS_P (ops[0])
3314 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3315 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3316 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3317 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3318 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3319 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3320 return false;
3322 /* Don't insert new statements when INPLACE is true, even if we could
3323 reuse STMT for the final statement. */
3324 if (inplace && !gimple_seq_empty_p (*seq))
3325 return false;
3327 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3329 gcc_assert (rcode.is_tree_code ());
3330 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3331 /* GIMPLE_CONDs condition may not throw. */
3332 && (!flag_exceptions
3333 || !cfun->can_throw_non_call_exceptions
3334 || !operation_could_trap_p (rcode,
3335 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3336 false, NULL_TREE)))
3337 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3338 else if (rcode == SSA_NAME)
3339 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3340 build_zero_cst (TREE_TYPE (ops[0])));
3341 else if (rcode == INTEGER_CST)
3343 if (integer_zerop (ops[0]))
3344 gimple_cond_make_false (cond_stmt);
3345 else
3346 gimple_cond_make_true (cond_stmt);
3348 else if (!inplace)
3350 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3351 ops, seq);
3352 if (!res)
3353 return false;
3354 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3355 build_zero_cst (TREE_TYPE (res)));
3357 else
3358 return false;
3359 if (dump_file && (dump_flags & TDF_DETAILS))
3361 fprintf (dump_file, "gimple_simplified to ");
3362 if (!gimple_seq_empty_p (*seq))
3363 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3364 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3365 0, TDF_SLIM);
3367 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3368 return true;
3370 else if (is_gimple_assign (stmt)
3371 && rcode.is_tree_code ())
3373 if (!inplace
3374 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3376 maybe_build_generic_op (rcode,
3377 TREE_TYPE (gimple_assign_lhs (stmt)),
3378 &ops[0], ops[1], ops[2]);
3379 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3380 if (dump_file && (dump_flags & TDF_DETAILS))
3382 fprintf (dump_file, "gimple_simplified to ");
3383 if (!gimple_seq_empty_p (*seq))
3384 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3385 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3386 0, TDF_SLIM);
3388 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3389 return true;
3392 else if (rcode.is_fn_code ()
3393 && gimple_call_combined_fn (stmt) == rcode)
3395 unsigned i;
3396 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3398 gcc_assert (ops[i] != NULL_TREE);
3399 gimple_call_set_arg (stmt, i, ops[i]);
3401 if (i < 3)
3402 gcc_assert (ops[i] == NULL_TREE);
3403 if (dump_file && (dump_flags & TDF_DETAILS))
3405 fprintf (dump_file, "gimple_simplified to ");
3406 if (!gimple_seq_empty_p (*seq))
3407 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3408 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3410 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3411 return true;
3413 else if (!inplace)
3415 if (gimple_has_lhs (stmt))
3417 tree lhs = gimple_get_lhs (stmt);
3418 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3419 ops, seq, lhs))
3420 return false;
3421 if (dump_file && (dump_flags & TDF_DETAILS))
3423 fprintf (dump_file, "gimple_simplified to ");
3424 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3426 gsi_replace_with_seq_vops (gsi, *seq);
3427 return true;
3429 else
3430 gcc_unreachable ();
3433 return false;
3436 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3438 static bool
3439 maybe_canonicalize_mem_ref_addr (tree *t)
3441 bool res = false;
3443 if (TREE_CODE (*t) == ADDR_EXPR)
3444 t = &TREE_OPERAND (*t, 0);
3446 while (handled_component_p (*t))
3447 t = &TREE_OPERAND (*t, 0);
3449 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3450 of invariant addresses into a SSA name MEM_REF address. */
3451 if (TREE_CODE (*t) == MEM_REF
3452 || TREE_CODE (*t) == TARGET_MEM_REF)
3454 tree addr = TREE_OPERAND (*t, 0);
3455 if (TREE_CODE (addr) == ADDR_EXPR
3456 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3457 || handled_component_p (TREE_OPERAND (addr, 0))))
3459 tree base;
3460 HOST_WIDE_INT coffset;
3461 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3462 &coffset);
3463 if (!base)
3464 gcc_unreachable ();
3466 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3467 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3468 TREE_OPERAND (*t, 1),
3469 size_int (coffset));
3470 res = true;
3472 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3473 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3476 /* Canonicalize back MEM_REFs to plain reference trees if the object
3477 accessed is a decl that has the same access semantics as the MEM_REF. */
3478 if (TREE_CODE (*t) == MEM_REF
3479 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3480 && integer_zerop (TREE_OPERAND (*t, 1))
3481 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3483 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3484 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3485 if (/* Same volatile qualification. */
3486 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3487 /* Same TBAA behavior with -fstrict-aliasing. */
3488 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3489 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3490 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3491 /* Same alignment. */
3492 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3493 /* We have to look out here to not drop a required conversion
3494 from the rhs to the lhs if *t appears on the lhs or vice-versa
3495 if it appears on the rhs. Thus require strict type
3496 compatibility. */
3497 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3499 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3500 res = true;
3504 /* Canonicalize TARGET_MEM_REF in particular with respect to
3505 the indexes becoming constant. */
3506 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3508 tree tem = maybe_fold_tmr (*t);
3509 if (tem)
3511 *t = tem;
3512 res = true;
3516 return res;
3519 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3520 distinguishes both cases. */
3522 static bool
3523 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3525 bool changed = false;
3526 gimple *stmt = gsi_stmt (*gsi);
3527 unsigned i;
3529 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3530 after propagation.
3531 ??? This shouldn't be done in generic folding but in the
3532 propagation helpers which also know whether an address was
3533 propagated.
3534 Also canonicalize operand order. */
3535 switch (gimple_code (stmt))
3537 case GIMPLE_ASSIGN:
3538 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3540 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3541 if ((REFERENCE_CLASS_P (*rhs)
3542 || TREE_CODE (*rhs) == ADDR_EXPR)
3543 && maybe_canonicalize_mem_ref_addr (rhs))
3544 changed = true;
3545 tree *lhs = gimple_assign_lhs_ptr (stmt);
3546 if (REFERENCE_CLASS_P (*lhs)
3547 && maybe_canonicalize_mem_ref_addr (lhs))
3548 changed = true;
3550 else
3552 /* Canonicalize operand order. */
3553 enum tree_code code = gimple_assign_rhs_code (stmt);
3554 if (TREE_CODE_CLASS (code) == tcc_comparison
3555 || commutative_tree_code (code)
3556 || commutative_ternary_tree_code (code))
3558 tree rhs1 = gimple_assign_rhs1 (stmt);
3559 tree rhs2 = gimple_assign_rhs2 (stmt);
3560 if (tree_swap_operands_p (rhs1, rhs2, false))
3562 gimple_assign_set_rhs1 (stmt, rhs2);
3563 gimple_assign_set_rhs2 (stmt, rhs1);
3564 if (TREE_CODE_CLASS (code) == tcc_comparison)
3565 gimple_assign_set_rhs_code (stmt,
3566 swap_tree_comparison (code));
3567 changed = true;
3571 break;
3572 case GIMPLE_CALL:
3574 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3576 tree *arg = gimple_call_arg_ptr (stmt, i);
3577 if (REFERENCE_CLASS_P (*arg)
3578 && maybe_canonicalize_mem_ref_addr (arg))
3579 changed = true;
3581 tree *lhs = gimple_call_lhs_ptr (stmt);
3582 if (*lhs
3583 && REFERENCE_CLASS_P (*lhs)
3584 && maybe_canonicalize_mem_ref_addr (lhs))
3585 changed = true;
3586 break;
3588 case GIMPLE_ASM:
3590 gasm *asm_stmt = as_a <gasm *> (stmt);
3591 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3593 tree link = gimple_asm_output_op (asm_stmt, i);
3594 tree op = TREE_VALUE (link);
3595 if (REFERENCE_CLASS_P (op)
3596 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3597 changed = true;
3599 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3601 tree link = gimple_asm_input_op (asm_stmt, i);
3602 tree op = TREE_VALUE (link);
3603 if ((REFERENCE_CLASS_P (op)
3604 || TREE_CODE (op) == ADDR_EXPR)
3605 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3606 changed = true;
3609 break;
3610 case GIMPLE_DEBUG:
3611 if (gimple_debug_bind_p (stmt))
3613 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3614 if (*val
3615 && (REFERENCE_CLASS_P (*val)
3616 || TREE_CODE (*val) == ADDR_EXPR)
3617 && maybe_canonicalize_mem_ref_addr (val))
3618 changed = true;
3620 break;
3621 case GIMPLE_COND:
3623 /* Canonicalize operand order. */
3624 tree lhs = gimple_cond_lhs (stmt);
3625 tree rhs = gimple_cond_rhs (stmt);
3626 if (tree_swap_operands_p (lhs, rhs, false))
3628 gcond *gc = as_a <gcond *> (stmt);
3629 gimple_cond_set_lhs (gc, rhs);
3630 gimple_cond_set_rhs (gc, lhs);
3631 gimple_cond_set_code (gc,
3632 swap_tree_comparison (gimple_cond_code (gc)));
3633 changed = true;
3636 default:;
3639 /* Dispatch to pattern-based folding. */
3640 if (!inplace
3641 || is_gimple_assign (stmt)
3642 || gimple_code (stmt) == GIMPLE_COND)
3644 gimple_seq seq = NULL;
3645 code_helper rcode;
3646 tree ops[3] = {};
3647 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3648 valueize, valueize))
3650 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3651 changed = true;
3652 else
3653 gimple_seq_discard (seq);
3657 stmt = gsi_stmt (*gsi);
3659 /* Fold the main computation performed by the statement. */
3660 switch (gimple_code (stmt))
3662 case GIMPLE_ASSIGN:
3664 /* Try to canonicalize for boolean-typed X the comparisons
3665 X == 0, X == 1, X != 0, and X != 1. */
3666 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3667 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3669 tree lhs = gimple_assign_lhs (stmt);
3670 tree op1 = gimple_assign_rhs1 (stmt);
3671 tree op2 = gimple_assign_rhs2 (stmt);
3672 tree type = TREE_TYPE (op1);
3674 /* Check whether the comparison operands are of the same boolean
3675 type as the result type is.
3676 Check that second operand is an integer-constant with value
3677 one or zero. */
3678 if (TREE_CODE (op2) == INTEGER_CST
3679 && (integer_zerop (op2) || integer_onep (op2))
3680 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3682 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3683 bool is_logical_not = false;
3685 /* X == 0 and X != 1 is a logical-not.of X
3686 X == 1 and X != 0 is X */
3687 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3688 || (cmp_code == NE_EXPR && integer_onep (op2)))
3689 is_logical_not = true;
3691 if (is_logical_not == false)
3692 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3693 /* Only for one-bit precision typed X the transformation
3694 !X -> ~X is valied. */
3695 else if (TYPE_PRECISION (type) == 1)
3696 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3697 /* Otherwise we use !X -> X ^ 1. */
3698 else
3699 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3700 build_int_cst (type, 1));
3701 changed = true;
3702 break;
3706 unsigned old_num_ops = gimple_num_ops (stmt);
3707 tree lhs = gimple_assign_lhs (stmt);
3708 tree new_rhs = fold_gimple_assign (gsi);
3709 if (new_rhs
3710 && !useless_type_conversion_p (TREE_TYPE (lhs),
3711 TREE_TYPE (new_rhs)))
3712 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3713 if (new_rhs
3714 && (!inplace
3715 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3717 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3718 changed = true;
3720 break;
3723 case GIMPLE_CALL:
3724 changed |= gimple_fold_call (gsi, inplace);
3725 break;
3727 case GIMPLE_ASM:
3728 /* Fold *& in asm operands. */
3730 gasm *asm_stmt = as_a <gasm *> (stmt);
3731 size_t noutputs;
3732 const char **oconstraints;
3733 const char *constraint;
3734 bool allows_mem, allows_reg;
3736 noutputs = gimple_asm_noutputs (asm_stmt);
3737 oconstraints = XALLOCAVEC (const char *, noutputs);
3739 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3741 tree link = gimple_asm_output_op (asm_stmt, i);
3742 tree op = TREE_VALUE (link);
3743 oconstraints[i]
3744 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3745 if (REFERENCE_CLASS_P (op)
3746 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3748 TREE_VALUE (link) = op;
3749 changed = true;
3752 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3754 tree link = gimple_asm_input_op (asm_stmt, i);
3755 tree op = TREE_VALUE (link);
3756 constraint
3757 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3758 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3759 oconstraints, &allows_mem, &allows_reg);
3760 if (REFERENCE_CLASS_P (op)
3761 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3762 != NULL_TREE)
3764 TREE_VALUE (link) = op;
3765 changed = true;
3769 break;
3771 case GIMPLE_DEBUG:
3772 if (gimple_debug_bind_p (stmt))
3774 tree val = gimple_debug_bind_get_value (stmt);
3775 if (val
3776 && REFERENCE_CLASS_P (val))
3778 tree tem = maybe_fold_reference (val, false);
3779 if (tem)
3781 gimple_debug_bind_set_value (stmt, tem);
3782 changed = true;
3785 else if (val
3786 && TREE_CODE (val) == ADDR_EXPR)
3788 tree ref = TREE_OPERAND (val, 0);
3789 tree tem = maybe_fold_reference (ref, false);
3790 if (tem)
3792 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3793 gimple_debug_bind_set_value (stmt, tem);
3794 changed = true;
3798 break;
3800 default:;
3803 stmt = gsi_stmt (*gsi);
3805 /* Fold *& on the lhs. */
3806 if (gimple_has_lhs (stmt))
3808 tree lhs = gimple_get_lhs (stmt);
3809 if (lhs && REFERENCE_CLASS_P (lhs))
3811 tree new_lhs = maybe_fold_reference (lhs, true);
3812 if (new_lhs)
3814 gimple_set_lhs (stmt, new_lhs);
3815 changed = true;
3820 return changed;
3823 /* Valueziation callback that ends up not following SSA edges. */
3825 tree
3826 no_follow_ssa_edges (tree)
3828 return NULL_TREE;
3831 /* Valueization callback that ends up following single-use SSA edges only. */
3833 tree
3834 follow_single_use_edges (tree val)
3836 if (TREE_CODE (val) == SSA_NAME
3837 && !has_single_use (val))
3838 return NULL_TREE;
3839 return val;
3842 /* Fold the statement pointed to by GSI. In some cases, this function may
3843 replace the whole statement with a new one. Returns true iff folding
3844 makes any changes.
3845 The statement pointed to by GSI should be in valid gimple form but may
3846 be in unfolded state as resulting from for example constant propagation
3847 which can produce *&x = 0. */
3849 bool
3850 fold_stmt (gimple_stmt_iterator *gsi)
3852 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3855 bool
3856 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3858 return fold_stmt_1 (gsi, false, valueize);
3861 /* Perform the minimal folding on statement *GSI. Only operations like
3862 *&x created by constant propagation are handled. The statement cannot
3863 be replaced with a new one. Return true if the statement was
3864 changed, false otherwise.
3865 The statement *GSI should be in valid gimple form but may
3866 be in unfolded state as resulting from for example constant propagation
3867 which can produce *&x = 0. */
3869 bool
3870 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3872 gimple *stmt = gsi_stmt (*gsi);
3873 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3874 gcc_assert (gsi_stmt (*gsi) == stmt);
3875 return changed;
3878 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3879 if EXPR is null or we don't know how.
3880 If non-null, the result always has boolean type. */
3882 static tree
3883 canonicalize_bool (tree expr, bool invert)
3885 if (!expr)
3886 return NULL_TREE;
3887 else if (invert)
3889 if (integer_nonzerop (expr))
3890 return boolean_false_node;
3891 else if (integer_zerop (expr))
3892 return boolean_true_node;
3893 else if (TREE_CODE (expr) == SSA_NAME)
3894 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3895 build_int_cst (TREE_TYPE (expr), 0));
3896 else if (COMPARISON_CLASS_P (expr))
3897 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3898 boolean_type_node,
3899 TREE_OPERAND (expr, 0),
3900 TREE_OPERAND (expr, 1));
3901 else
3902 return NULL_TREE;
3904 else
3906 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3907 return expr;
3908 if (integer_nonzerop (expr))
3909 return boolean_true_node;
3910 else if (integer_zerop (expr))
3911 return boolean_false_node;
3912 else if (TREE_CODE (expr) == SSA_NAME)
3913 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3914 build_int_cst (TREE_TYPE (expr), 0));
3915 else if (COMPARISON_CLASS_P (expr))
3916 return fold_build2 (TREE_CODE (expr),
3917 boolean_type_node,
3918 TREE_OPERAND (expr, 0),
3919 TREE_OPERAND (expr, 1));
3920 else
3921 return NULL_TREE;
3925 /* Check to see if a boolean expression EXPR is logically equivalent to the
3926 comparison (OP1 CODE OP2). Check for various identities involving
3927 SSA_NAMEs. */
3929 static bool
3930 same_bool_comparison_p (const_tree expr, enum tree_code code,
3931 const_tree op1, const_tree op2)
3933 gimple *s;
3935 /* The obvious case. */
3936 if (TREE_CODE (expr) == code
3937 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3938 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3939 return true;
3941 /* Check for comparing (name, name != 0) and the case where expr
3942 is an SSA_NAME with a definition matching the comparison. */
3943 if (TREE_CODE (expr) == SSA_NAME
3944 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3946 if (operand_equal_p (expr, op1, 0))
3947 return ((code == NE_EXPR && integer_zerop (op2))
3948 || (code == EQ_EXPR && integer_nonzerop (op2)));
3949 s = SSA_NAME_DEF_STMT (expr);
3950 if (is_gimple_assign (s)
3951 && gimple_assign_rhs_code (s) == code
3952 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3953 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3954 return true;
3957 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3958 of name is a comparison, recurse. */
3959 if (TREE_CODE (op1) == SSA_NAME
3960 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3962 s = SSA_NAME_DEF_STMT (op1);
3963 if (is_gimple_assign (s)
3964 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3966 enum tree_code c = gimple_assign_rhs_code (s);
3967 if ((c == NE_EXPR && integer_zerop (op2))
3968 || (c == EQ_EXPR && integer_nonzerop (op2)))
3969 return same_bool_comparison_p (expr, c,
3970 gimple_assign_rhs1 (s),
3971 gimple_assign_rhs2 (s));
3972 if ((c == EQ_EXPR && integer_zerop (op2))
3973 || (c == NE_EXPR && integer_nonzerop (op2)))
3974 return same_bool_comparison_p (expr,
3975 invert_tree_comparison (c, false),
3976 gimple_assign_rhs1 (s),
3977 gimple_assign_rhs2 (s));
3980 return false;
3983 /* Check to see if two boolean expressions OP1 and OP2 are logically
3984 equivalent. */
3986 static bool
3987 same_bool_result_p (const_tree op1, const_tree op2)
3989 /* Simple cases first. */
3990 if (operand_equal_p (op1, op2, 0))
3991 return true;
3993 /* Check the cases where at least one of the operands is a comparison.
3994 These are a bit smarter than operand_equal_p in that they apply some
3995 identifies on SSA_NAMEs. */
3996 if (COMPARISON_CLASS_P (op2)
3997 && same_bool_comparison_p (op1, TREE_CODE (op2),
3998 TREE_OPERAND (op2, 0),
3999 TREE_OPERAND (op2, 1)))
4000 return true;
4001 if (COMPARISON_CLASS_P (op1)
4002 && same_bool_comparison_p (op2, TREE_CODE (op1),
4003 TREE_OPERAND (op1, 0),
4004 TREE_OPERAND (op1, 1)))
4005 return true;
4007 /* Default case. */
4008 return false;
4011 /* Forward declarations for some mutually recursive functions. */
4013 static tree
4014 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4015 enum tree_code code2, tree op2a, tree op2b);
4016 static tree
4017 and_var_with_comparison (tree var, bool invert,
4018 enum tree_code code2, tree op2a, tree op2b);
4019 static tree
4020 and_var_with_comparison_1 (gimple *stmt,
4021 enum tree_code code2, tree op2a, tree op2b);
4022 static tree
4023 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4024 enum tree_code code2, tree op2a, tree op2b);
4025 static tree
4026 or_var_with_comparison (tree var, bool invert,
4027 enum tree_code code2, tree op2a, tree op2b);
4028 static tree
4029 or_var_with_comparison_1 (gimple *stmt,
4030 enum tree_code code2, tree op2a, tree op2b);
4032 /* Helper function for and_comparisons_1: try to simplify the AND of the
4033 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4034 If INVERT is true, invert the value of the VAR before doing the AND.
4035 Return NULL_EXPR if we can't simplify this to a single expression. */
4037 static tree
4038 and_var_with_comparison (tree var, bool invert,
4039 enum tree_code code2, tree op2a, tree op2b)
4041 tree t;
4042 gimple *stmt = SSA_NAME_DEF_STMT (var);
4044 /* We can only deal with variables whose definitions are assignments. */
4045 if (!is_gimple_assign (stmt))
4046 return NULL_TREE;
4048 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4049 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4050 Then we only have to consider the simpler non-inverted cases. */
4051 if (invert)
4052 t = or_var_with_comparison_1 (stmt,
4053 invert_tree_comparison (code2, false),
4054 op2a, op2b);
4055 else
4056 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4057 return canonicalize_bool (t, invert);
4060 /* Try to simplify the AND of the ssa variable defined by the assignment
4061 STMT with the comparison specified by (OP2A CODE2 OP2B).
4062 Return NULL_EXPR if we can't simplify this to a single expression. */
4064 static tree
4065 and_var_with_comparison_1 (gimple *stmt,
4066 enum tree_code code2, tree op2a, tree op2b)
4068 tree var = gimple_assign_lhs (stmt);
4069 tree true_test_var = NULL_TREE;
4070 tree false_test_var = NULL_TREE;
4071 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4073 /* Check for identities like (var AND (var == 0)) => false. */
4074 if (TREE_CODE (op2a) == SSA_NAME
4075 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4077 if ((code2 == NE_EXPR && integer_zerop (op2b))
4078 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4080 true_test_var = op2a;
4081 if (var == true_test_var)
4082 return var;
4084 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4085 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4087 false_test_var = op2a;
4088 if (var == false_test_var)
4089 return boolean_false_node;
4093 /* If the definition is a comparison, recurse on it. */
4094 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4096 tree t = and_comparisons_1 (innercode,
4097 gimple_assign_rhs1 (stmt),
4098 gimple_assign_rhs2 (stmt),
4099 code2,
4100 op2a,
4101 op2b);
4102 if (t)
4103 return t;
4106 /* If the definition is an AND or OR expression, we may be able to
4107 simplify by reassociating. */
4108 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4109 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4111 tree inner1 = gimple_assign_rhs1 (stmt);
4112 tree inner2 = gimple_assign_rhs2 (stmt);
4113 gimple *s;
4114 tree t;
4115 tree partial = NULL_TREE;
4116 bool is_and = (innercode == BIT_AND_EXPR);
4118 /* Check for boolean identities that don't require recursive examination
4119 of inner1/inner2:
4120 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4121 inner1 AND (inner1 OR inner2) => inner1
4122 !inner1 AND (inner1 AND inner2) => false
4123 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4124 Likewise for similar cases involving inner2. */
4125 if (inner1 == true_test_var)
4126 return (is_and ? var : inner1);
4127 else if (inner2 == true_test_var)
4128 return (is_and ? var : inner2);
4129 else if (inner1 == false_test_var)
4130 return (is_and
4131 ? boolean_false_node
4132 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4133 else if (inner2 == false_test_var)
4134 return (is_and
4135 ? boolean_false_node
4136 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4138 /* Next, redistribute/reassociate the AND across the inner tests.
4139 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4140 if (TREE_CODE (inner1) == SSA_NAME
4141 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4142 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4143 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4144 gimple_assign_rhs1 (s),
4145 gimple_assign_rhs2 (s),
4146 code2, op2a, op2b)))
4148 /* Handle the AND case, where we are reassociating:
4149 (inner1 AND inner2) AND (op2a code2 op2b)
4150 => (t AND inner2)
4151 If the partial result t is a constant, we win. Otherwise
4152 continue on to try reassociating with the other inner test. */
4153 if (is_and)
4155 if (integer_onep (t))
4156 return inner2;
4157 else if (integer_zerop (t))
4158 return boolean_false_node;
4161 /* Handle the OR case, where we are redistributing:
4162 (inner1 OR inner2) AND (op2a code2 op2b)
4163 => (t OR (inner2 AND (op2a code2 op2b))) */
4164 else if (integer_onep (t))
4165 return boolean_true_node;
4167 /* Save partial result for later. */
4168 partial = t;
4171 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4172 if (TREE_CODE (inner2) == SSA_NAME
4173 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4174 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4175 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4176 gimple_assign_rhs1 (s),
4177 gimple_assign_rhs2 (s),
4178 code2, op2a, op2b)))
4180 /* Handle the AND case, where we are reassociating:
4181 (inner1 AND inner2) AND (op2a code2 op2b)
4182 => (inner1 AND t) */
4183 if (is_and)
4185 if (integer_onep (t))
4186 return inner1;
4187 else if (integer_zerop (t))
4188 return boolean_false_node;
4189 /* If both are the same, we can apply the identity
4190 (x AND x) == x. */
4191 else if (partial && same_bool_result_p (t, partial))
4192 return t;
4195 /* Handle the OR case. where we are redistributing:
4196 (inner1 OR inner2) AND (op2a code2 op2b)
4197 => (t OR (inner1 AND (op2a code2 op2b)))
4198 => (t OR partial) */
4199 else
4201 if (integer_onep (t))
4202 return boolean_true_node;
4203 else if (partial)
4205 /* We already got a simplification for the other
4206 operand to the redistributed OR expression. The
4207 interesting case is when at least one is false.
4208 Or, if both are the same, we can apply the identity
4209 (x OR x) == x. */
4210 if (integer_zerop (partial))
4211 return t;
4212 else if (integer_zerop (t))
4213 return partial;
4214 else if (same_bool_result_p (t, partial))
4215 return t;
4220 return NULL_TREE;
4223 /* Try to simplify the AND of two comparisons defined by
4224 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4225 If this can be done without constructing an intermediate value,
4226 return the resulting tree; otherwise NULL_TREE is returned.
4227 This function is deliberately asymmetric as it recurses on SSA_DEFs
4228 in the first comparison but not the second. */
4230 static tree
4231 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4232 enum tree_code code2, tree op2a, tree op2b)
4234 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4236 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4237 if (operand_equal_p (op1a, op2a, 0)
4238 && operand_equal_p (op1b, op2b, 0))
4240 /* Result will be either NULL_TREE, or a combined comparison. */
4241 tree t = combine_comparisons (UNKNOWN_LOCATION,
4242 TRUTH_ANDIF_EXPR, code1, code2,
4243 truth_type, op1a, op1b);
4244 if (t)
4245 return t;
4248 /* Likewise the swapped case of the above. */
4249 if (operand_equal_p (op1a, op2b, 0)
4250 && operand_equal_p (op1b, op2a, 0))
4252 /* Result will be either NULL_TREE, or a combined comparison. */
4253 tree t = combine_comparisons (UNKNOWN_LOCATION,
4254 TRUTH_ANDIF_EXPR, code1,
4255 swap_tree_comparison (code2),
4256 truth_type, op1a, op1b);
4257 if (t)
4258 return t;
4261 /* If both comparisons are of the same value against constants, we might
4262 be able to merge them. */
4263 if (operand_equal_p (op1a, op2a, 0)
4264 && TREE_CODE (op1b) == INTEGER_CST
4265 && TREE_CODE (op2b) == INTEGER_CST)
4267 int cmp = tree_int_cst_compare (op1b, op2b);
4269 /* If we have (op1a == op1b), we should either be able to
4270 return that or FALSE, depending on whether the constant op1b
4271 also satisfies the other comparison against op2b. */
4272 if (code1 == EQ_EXPR)
4274 bool done = true;
4275 bool val;
4276 switch (code2)
4278 case EQ_EXPR: val = (cmp == 0); break;
4279 case NE_EXPR: val = (cmp != 0); break;
4280 case LT_EXPR: val = (cmp < 0); break;
4281 case GT_EXPR: val = (cmp > 0); break;
4282 case LE_EXPR: val = (cmp <= 0); break;
4283 case GE_EXPR: val = (cmp >= 0); break;
4284 default: done = false;
4286 if (done)
4288 if (val)
4289 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4290 else
4291 return boolean_false_node;
4294 /* Likewise if the second comparison is an == comparison. */
4295 else if (code2 == EQ_EXPR)
4297 bool done = true;
4298 bool val;
4299 switch (code1)
4301 case EQ_EXPR: val = (cmp == 0); break;
4302 case NE_EXPR: val = (cmp != 0); break;
4303 case LT_EXPR: val = (cmp > 0); break;
4304 case GT_EXPR: val = (cmp < 0); break;
4305 case LE_EXPR: val = (cmp >= 0); break;
4306 case GE_EXPR: val = (cmp <= 0); break;
4307 default: done = false;
4309 if (done)
4311 if (val)
4312 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4313 else
4314 return boolean_false_node;
4318 /* Same business with inequality tests. */
4319 else if (code1 == NE_EXPR)
4321 bool val;
4322 switch (code2)
4324 case EQ_EXPR: val = (cmp != 0); break;
4325 case NE_EXPR: val = (cmp == 0); break;
4326 case LT_EXPR: val = (cmp >= 0); break;
4327 case GT_EXPR: val = (cmp <= 0); break;
4328 case LE_EXPR: val = (cmp > 0); break;
4329 case GE_EXPR: val = (cmp < 0); break;
4330 default:
4331 val = false;
4333 if (val)
4334 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4336 else if (code2 == NE_EXPR)
4338 bool val;
4339 switch (code1)
4341 case EQ_EXPR: val = (cmp == 0); break;
4342 case NE_EXPR: val = (cmp != 0); break;
4343 case LT_EXPR: val = (cmp <= 0); break;
4344 case GT_EXPR: val = (cmp >= 0); break;
4345 case LE_EXPR: val = (cmp < 0); break;
4346 case GE_EXPR: val = (cmp > 0); break;
4347 default:
4348 val = false;
4350 if (val)
4351 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4354 /* Chose the more restrictive of two < or <= comparisons. */
4355 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4356 && (code2 == LT_EXPR || code2 == LE_EXPR))
4358 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4359 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4360 else
4361 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4364 /* Likewise chose the more restrictive of two > or >= comparisons. */
4365 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4366 && (code2 == GT_EXPR || code2 == GE_EXPR))
4368 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4369 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4370 else
4371 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4374 /* Check for singleton ranges. */
4375 else if (cmp == 0
4376 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4377 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4378 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4380 /* Check for disjoint ranges. */
4381 else if (cmp <= 0
4382 && (code1 == LT_EXPR || code1 == LE_EXPR)
4383 && (code2 == GT_EXPR || code2 == GE_EXPR))
4384 return boolean_false_node;
4385 else if (cmp >= 0
4386 && (code1 == GT_EXPR || code1 == GE_EXPR)
4387 && (code2 == LT_EXPR || code2 == LE_EXPR))
4388 return boolean_false_node;
4391 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4392 NAME's definition is a truth value. See if there are any simplifications
4393 that can be done against the NAME's definition. */
4394 if (TREE_CODE (op1a) == SSA_NAME
4395 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4396 && (integer_zerop (op1b) || integer_onep (op1b)))
4398 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4399 || (code1 == NE_EXPR && integer_onep (op1b)));
4400 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4401 switch (gimple_code (stmt))
4403 case GIMPLE_ASSIGN:
4404 /* Try to simplify by copy-propagating the definition. */
4405 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4407 case GIMPLE_PHI:
4408 /* If every argument to the PHI produces the same result when
4409 ANDed with the second comparison, we win.
4410 Do not do this unless the type is bool since we need a bool
4411 result here anyway. */
4412 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4414 tree result = NULL_TREE;
4415 unsigned i;
4416 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4418 tree arg = gimple_phi_arg_def (stmt, i);
4420 /* If this PHI has itself as an argument, ignore it.
4421 If all the other args produce the same result,
4422 we're still OK. */
4423 if (arg == gimple_phi_result (stmt))
4424 continue;
4425 else if (TREE_CODE (arg) == INTEGER_CST)
4427 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4429 if (!result)
4430 result = boolean_false_node;
4431 else if (!integer_zerop (result))
4432 return NULL_TREE;
4434 else if (!result)
4435 result = fold_build2 (code2, boolean_type_node,
4436 op2a, op2b);
4437 else if (!same_bool_comparison_p (result,
4438 code2, op2a, op2b))
4439 return NULL_TREE;
4441 else if (TREE_CODE (arg) == SSA_NAME
4442 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4444 tree temp;
4445 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4446 /* In simple cases we can look through PHI nodes,
4447 but we have to be careful with loops.
4448 See PR49073. */
4449 if (! dom_info_available_p (CDI_DOMINATORS)
4450 || gimple_bb (def_stmt) == gimple_bb (stmt)
4451 || dominated_by_p (CDI_DOMINATORS,
4452 gimple_bb (def_stmt),
4453 gimple_bb (stmt)))
4454 return NULL_TREE;
4455 temp = and_var_with_comparison (arg, invert, code2,
4456 op2a, op2b);
4457 if (!temp)
4458 return NULL_TREE;
4459 else if (!result)
4460 result = temp;
4461 else if (!same_bool_result_p (result, temp))
4462 return NULL_TREE;
4464 else
4465 return NULL_TREE;
4467 return result;
4470 default:
4471 break;
4474 return NULL_TREE;
4477 /* Try to simplify the AND of two comparisons, specified by
4478 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4479 If this can be simplified to a single expression (without requiring
4480 introducing more SSA variables to hold intermediate values),
4481 return the resulting tree. Otherwise return NULL_TREE.
4482 If the result expression is non-null, it has boolean type. */
4484 tree
4485 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4486 enum tree_code code2, tree op2a, tree op2b)
4488 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4489 if (t)
4490 return t;
4491 else
4492 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4495 /* Helper function for or_comparisons_1: try to simplify the OR of the
4496 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4497 If INVERT is true, invert the value of VAR before doing the OR.
4498 Return NULL_EXPR if we can't simplify this to a single expression. */
4500 static tree
4501 or_var_with_comparison (tree var, bool invert,
4502 enum tree_code code2, tree op2a, tree op2b)
4504 tree t;
4505 gimple *stmt = SSA_NAME_DEF_STMT (var);
4507 /* We can only deal with variables whose definitions are assignments. */
4508 if (!is_gimple_assign (stmt))
4509 return NULL_TREE;
4511 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4512 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4513 Then we only have to consider the simpler non-inverted cases. */
4514 if (invert)
4515 t = and_var_with_comparison_1 (stmt,
4516 invert_tree_comparison (code2, false),
4517 op2a, op2b);
4518 else
4519 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4520 return canonicalize_bool (t, invert);
4523 /* Try to simplify the OR of the ssa variable defined by the assignment
4524 STMT with the comparison specified by (OP2A CODE2 OP2B).
4525 Return NULL_EXPR if we can't simplify this to a single expression. */
4527 static tree
4528 or_var_with_comparison_1 (gimple *stmt,
4529 enum tree_code code2, tree op2a, tree op2b)
4531 tree var = gimple_assign_lhs (stmt);
4532 tree true_test_var = NULL_TREE;
4533 tree false_test_var = NULL_TREE;
4534 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4536 /* Check for identities like (var OR (var != 0)) => true . */
4537 if (TREE_CODE (op2a) == SSA_NAME
4538 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4540 if ((code2 == NE_EXPR && integer_zerop (op2b))
4541 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4543 true_test_var = op2a;
4544 if (var == true_test_var)
4545 return var;
4547 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4548 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4550 false_test_var = op2a;
4551 if (var == false_test_var)
4552 return boolean_true_node;
4556 /* If the definition is a comparison, recurse on it. */
4557 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4559 tree t = or_comparisons_1 (innercode,
4560 gimple_assign_rhs1 (stmt),
4561 gimple_assign_rhs2 (stmt),
4562 code2,
4563 op2a,
4564 op2b);
4565 if (t)
4566 return t;
4569 /* If the definition is an AND or OR expression, we may be able to
4570 simplify by reassociating. */
4571 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4572 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4574 tree inner1 = gimple_assign_rhs1 (stmt);
4575 tree inner2 = gimple_assign_rhs2 (stmt);
4576 gimple *s;
4577 tree t;
4578 tree partial = NULL_TREE;
4579 bool is_or = (innercode == BIT_IOR_EXPR);
4581 /* Check for boolean identities that don't require recursive examination
4582 of inner1/inner2:
4583 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4584 inner1 OR (inner1 AND inner2) => inner1
4585 !inner1 OR (inner1 OR inner2) => true
4586 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4588 if (inner1 == true_test_var)
4589 return (is_or ? var : inner1);
4590 else if (inner2 == true_test_var)
4591 return (is_or ? var : inner2);
4592 else if (inner1 == false_test_var)
4593 return (is_or
4594 ? boolean_true_node
4595 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4596 else if (inner2 == false_test_var)
4597 return (is_or
4598 ? boolean_true_node
4599 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4601 /* Next, redistribute/reassociate the OR across the inner tests.
4602 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4603 if (TREE_CODE (inner1) == SSA_NAME
4604 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4605 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4606 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4607 gimple_assign_rhs1 (s),
4608 gimple_assign_rhs2 (s),
4609 code2, op2a, op2b)))
4611 /* Handle the OR case, where we are reassociating:
4612 (inner1 OR inner2) OR (op2a code2 op2b)
4613 => (t OR inner2)
4614 If the partial result t is a constant, we win. Otherwise
4615 continue on to try reassociating with the other inner test. */
4616 if (is_or)
4618 if (integer_onep (t))
4619 return boolean_true_node;
4620 else if (integer_zerop (t))
4621 return inner2;
4624 /* Handle the AND case, where we are redistributing:
4625 (inner1 AND inner2) OR (op2a code2 op2b)
4626 => (t AND (inner2 OR (op2a code op2b))) */
4627 else if (integer_zerop (t))
4628 return boolean_false_node;
4630 /* Save partial result for later. */
4631 partial = t;
4634 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4635 if (TREE_CODE (inner2) == SSA_NAME
4636 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4637 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4638 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4639 gimple_assign_rhs1 (s),
4640 gimple_assign_rhs2 (s),
4641 code2, op2a, op2b)))
4643 /* Handle the OR case, where we are reassociating:
4644 (inner1 OR inner2) OR (op2a code2 op2b)
4645 => (inner1 OR t)
4646 => (t OR partial) */
4647 if (is_or)
4649 if (integer_zerop (t))
4650 return inner1;
4651 else if (integer_onep (t))
4652 return boolean_true_node;
4653 /* If both are the same, we can apply the identity
4654 (x OR x) == x. */
4655 else if (partial && same_bool_result_p (t, partial))
4656 return t;
4659 /* Handle the AND case, where we are redistributing:
4660 (inner1 AND inner2) OR (op2a code2 op2b)
4661 => (t AND (inner1 OR (op2a code2 op2b)))
4662 => (t AND partial) */
4663 else
4665 if (integer_zerop (t))
4666 return boolean_false_node;
4667 else if (partial)
4669 /* We already got a simplification for the other
4670 operand to the redistributed AND expression. The
4671 interesting case is when at least one is true.
4672 Or, if both are the same, we can apply the identity
4673 (x AND x) == x. */
4674 if (integer_onep (partial))
4675 return t;
4676 else if (integer_onep (t))
4677 return partial;
4678 else if (same_bool_result_p (t, partial))
4679 return t;
4684 return NULL_TREE;
4687 /* Try to simplify the OR of two comparisons defined by
4688 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4689 If this can be done without constructing an intermediate value,
4690 return the resulting tree; otherwise NULL_TREE is returned.
4691 This function is deliberately asymmetric as it recurses on SSA_DEFs
4692 in the first comparison but not the second. */
4694 static tree
4695 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4696 enum tree_code code2, tree op2a, tree op2b)
4698 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4700 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4701 if (operand_equal_p (op1a, op2a, 0)
4702 && operand_equal_p (op1b, op2b, 0))
4704 /* Result will be either NULL_TREE, or a combined comparison. */
4705 tree t = combine_comparisons (UNKNOWN_LOCATION,
4706 TRUTH_ORIF_EXPR, code1, code2,
4707 truth_type, op1a, op1b);
4708 if (t)
4709 return t;
4712 /* Likewise the swapped case of the above. */
4713 if (operand_equal_p (op1a, op2b, 0)
4714 && operand_equal_p (op1b, op2a, 0))
4716 /* Result will be either NULL_TREE, or a combined comparison. */
4717 tree t = combine_comparisons (UNKNOWN_LOCATION,
4718 TRUTH_ORIF_EXPR, code1,
4719 swap_tree_comparison (code2),
4720 truth_type, op1a, op1b);
4721 if (t)
4722 return t;
4725 /* If both comparisons are of the same value against constants, we might
4726 be able to merge them. */
4727 if (operand_equal_p (op1a, op2a, 0)
4728 && TREE_CODE (op1b) == INTEGER_CST
4729 && TREE_CODE (op2b) == INTEGER_CST)
4731 int cmp = tree_int_cst_compare (op1b, op2b);
4733 /* If we have (op1a != op1b), we should either be able to
4734 return that or TRUE, depending on whether the constant op1b
4735 also satisfies the other comparison against op2b. */
4736 if (code1 == NE_EXPR)
4738 bool done = true;
4739 bool val;
4740 switch (code2)
4742 case EQ_EXPR: val = (cmp == 0); break;
4743 case NE_EXPR: val = (cmp != 0); break;
4744 case LT_EXPR: val = (cmp < 0); break;
4745 case GT_EXPR: val = (cmp > 0); break;
4746 case LE_EXPR: val = (cmp <= 0); break;
4747 case GE_EXPR: val = (cmp >= 0); break;
4748 default: done = false;
4750 if (done)
4752 if (val)
4753 return boolean_true_node;
4754 else
4755 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4758 /* Likewise if the second comparison is a != comparison. */
4759 else if (code2 == NE_EXPR)
4761 bool done = true;
4762 bool val;
4763 switch (code1)
4765 case EQ_EXPR: val = (cmp == 0); break;
4766 case NE_EXPR: val = (cmp != 0); break;
4767 case LT_EXPR: val = (cmp > 0); break;
4768 case GT_EXPR: val = (cmp < 0); break;
4769 case LE_EXPR: val = (cmp >= 0); break;
4770 case GE_EXPR: val = (cmp <= 0); break;
4771 default: done = false;
4773 if (done)
4775 if (val)
4776 return boolean_true_node;
4777 else
4778 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4782 /* See if an equality test is redundant with the other comparison. */
4783 else if (code1 == EQ_EXPR)
4785 bool val;
4786 switch (code2)
4788 case EQ_EXPR: val = (cmp == 0); break;
4789 case NE_EXPR: val = (cmp != 0); break;
4790 case LT_EXPR: val = (cmp < 0); break;
4791 case GT_EXPR: val = (cmp > 0); break;
4792 case LE_EXPR: val = (cmp <= 0); break;
4793 case GE_EXPR: val = (cmp >= 0); break;
4794 default:
4795 val = false;
4797 if (val)
4798 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4800 else if (code2 == EQ_EXPR)
4802 bool val;
4803 switch (code1)
4805 case EQ_EXPR: val = (cmp == 0); break;
4806 case NE_EXPR: val = (cmp != 0); break;
4807 case LT_EXPR: val = (cmp > 0); break;
4808 case GT_EXPR: val = (cmp < 0); break;
4809 case LE_EXPR: val = (cmp >= 0); break;
4810 case GE_EXPR: val = (cmp <= 0); break;
4811 default:
4812 val = false;
4814 if (val)
4815 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4818 /* Chose the less restrictive of two < or <= comparisons. */
4819 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4820 && (code2 == LT_EXPR || code2 == LE_EXPR))
4822 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4823 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4824 else
4825 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4828 /* Likewise chose the less restrictive of two > or >= comparisons. */
4829 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4830 && (code2 == GT_EXPR || code2 == GE_EXPR))
4832 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4833 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4834 else
4835 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4838 /* Check for singleton ranges. */
4839 else if (cmp == 0
4840 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4841 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4842 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4844 /* Check for less/greater pairs that don't restrict the range at all. */
4845 else if (cmp >= 0
4846 && (code1 == LT_EXPR || code1 == LE_EXPR)
4847 && (code2 == GT_EXPR || code2 == GE_EXPR))
4848 return boolean_true_node;
4849 else if (cmp <= 0
4850 && (code1 == GT_EXPR || code1 == GE_EXPR)
4851 && (code2 == LT_EXPR || code2 == LE_EXPR))
4852 return boolean_true_node;
4855 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4856 NAME's definition is a truth value. See if there are any simplifications
4857 that can be done against the NAME's definition. */
4858 if (TREE_CODE (op1a) == SSA_NAME
4859 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4860 && (integer_zerop (op1b) || integer_onep (op1b)))
4862 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4863 || (code1 == NE_EXPR && integer_onep (op1b)));
4864 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4865 switch (gimple_code (stmt))
4867 case GIMPLE_ASSIGN:
4868 /* Try to simplify by copy-propagating the definition. */
4869 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4871 case GIMPLE_PHI:
4872 /* If every argument to the PHI produces the same result when
4873 ORed with the second comparison, we win.
4874 Do not do this unless the type is bool since we need a bool
4875 result here anyway. */
4876 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4878 tree result = NULL_TREE;
4879 unsigned i;
4880 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4882 tree arg = gimple_phi_arg_def (stmt, i);
4884 /* If this PHI has itself as an argument, ignore it.
4885 If all the other args produce the same result,
4886 we're still OK. */
4887 if (arg == gimple_phi_result (stmt))
4888 continue;
4889 else if (TREE_CODE (arg) == INTEGER_CST)
4891 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4893 if (!result)
4894 result = boolean_true_node;
4895 else if (!integer_onep (result))
4896 return NULL_TREE;
4898 else if (!result)
4899 result = fold_build2 (code2, boolean_type_node,
4900 op2a, op2b);
4901 else if (!same_bool_comparison_p (result,
4902 code2, op2a, op2b))
4903 return NULL_TREE;
4905 else if (TREE_CODE (arg) == SSA_NAME
4906 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4908 tree temp;
4909 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4910 /* In simple cases we can look through PHI nodes,
4911 but we have to be careful with loops.
4912 See PR49073. */
4913 if (! dom_info_available_p (CDI_DOMINATORS)
4914 || gimple_bb (def_stmt) == gimple_bb (stmt)
4915 || dominated_by_p (CDI_DOMINATORS,
4916 gimple_bb (def_stmt),
4917 gimple_bb (stmt)))
4918 return NULL_TREE;
4919 temp = or_var_with_comparison (arg, invert, code2,
4920 op2a, op2b);
4921 if (!temp)
4922 return NULL_TREE;
4923 else if (!result)
4924 result = temp;
4925 else if (!same_bool_result_p (result, temp))
4926 return NULL_TREE;
4928 else
4929 return NULL_TREE;
4931 return result;
4934 default:
4935 break;
4938 return NULL_TREE;
4941 /* Try to simplify the OR of two comparisons, specified by
4942 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4943 If this can be simplified to a single expression (without requiring
4944 introducing more SSA variables to hold intermediate values),
4945 return the resulting tree. Otherwise return NULL_TREE.
4946 If the result expression is non-null, it has boolean type. */
4948 tree
4949 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4950 enum tree_code code2, tree op2a, tree op2b)
4952 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4953 if (t)
4954 return t;
4955 else
4956 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4960 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4962 Either NULL_TREE, a simplified but non-constant or a constant
4963 is returned.
4965 ??? This should go into a gimple-fold-inline.h file to be eventually
4966 privatized with the single valueize function used in the various TUs
4967 to avoid the indirect function call overhead. */
4969 tree
4970 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4971 tree (*gvalueize) (tree))
4973 code_helper rcode;
4974 tree ops[3] = {};
4975 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4976 edges if there are intermediate VARYING defs. For this reason
4977 do not follow SSA edges here even though SCCVN can technically
4978 just deal fine with that. */
4979 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4981 tree res = NULL_TREE;
4982 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4983 res = ops[0];
4984 else if (mprts_hook)
4985 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4986 if (res)
4988 if (dump_file && dump_flags & TDF_DETAILS)
4990 fprintf (dump_file, "Match-and-simplified ");
4991 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4992 fprintf (dump_file, " to ");
4993 print_generic_expr (dump_file, res, 0);
4994 fprintf (dump_file, "\n");
4996 return res;
5000 location_t loc = gimple_location (stmt);
5001 switch (gimple_code (stmt))
5003 case GIMPLE_ASSIGN:
5005 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5007 switch (get_gimple_rhs_class (subcode))
5009 case GIMPLE_SINGLE_RHS:
5011 tree rhs = gimple_assign_rhs1 (stmt);
5012 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5014 if (TREE_CODE (rhs) == SSA_NAME)
5016 /* If the RHS is an SSA_NAME, return its known constant value,
5017 if any. */
5018 return (*valueize) (rhs);
5020 /* Handle propagating invariant addresses into address
5021 operations. */
5022 else if (TREE_CODE (rhs) == ADDR_EXPR
5023 && !is_gimple_min_invariant (rhs))
5025 HOST_WIDE_INT offset = 0;
5026 tree base;
5027 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5028 &offset,
5029 valueize);
5030 if (base
5031 && (CONSTANT_CLASS_P (base)
5032 || decl_address_invariant_p (base)))
5033 return build_invariant_address (TREE_TYPE (rhs),
5034 base, offset);
5036 else if (TREE_CODE (rhs) == CONSTRUCTOR
5037 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5038 && (CONSTRUCTOR_NELTS (rhs)
5039 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5041 unsigned i;
5042 tree val, *vec;
5044 vec = XALLOCAVEC (tree,
5045 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5046 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5048 val = (*valueize) (val);
5049 if (TREE_CODE (val) == INTEGER_CST
5050 || TREE_CODE (val) == REAL_CST
5051 || TREE_CODE (val) == FIXED_CST)
5052 vec[i] = val;
5053 else
5054 return NULL_TREE;
5057 return build_vector (TREE_TYPE (rhs), vec);
5059 if (subcode == OBJ_TYPE_REF)
5061 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5062 /* If callee is constant, we can fold away the wrapper. */
5063 if (is_gimple_min_invariant (val))
5064 return val;
5067 if (kind == tcc_reference)
5069 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5070 || TREE_CODE (rhs) == REALPART_EXPR
5071 || TREE_CODE (rhs) == IMAGPART_EXPR)
5072 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5074 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5075 return fold_unary_loc (EXPR_LOCATION (rhs),
5076 TREE_CODE (rhs),
5077 TREE_TYPE (rhs), val);
5079 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5080 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5082 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5083 return fold_ternary_loc (EXPR_LOCATION (rhs),
5084 TREE_CODE (rhs),
5085 TREE_TYPE (rhs), val,
5086 TREE_OPERAND (rhs, 1),
5087 TREE_OPERAND (rhs, 2));
5089 else if (TREE_CODE (rhs) == MEM_REF
5090 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5092 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5093 if (TREE_CODE (val) == ADDR_EXPR
5094 && is_gimple_min_invariant (val))
5096 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5097 unshare_expr (val),
5098 TREE_OPERAND (rhs, 1));
5099 if (tem)
5100 rhs = tem;
5103 return fold_const_aggregate_ref_1 (rhs, valueize);
5105 else if (kind == tcc_declaration)
5106 return get_symbol_constant_value (rhs);
5107 return rhs;
5110 case GIMPLE_UNARY_RHS:
5111 return NULL_TREE;
5113 case GIMPLE_BINARY_RHS:
5114 /* Translate &x + CST into an invariant form suitable for
5115 further propagation. */
5116 if (subcode == POINTER_PLUS_EXPR)
5118 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5119 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5120 if (TREE_CODE (op0) == ADDR_EXPR
5121 && TREE_CODE (op1) == INTEGER_CST)
5123 tree off = fold_convert (ptr_type_node, op1);
5124 return build_fold_addr_expr_loc
5125 (loc,
5126 fold_build2 (MEM_REF,
5127 TREE_TYPE (TREE_TYPE (op0)),
5128 unshare_expr (op0), off));
5131 /* Canonicalize bool != 0 and bool == 0 appearing after
5132 valueization. While gimple_simplify handles this
5133 it can get confused by the ~X == 1 -> X == 0 transform
5134 which we cant reduce to a SSA name or a constant
5135 (and we have no way to tell gimple_simplify to not
5136 consider those transforms in the first place). */
5137 else if (subcode == EQ_EXPR
5138 || subcode == NE_EXPR)
5140 tree lhs = gimple_assign_lhs (stmt);
5141 tree op0 = gimple_assign_rhs1 (stmt);
5142 if (useless_type_conversion_p (TREE_TYPE (lhs),
5143 TREE_TYPE (op0)))
5145 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5146 op0 = (*valueize) (op0);
5147 if (TREE_CODE (op0) == INTEGER_CST)
5148 std::swap (op0, op1);
5149 if (TREE_CODE (op1) == INTEGER_CST
5150 && ((subcode == NE_EXPR && integer_zerop (op1))
5151 || (subcode == EQ_EXPR && integer_onep (op1))))
5152 return op0;
5155 return NULL_TREE;
5157 case GIMPLE_TERNARY_RHS:
5159 /* Handle ternary operators that can appear in GIMPLE form. */
5160 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5161 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5162 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5163 return fold_ternary_loc (loc, subcode,
5164 gimple_expr_type (stmt), op0, op1, op2);
5167 default:
5168 gcc_unreachable ();
5172 case GIMPLE_CALL:
5174 tree fn;
5175 gcall *call_stmt = as_a <gcall *> (stmt);
5177 if (gimple_call_internal_p (stmt))
5179 enum tree_code subcode = ERROR_MARK;
5180 switch (gimple_call_internal_fn (stmt))
5182 case IFN_UBSAN_CHECK_ADD:
5183 subcode = PLUS_EXPR;
5184 break;
5185 case IFN_UBSAN_CHECK_SUB:
5186 subcode = MINUS_EXPR;
5187 break;
5188 case IFN_UBSAN_CHECK_MUL:
5189 subcode = MULT_EXPR;
5190 break;
5191 default:
5192 return NULL_TREE;
5194 tree arg0 = gimple_call_arg (stmt, 0);
5195 tree arg1 = gimple_call_arg (stmt, 1);
5196 tree op0 = (*valueize) (arg0);
5197 tree op1 = (*valueize) (arg1);
5199 if (TREE_CODE (op0) != INTEGER_CST
5200 || TREE_CODE (op1) != INTEGER_CST)
5202 switch (subcode)
5204 case MULT_EXPR:
5205 /* x * 0 = 0 * x = 0 without overflow. */
5206 if (integer_zerop (op0) || integer_zerop (op1))
5207 return build_zero_cst (TREE_TYPE (arg0));
5208 break;
5209 case MINUS_EXPR:
5210 /* y - y = 0 without overflow. */
5211 if (operand_equal_p (op0, op1, 0))
5212 return build_zero_cst (TREE_TYPE (arg0));
5213 break;
5214 default:
5215 break;
5218 tree res
5219 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5220 if (res
5221 && TREE_CODE (res) == INTEGER_CST
5222 && !TREE_OVERFLOW (res))
5223 return res;
5224 return NULL_TREE;
5227 fn = (*valueize) (gimple_call_fn (stmt));
5228 if (TREE_CODE (fn) == ADDR_EXPR
5229 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5230 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5231 && gimple_builtin_call_types_compatible_p (stmt,
5232 TREE_OPERAND (fn, 0)))
5234 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5235 tree retval;
5236 unsigned i;
5237 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5238 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5239 retval = fold_builtin_call_array (loc,
5240 gimple_call_return_type (call_stmt),
5241 fn, gimple_call_num_args (stmt), args);
5242 if (retval)
5244 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5245 STRIP_NOPS (retval);
5246 retval = fold_convert (gimple_call_return_type (call_stmt),
5247 retval);
5249 return retval;
5251 return NULL_TREE;
5254 default:
5255 return NULL_TREE;
5259 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5260 Returns NULL_TREE if folding to a constant is not possible, otherwise
5261 returns a constant according to is_gimple_min_invariant. */
5263 tree
5264 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5266 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5267 if (res && is_gimple_min_invariant (res))
5268 return res;
5269 return NULL_TREE;
5273 /* The following set of functions are supposed to fold references using
5274 their constant initializers. */
5276 /* See if we can find constructor defining value of BASE.
5277 When we know the consructor with constant offset (such as
5278 base is array[40] and we do know constructor of array), then
5279 BIT_OFFSET is adjusted accordingly.
5281 As a special case, return error_mark_node when constructor
5282 is not explicitly available, but it is known to be zero
5283 such as 'static const int a;'. */
5284 static tree
5285 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5286 tree (*valueize)(tree))
5288 HOST_WIDE_INT bit_offset2, size, max_size;
5289 bool reverse;
5291 if (TREE_CODE (base) == MEM_REF)
5293 if (!integer_zerop (TREE_OPERAND (base, 1)))
5295 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5296 return NULL_TREE;
5297 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5298 * BITS_PER_UNIT);
5301 if (valueize
5302 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5303 base = valueize (TREE_OPERAND (base, 0));
5304 if (!base || TREE_CODE (base) != ADDR_EXPR)
5305 return NULL_TREE;
5306 base = TREE_OPERAND (base, 0);
5309 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5310 DECL_INITIAL. If BASE is a nested reference into another
5311 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5312 the inner reference. */
5313 switch (TREE_CODE (base))
5315 case VAR_DECL:
5316 case CONST_DECL:
5318 tree init = ctor_for_folding (base);
5320 /* Our semantic is exact opposite of ctor_for_folding;
5321 NULL means unknown, while error_mark_node is 0. */
5322 if (init == error_mark_node)
5323 return NULL_TREE;
5324 if (!init)
5325 return error_mark_node;
5326 return init;
5329 case ARRAY_REF:
5330 case COMPONENT_REF:
5331 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5332 &reverse);
5333 if (max_size == -1 || size != max_size)
5334 return NULL_TREE;
5335 *bit_offset += bit_offset2;
5336 return get_base_constructor (base, bit_offset, valueize);
5338 case STRING_CST:
5339 case CONSTRUCTOR:
5340 return base;
5342 default:
5343 return NULL_TREE;
5347 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5348 SIZE to the memory at bit OFFSET. */
5350 static tree
5351 fold_array_ctor_reference (tree type, tree ctor,
5352 unsigned HOST_WIDE_INT offset,
5353 unsigned HOST_WIDE_INT size,
5354 tree from_decl)
5356 offset_int low_bound;
5357 offset_int elt_size;
5358 offset_int access_index;
5359 tree domain_type = NULL_TREE;
5360 HOST_WIDE_INT inner_offset;
5362 /* Compute low bound and elt size. */
5363 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5364 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5365 if (domain_type && TYPE_MIN_VALUE (domain_type))
5367 /* Static constructors for variably sized objects makes no sense. */
5368 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5369 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5371 else
5372 low_bound = 0;
5373 /* Static constructors for variably sized objects makes no sense. */
5374 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5375 == INTEGER_CST);
5376 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5378 /* We can handle only constantly sized accesses that are known to not
5379 be larger than size of array element. */
5380 if (!TYPE_SIZE_UNIT (type)
5381 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5382 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5383 || elt_size == 0)
5384 return NULL_TREE;
5386 /* Compute the array index we look for. */
5387 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5388 elt_size);
5389 access_index += low_bound;
5391 /* And offset within the access. */
5392 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5394 /* See if the array field is large enough to span whole access. We do not
5395 care to fold accesses spanning multiple array indexes. */
5396 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5397 return NULL_TREE;
5398 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5399 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5401 /* When memory is not explicitely mentioned in constructor,
5402 it is 0 (or out of range). */
5403 return build_zero_cst (type);
5406 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5407 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5409 static tree
5410 fold_nonarray_ctor_reference (tree type, tree ctor,
5411 unsigned HOST_WIDE_INT offset,
5412 unsigned HOST_WIDE_INT size,
5413 tree from_decl)
5415 unsigned HOST_WIDE_INT cnt;
5416 tree cfield, cval;
5418 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5419 cval)
5421 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5422 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5423 tree field_size = DECL_SIZE (cfield);
5424 offset_int bitoffset;
5425 offset_int bitoffset_end, access_end;
5427 /* Variable sized objects in static constructors makes no sense,
5428 but field_size can be NULL for flexible array members. */
5429 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5430 && TREE_CODE (byte_offset) == INTEGER_CST
5431 && (field_size != NULL_TREE
5432 ? TREE_CODE (field_size) == INTEGER_CST
5433 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5435 /* Compute bit offset of the field. */
5436 bitoffset = (wi::to_offset (field_offset)
5437 + wi::lshift (wi::to_offset (byte_offset),
5438 LOG2_BITS_PER_UNIT));
5439 /* Compute bit offset where the field ends. */
5440 if (field_size != NULL_TREE)
5441 bitoffset_end = bitoffset + wi::to_offset (field_size);
5442 else
5443 bitoffset_end = 0;
5445 access_end = offset_int (offset) + size;
5447 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5448 [BITOFFSET, BITOFFSET_END)? */
5449 if (wi::cmps (access_end, bitoffset) > 0
5450 && (field_size == NULL_TREE
5451 || wi::lts_p (offset, bitoffset_end)))
5453 offset_int inner_offset = offset_int (offset) - bitoffset;
5454 /* We do have overlap. Now see if field is large enough to
5455 cover the access. Give up for accesses spanning multiple
5456 fields. */
5457 if (wi::cmps (access_end, bitoffset_end) > 0)
5458 return NULL_TREE;
5459 if (wi::lts_p (offset, bitoffset))
5460 return NULL_TREE;
5461 return fold_ctor_reference (type, cval,
5462 inner_offset.to_uhwi (), size,
5463 from_decl);
5466 /* When memory is not explicitely mentioned in constructor, it is 0. */
5467 return build_zero_cst (type);
5470 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5471 to the memory at bit OFFSET. */
5473 tree
5474 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5475 unsigned HOST_WIDE_INT size, tree from_decl)
5477 tree ret;
5479 /* We found the field with exact match. */
5480 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5481 && !offset)
5482 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5484 /* We are at the end of walk, see if we can view convert the
5485 result. */
5486 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5487 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5488 && !compare_tree_int (TYPE_SIZE (type), size)
5489 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5491 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5492 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5493 if (ret)
5494 STRIP_USELESS_TYPE_CONVERSION (ret);
5495 return ret;
5497 /* For constants and byte-aligned/sized reads try to go through
5498 native_encode/interpret. */
5499 if (CONSTANT_CLASS_P (ctor)
5500 && BITS_PER_UNIT == 8
5501 && offset % BITS_PER_UNIT == 0
5502 && size % BITS_PER_UNIT == 0
5503 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5505 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5506 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5507 offset / BITS_PER_UNIT);
5508 if (len > 0)
5509 return native_interpret_expr (type, buf, len);
5511 if (TREE_CODE (ctor) == CONSTRUCTOR)
5514 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5515 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5516 return fold_array_ctor_reference (type, ctor, offset, size,
5517 from_decl);
5518 else
5519 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5520 from_decl);
5523 return NULL_TREE;
5526 /* Return the tree representing the element referenced by T if T is an
5527 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5528 names using VALUEIZE. Return NULL_TREE otherwise. */
5530 tree
5531 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5533 tree ctor, idx, base;
5534 HOST_WIDE_INT offset, size, max_size;
5535 tree tem;
5536 bool reverse;
5538 if (TREE_THIS_VOLATILE (t))
5539 return NULL_TREE;
5541 if (DECL_P (t))
5542 return get_symbol_constant_value (t);
5544 tem = fold_read_from_constant_string (t);
5545 if (tem)
5546 return tem;
5548 switch (TREE_CODE (t))
5550 case ARRAY_REF:
5551 case ARRAY_RANGE_REF:
5552 /* Constant indexes are handled well by get_base_constructor.
5553 Only special case variable offsets.
5554 FIXME: This code can't handle nested references with variable indexes
5555 (they will be handled only by iteration of ccp). Perhaps we can bring
5556 get_ref_base_and_extent here and make it use a valueize callback. */
5557 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5558 && valueize
5559 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5560 && TREE_CODE (idx) == INTEGER_CST)
5562 tree low_bound, unit_size;
5564 /* If the resulting bit-offset is constant, track it. */
5565 if ((low_bound = array_ref_low_bound (t),
5566 TREE_CODE (low_bound) == INTEGER_CST)
5567 && (unit_size = array_ref_element_size (t),
5568 tree_fits_uhwi_p (unit_size)))
5570 offset_int woffset
5571 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5572 TYPE_PRECISION (TREE_TYPE (idx)));
5574 if (wi::fits_shwi_p (woffset))
5576 offset = woffset.to_shwi ();
5577 /* TODO: This code seems wrong, multiply then check
5578 to see if it fits. */
5579 offset *= tree_to_uhwi (unit_size);
5580 offset *= BITS_PER_UNIT;
5582 base = TREE_OPERAND (t, 0);
5583 ctor = get_base_constructor (base, &offset, valueize);
5584 /* Empty constructor. Always fold to 0. */
5585 if (ctor == error_mark_node)
5586 return build_zero_cst (TREE_TYPE (t));
5587 /* Out of bound array access. Value is undefined,
5588 but don't fold. */
5589 if (offset < 0)
5590 return NULL_TREE;
5591 /* We can not determine ctor. */
5592 if (!ctor)
5593 return NULL_TREE;
5594 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5595 tree_to_uhwi (unit_size)
5596 * BITS_PER_UNIT,
5597 base);
5601 /* Fallthru. */
5603 case COMPONENT_REF:
5604 case BIT_FIELD_REF:
5605 case TARGET_MEM_REF:
5606 case MEM_REF:
5607 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5608 ctor = get_base_constructor (base, &offset, valueize);
5610 /* Empty constructor. Always fold to 0. */
5611 if (ctor == error_mark_node)
5612 return build_zero_cst (TREE_TYPE (t));
5613 /* We do not know precise address. */
5614 if (max_size == -1 || max_size != size)
5615 return NULL_TREE;
5616 /* We can not determine ctor. */
5617 if (!ctor)
5618 return NULL_TREE;
5620 /* Out of bound array access. Value is undefined, but don't fold. */
5621 if (offset < 0)
5622 return NULL_TREE;
5624 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5625 base);
5627 case REALPART_EXPR:
5628 case IMAGPART_EXPR:
5630 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5631 if (c && TREE_CODE (c) == COMPLEX_CST)
5632 return fold_build1_loc (EXPR_LOCATION (t),
5633 TREE_CODE (t), TREE_TYPE (t), c);
5634 break;
5637 default:
5638 break;
5641 return NULL_TREE;
5644 tree
5645 fold_const_aggregate_ref (tree t)
5647 return fold_const_aggregate_ref_1 (t, NULL);
5650 /* Lookup virtual method with index TOKEN in a virtual table V
5651 at OFFSET.
5652 Set CAN_REFER if non-NULL to false if method
5653 is not referable or if the virtual table is ill-formed (such as rewriten
5654 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5656 tree
5657 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5658 tree v,
5659 unsigned HOST_WIDE_INT offset,
5660 bool *can_refer)
5662 tree vtable = v, init, fn;
5663 unsigned HOST_WIDE_INT size;
5664 unsigned HOST_WIDE_INT elt_size, access_index;
5665 tree domain_type;
5667 if (can_refer)
5668 *can_refer = true;
5670 /* First of all double check we have virtual table. */
5671 if (TREE_CODE (v) != VAR_DECL
5672 || !DECL_VIRTUAL_P (v))
5674 /* Pass down that we lost track of the target. */
5675 if (can_refer)
5676 *can_refer = false;
5677 return NULL_TREE;
5680 init = ctor_for_folding (v);
5682 /* The virtual tables should always be born with constructors
5683 and we always should assume that they are avaialble for
5684 folding. At the moment we do not stream them in all cases,
5685 but it should never happen that ctor seem unreachable. */
5686 gcc_assert (init);
5687 if (init == error_mark_node)
5689 gcc_assert (in_lto_p);
5690 /* Pass down that we lost track of the target. */
5691 if (can_refer)
5692 *can_refer = false;
5693 return NULL_TREE;
5695 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5696 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5697 offset *= BITS_PER_UNIT;
5698 offset += token * size;
5700 /* Lookup the value in the constructor that is assumed to be array.
5701 This is equivalent to
5702 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5703 offset, size, NULL);
5704 but in a constant time. We expect that frontend produced a simple
5705 array without indexed initializers. */
5707 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5708 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5709 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5710 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5712 access_index = offset / BITS_PER_UNIT / elt_size;
5713 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5715 /* This code makes an assumption that there are no
5716 indexed fileds produced by C++ FE, so we can directly index the array. */
5717 if (access_index < CONSTRUCTOR_NELTS (init))
5719 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5720 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5721 STRIP_NOPS (fn);
5723 else
5724 fn = NULL;
5726 /* For type inconsistent program we may end up looking up virtual method
5727 in virtual table that does not contain TOKEN entries. We may overrun
5728 the virtual table and pick up a constant or RTTI info pointer.
5729 In any case the call is undefined. */
5730 if (!fn
5731 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5732 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5733 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5734 else
5736 fn = TREE_OPERAND (fn, 0);
5738 /* When cgraph node is missing and function is not public, we cannot
5739 devirtualize. This can happen in WHOPR when the actual method
5740 ends up in other partition, because we found devirtualization
5741 possibility too late. */
5742 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5744 if (can_refer)
5746 *can_refer = false;
5747 return fn;
5749 return NULL_TREE;
5753 /* Make sure we create a cgraph node for functions we'll reference.
5754 They can be non-existent if the reference comes from an entry
5755 of an external vtable for example. */
5756 cgraph_node::get_create (fn);
5758 return fn;
5761 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5762 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5763 KNOWN_BINFO carries the binfo describing the true type of
5764 OBJ_TYPE_REF_OBJECT(REF).
5765 Set CAN_REFER if non-NULL to false if method
5766 is not referable or if the virtual table is ill-formed (such as rewriten
5767 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5769 tree
5770 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5771 bool *can_refer)
5773 unsigned HOST_WIDE_INT offset;
5774 tree v;
5776 v = BINFO_VTABLE (known_binfo);
5777 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5778 if (!v)
5779 return NULL_TREE;
5781 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5783 if (can_refer)
5784 *can_refer = false;
5785 return NULL_TREE;
5787 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5790 /* Given a pointer value OP0, return a simplified version of an
5791 indirection through OP0, or NULL_TREE if no simplification is
5792 possible. Note that the resulting type may be different from
5793 the type pointed to in the sense that it is still compatible
5794 from the langhooks point of view. */
5796 tree
5797 gimple_fold_indirect_ref (tree t)
5799 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5800 tree sub = t;
5801 tree subtype;
5803 STRIP_NOPS (sub);
5804 subtype = TREE_TYPE (sub);
5805 if (!POINTER_TYPE_P (subtype))
5806 return NULL_TREE;
5808 if (TREE_CODE (sub) == ADDR_EXPR)
5810 tree op = TREE_OPERAND (sub, 0);
5811 tree optype = TREE_TYPE (op);
5812 /* *&p => p */
5813 if (useless_type_conversion_p (type, optype))
5814 return op;
5816 /* *(foo *)&fooarray => fooarray[0] */
5817 if (TREE_CODE (optype) == ARRAY_TYPE
5818 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5819 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5821 tree type_domain = TYPE_DOMAIN (optype);
5822 tree min_val = size_zero_node;
5823 if (type_domain && TYPE_MIN_VALUE (type_domain))
5824 min_val = TYPE_MIN_VALUE (type_domain);
5825 if (TREE_CODE (min_val) == INTEGER_CST)
5826 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5828 /* *(foo *)&complexfoo => __real__ complexfoo */
5829 else if (TREE_CODE (optype) == COMPLEX_TYPE
5830 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5831 return fold_build1 (REALPART_EXPR, type, op);
5832 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5833 else if (TREE_CODE (optype) == VECTOR_TYPE
5834 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5836 tree part_width = TYPE_SIZE (type);
5837 tree index = bitsize_int (0);
5838 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5842 /* *(p + CST) -> ... */
5843 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5844 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5846 tree addr = TREE_OPERAND (sub, 0);
5847 tree off = TREE_OPERAND (sub, 1);
5848 tree addrtype;
5850 STRIP_NOPS (addr);
5851 addrtype = TREE_TYPE (addr);
5853 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5854 if (TREE_CODE (addr) == ADDR_EXPR
5855 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5856 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5857 && tree_fits_uhwi_p (off))
5859 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5860 tree part_width = TYPE_SIZE (type);
5861 unsigned HOST_WIDE_INT part_widthi
5862 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5863 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5864 tree index = bitsize_int (indexi);
5865 if (offset / part_widthi
5866 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5867 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5868 part_width, index);
5871 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5872 if (TREE_CODE (addr) == ADDR_EXPR
5873 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5874 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5876 tree size = TYPE_SIZE_UNIT (type);
5877 if (tree_int_cst_equal (size, off))
5878 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5881 /* *(p + CST) -> MEM_REF <p, CST>. */
5882 if (TREE_CODE (addr) != ADDR_EXPR
5883 || DECL_P (TREE_OPERAND (addr, 0)))
5884 return fold_build2 (MEM_REF, type,
5885 addr,
5886 wide_int_to_tree (ptype, off));
5889 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5890 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5891 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5892 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5894 tree type_domain;
5895 tree min_val = size_zero_node;
5896 tree osub = sub;
5897 sub = gimple_fold_indirect_ref (sub);
5898 if (! sub)
5899 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5900 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5901 if (type_domain && TYPE_MIN_VALUE (type_domain))
5902 min_val = TYPE_MIN_VALUE (type_domain);
5903 if (TREE_CODE (min_val) == INTEGER_CST)
5904 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5907 return NULL_TREE;
5910 /* Return true if CODE is an operation that when operating on signed
5911 integer types involves undefined behavior on overflow and the
5912 operation can be expressed with unsigned arithmetic. */
5914 bool
5915 arith_code_with_undefined_signed_overflow (tree_code code)
5917 switch (code)
5919 case PLUS_EXPR:
5920 case MINUS_EXPR:
5921 case MULT_EXPR:
5922 case NEGATE_EXPR:
5923 case POINTER_PLUS_EXPR:
5924 return true;
5925 default:
5926 return false;
5930 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5931 operation that can be transformed to unsigned arithmetic by converting
5932 its operand, carrying out the operation in the corresponding unsigned
5933 type and converting the result back to the original type.
5935 Returns a sequence of statements that replace STMT and also contain
5936 a modified form of STMT itself. */
5938 gimple_seq
5939 rewrite_to_defined_overflow (gimple *stmt)
5941 if (dump_file && (dump_flags & TDF_DETAILS))
5943 fprintf (dump_file, "rewriting stmt with undefined signed "
5944 "overflow ");
5945 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5948 tree lhs = gimple_assign_lhs (stmt);
5949 tree type = unsigned_type_for (TREE_TYPE (lhs));
5950 gimple_seq stmts = NULL;
5951 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5953 tree op = gimple_op (stmt, i);
5954 op = gimple_convert (&stmts, type, op);
5955 gimple_set_op (stmt, i, op);
5957 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5958 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5959 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5960 gimple_seq_add_stmt (&stmts, stmt);
5961 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5962 gimple_seq_add_stmt (&stmts, cvt);
5964 return stmts;
5968 /* The valueization hook we use for the gimple_build API simplification.
5969 This makes us match fold_buildN behavior by only combining with
5970 statements in the sequence(s) we are currently building. */
5972 static tree
5973 gimple_build_valueize (tree op)
5975 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5976 return op;
5977 return NULL_TREE;
5980 /* Build the expression CODE OP0 of type TYPE with location LOC,
5981 simplifying it first if possible. Returns the built
5982 expression value and appends statements possibly defining it
5983 to SEQ. */
5985 tree
5986 gimple_build (gimple_seq *seq, location_t loc,
5987 enum tree_code code, tree type, tree op0)
5989 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5990 if (!res)
5992 if (gimple_in_ssa_p (cfun))
5993 res = make_ssa_name (type);
5994 else
5995 res = create_tmp_reg (type);
5996 gimple *stmt;
5997 if (code == REALPART_EXPR
5998 || code == IMAGPART_EXPR
5999 || code == VIEW_CONVERT_EXPR)
6000 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6001 else
6002 stmt = gimple_build_assign (res, code, op0);
6003 gimple_set_location (stmt, loc);
6004 gimple_seq_add_stmt_without_update (seq, stmt);
6006 return res;
6009 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6010 simplifying it first if possible. Returns the built
6011 expression value and appends statements possibly defining it
6012 to SEQ. */
6014 tree
6015 gimple_build (gimple_seq *seq, location_t loc,
6016 enum tree_code code, tree type, tree op0, tree op1)
6018 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6019 if (!res)
6021 if (gimple_in_ssa_p (cfun))
6022 res = make_ssa_name (type);
6023 else
6024 res = create_tmp_reg (type);
6025 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6026 gimple_set_location (stmt, loc);
6027 gimple_seq_add_stmt_without_update (seq, stmt);
6029 return res;
6032 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6033 simplifying it first if possible. Returns the built
6034 expression value and appends statements possibly defining it
6035 to SEQ. */
6037 tree
6038 gimple_build (gimple_seq *seq, location_t loc,
6039 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6041 tree res = gimple_simplify (code, type, op0, op1, op2,
6042 seq, gimple_build_valueize);
6043 if (!res)
6045 if (gimple_in_ssa_p (cfun))
6046 res = make_ssa_name (type);
6047 else
6048 res = create_tmp_reg (type);
6049 gimple *stmt;
6050 if (code == BIT_FIELD_REF)
6051 stmt = gimple_build_assign (res, code,
6052 build3 (code, type, op0, op1, op2));
6053 else
6054 stmt = gimple_build_assign (res, code, op0, op1, op2);
6055 gimple_set_location (stmt, loc);
6056 gimple_seq_add_stmt_without_update (seq, stmt);
6058 return res;
6061 /* Build the call FN (ARG0) with a result of type TYPE
6062 (or no result if TYPE is void) with location LOC,
6063 simplifying it first if possible. Returns the built
6064 expression value (or NULL_TREE if TYPE is void) and appends
6065 statements possibly defining it to SEQ. */
6067 tree
6068 gimple_build (gimple_seq *seq, location_t loc,
6069 enum built_in_function fn, tree type, tree arg0)
6071 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6072 if (!res)
6074 tree decl = builtin_decl_implicit (fn);
6075 gimple *stmt = gimple_build_call (decl, 1, arg0);
6076 if (!VOID_TYPE_P (type))
6078 if (gimple_in_ssa_p (cfun))
6079 res = make_ssa_name (type);
6080 else
6081 res = create_tmp_reg (type);
6082 gimple_call_set_lhs (stmt, res);
6084 gimple_set_location (stmt, loc);
6085 gimple_seq_add_stmt_without_update (seq, stmt);
6087 return res;
6090 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6091 (or no result if TYPE is void) with location LOC,
6092 simplifying it first if possible. Returns the built
6093 expression value (or NULL_TREE if TYPE is void) and appends
6094 statements possibly defining it to SEQ. */
6096 tree
6097 gimple_build (gimple_seq *seq, location_t loc,
6098 enum built_in_function fn, tree type, tree arg0, tree arg1)
6100 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6101 if (!res)
6103 tree decl = builtin_decl_implicit (fn);
6104 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6105 if (!VOID_TYPE_P (type))
6107 if (gimple_in_ssa_p (cfun))
6108 res = make_ssa_name (type);
6109 else
6110 res = create_tmp_reg (type);
6111 gimple_call_set_lhs (stmt, res);
6113 gimple_set_location (stmt, loc);
6114 gimple_seq_add_stmt_without_update (seq, stmt);
6116 return res;
6119 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6120 (or no result if TYPE is void) with location LOC,
6121 simplifying it first if possible. Returns the built
6122 expression value (or NULL_TREE if TYPE is void) and appends
6123 statements possibly defining it to SEQ. */
6125 tree
6126 gimple_build (gimple_seq *seq, location_t loc,
6127 enum built_in_function fn, tree type,
6128 tree arg0, tree arg1, tree arg2)
6130 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6131 seq, gimple_build_valueize);
6132 if (!res)
6134 tree decl = builtin_decl_implicit (fn);
6135 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6136 if (!VOID_TYPE_P (type))
6138 if (gimple_in_ssa_p (cfun))
6139 res = make_ssa_name (type);
6140 else
6141 res = create_tmp_reg (type);
6142 gimple_call_set_lhs (stmt, res);
6144 gimple_set_location (stmt, loc);
6145 gimple_seq_add_stmt_without_update (seq, stmt);
6147 return res;
6150 /* Build the conversion (TYPE) OP with a result of type TYPE
6151 with location LOC if such conversion is neccesary in GIMPLE,
6152 simplifying it first.
6153 Returns the built expression value and appends
6154 statements possibly defining it to SEQ. */
6156 tree
6157 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6159 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6160 return op;
6161 return gimple_build (seq, loc, NOP_EXPR, type, op);
6164 /* Build the conversion (ptrofftype) OP with a result of a type
6165 compatible with ptrofftype with location LOC if such conversion
6166 is neccesary in GIMPLE, simplifying it first.
6167 Returns the built expression value and appends
6168 statements possibly defining it to SEQ. */
6170 tree
6171 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6173 if (ptrofftype_p (TREE_TYPE (op)))
6174 return op;
6175 return gimple_convert (seq, loc, sizetype, op);
6178 /* Return true if the result of assignment STMT is known to be non-negative.
6179 If the return value is based on the assumption that signed overflow is
6180 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6181 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6183 static bool
6184 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6185 int depth)
6187 enum tree_code code = gimple_assign_rhs_code (stmt);
6188 switch (get_gimple_rhs_class (code))
6190 case GIMPLE_UNARY_RHS:
6191 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6192 gimple_expr_type (stmt),
6193 gimple_assign_rhs1 (stmt),
6194 strict_overflow_p, depth);
6195 case GIMPLE_BINARY_RHS:
6196 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6197 gimple_expr_type (stmt),
6198 gimple_assign_rhs1 (stmt),
6199 gimple_assign_rhs2 (stmt),
6200 strict_overflow_p, depth);
6201 case GIMPLE_TERNARY_RHS:
6202 return false;
6203 case GIMPLE_SINGLE_RHS:
6204 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6205 strict_overflow_p, depth);
6206 case GIMPLE_INVALID_RHS:
6207 break;
6209 gcc_unreachable ();
6212 /* Return true if return value of call STMT is known to be non-negative.
6213 If the return value is based on the assumption that signed overflow is
6214 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6215 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6217 static bool
6218 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6219 int depth)
6221 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6222 gimple_call_arg (stmt, 0) : NULL_TREE;
6223 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6224 gimple_call_arg (stmt, 1) : NULL_TREE;
6226 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6227 gimple_call_combined_fn (stmt),
6228 arg0,
6229 arg1,
6230 strict_overflow_p, depth);
6233 /* Return true if return value of call STMT is known to be non-negative.
6234 If the return value is based on the assumption that signed overflow is
6235 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6236 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6238 static bool
6239 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6240 int depth)
6242 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6244 tree arg = gimple_phi_arg_def (stmt, i);
6245 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6246 return false;
6248 return true;
6251 /* Return true if STMT is known to compute a non-negative value.
6252 If the return value is based on the assumption that signed overflow is
6253 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6254 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6256 bool
6257 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6258 int depth)
6260 switch (gimple_code (stmt))
6262 case GIMPLE_ASSIGN:
6263 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6264 depth);
6265 case GIMPLE_CALL:
6266 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6267 depth);
6268 case GIMPLE_PHI:
6269 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6270 depth);
6271 default:
6272 return false;
6276 /* Return true if the floating-point value computed by assignment STMT
6277 is known to have an integer value. We also allow +Inf, -Inf and NaN
6278 to be considered integer values. Return false for signaling NaN.
6280 DEPTH is the current nesting depth of the query. */
6282 static bool
6283 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6285 enum tree_code code = gimple_assign_rhs_code (stmt);
6286 switch (get_gimple_rhs_class (code))
6288 case GIMPLE_UNARY_RHS:
6289 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6290 gimple_assign_rhs1 (stmt), depth);
6291 case GIMPLE_BINARY_RHS:
6292 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6293 gimple_assign_rhs1 (stmt),
6294 gimple_assign_rhs2 (stmt), depth);
6295 case GIMPLE_TERNARY_RHS:
6296 return false;
6297 case GIMPLE_SINGLE_RHS:
6298 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6299 case GIMPLE_INVALID_RHS:
6300 break;
6302 gcc_unreachable ();
6305 /* Return true if the floating-point value computed by call STMT is known
6306 to have an integer value. We also allow +Inf, -Inf and NaN to be
6307 considered integer values. Return false for signaling NaN.
6309 DEPTH is the current nesting depth of the query. */
6311 static bool
6312 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6314 tree arg0 = (gimple_call_num_args (stmt) > 0
6315 ? gimple_call_arg (stmt, 0)
6316 : NULL_TREE);
6317 tree arg1 = (gimple_call_num_args (stmt) > 1
6318 ? gimple_call_arg (stmt, 1)
6319 : NULL_TREE);
6320 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6321 arg0, arg1, depth);
6324 /* Return true if the floating-point result of phi STMT is known to have
6325 an integer value. We also allow +Inf, -Inf and NaN to be considered
6326 integer values. Return false for signaling NaN.
6328 DEPTH is the current nesting depth of the query. */
6330 static bool
6331 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6333 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6335 tree arg = gimple_phi_arg_def (stmt, i);
6336 if (!integer_valued_real_single_p (arg, depth + 1))
6337 return false;
6339 return true;
6342 /* Return true if the floating-point value computed by STMT is known
6343 to have an integer value. We also allow +Inf, -Inf and NaN to be
6344 considered integer values. Return false for signaling NaN.
6346 DEPTH is the current nesting depth of the query. */
6348 bool
6349 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6351 switch (gimple_code (stmt))
6353 case GIMPLE_ASSIGN:
6354 return gimple_assign_integer_valued_real_p (stmt, depth);
6355 case GIMPLE_CALL:
6356 return gimple_call_integer_valued_real_p (stmt, depth);
6357 case GIMPLE_PHI:
6358 return gimple_phi_integer_valued_real_p (stmt, depth);
6359 default:
6360 return false;