2016-01-26 Paul Thomas <pault@gcc.gnu.org>
[official-gcc.git] / gcc / gimple-fold.c
blobeb130d048469f0b8196e565fed9a40de74b098bd
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. debug-only) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  /* A decl inlined into its single caller cannot be referenced anymore.  */
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn &X p+ CST into &MEM[&X, CST] so the result is a plain ADDR_EXPR.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl cannot be used from this unit.  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* error_mark_node means the initializer may not be used for folding.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
252 /* Subroutine of fold_stmt. We perform several simplifications of the
253 memory reference tree EXPR and make sure to re-gimplify them properly
254 after propagation of constant addresses. IS_LHS is true if the
255 reference is supposed to be an lvalue. */
257 static tree
258 maybe_fold_reference (tree expr, bool is_lhs)
260 tree result;
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
267 TREE_CODE (expr),
268 TREE_TYPE (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
279 if (!is_lhs
280 && (result = fold_const_aggregate_ref (expr))
281 && is_gimple_min_invariant (result))
282 return result;
284 return NULL_TREE;
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the complete set of targets is
		   known and is at most a single method.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it is a store or a call that may
	 clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original statement's
	     VDEF; earlier stores get fresh virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the call's LHS
	 as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
558 /* Replace the call at *GSI with the gimple value VAL. */
560 static void
561 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
563 gimple *stmt = gsi_stmt (*gsi);
564 tree lhs = gimple_call_lhs (stmt);
565 gimple *repl;
566 if (lhs)
568 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
569 val = fold_convert (TREE_TYPE (lhs), val);
570 repl = gimple_build_assign (lhs, val);
572 else
573 repl = gimple_build_nop ();
574 tree vdef = gimple_vdef (stmt);
575 if (vdef && TREE_CODE (vdef) == SSA_NAME)
577 unlink_stmt_vdef (stmt);
578 release_ssa_name (vdef);
580 gsi_replace (gsi, repl, false);
583 /* Replace the call at *GSI with the new call REPL and fold that
584 again. */
586 static void
587 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
589 gimple *stmt = gsi_stmt (*gsi);
590 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
591 gimple_set_location (repl, gimple_location (stmt));
592 if (gimple_vdef (stmt)
593 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
595 gimple_set_vdef (repl, gimple_vdef (stmt));
596 gimple_set_vuse (repl, gimple_vuse (stmt));
597 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
599 gsi_replace (gsi, repl, false);
600 fold_stmt (gsi);
603 /* Return true if VAR is a VAR_DECL or a component thereof. */
605 static bool
606 var_decl_component_p (tree var)
608 tree inner = var;
609 while (handled_component_p (inner))
610 inner = TREE_OPERAND (inner, 0);
611 return SSA_VAR_P (inner);
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds lost (if we copy
	 pointers as wide integer) and also may result in huge function
	 size because of inlined bounds copy.  Thus don't inline for
	 functions we want to instrument.  */
      if (flag_check_pointer_bounds
	  && chkp_instrumentable_p (cfun->decl)
	  /* Even if data may contain pointers we can inline if copy
	     less than a pointer size.  */
	  && (!tree_fits_uhwi_p (len)
	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
	return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ??? Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  /* Only power-of-two sizes map to a single machine mode.  */
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
		      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* Give up when an unaligned load would be needed but the
		     target cannot do it.  */
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align)
			   && (optab_handler (movmisalign_optab,
					      TYPE_MODE (type))
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      /* Load into a register first if the type permits.  */
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      /* endp == 3 is memmove; see if it can be turned into memcpy.  */
      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;
	      bool reverse;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize, &reverse);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize, &reverse);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Emit the copy as a plain (possibly register-mediated) assignment.  */
      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Compute the value for the LHS: DEST, DEST+LEN or DEST+LEN-1
     depending on ENDP.  */
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1065 /* Fold function call to builtin memset or bzero at *GSI setting the
1066 memory of size LEN to VAL. Return whether a simplification was made. */
1068 static bool
1069 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1071 gimple *stmt = gsi_stmt (*gsi);
1072 tree etype;
1073 unsigned HOST_WIDE_INT length, cval;
1075 /* If the LEN parameter is zero, return DEST. */
1076 if (integer_zerop (len))
1078 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1079 return true;
1082 if (! tree_fits_uhwi_p (len))
1083 return false;
1085 if (TREE_CODE (c) != INTEGER_CST)
1086 return false;
1088 tree dest = gimple_call_arg (stmt, 0);
1089 tree var = dest;
1090 if (TREE_CODE (var) != ADDR_EXPR)
1091 return false;
1093 var = TREE_OPERAND (var, 0);
1094 if (TREE_THIS_VOLATILE (var))
1095 return false;
1097 etype = TREE_TYPE (var);
1098 if (TREE_CODE (etype) == ARRAY_TYPE)
1099 etype = TREE_TYPE (etype);
1101 if (!INTEGRAL_TYPE_P (etype)
1102 && !POINTER_TYPE_P (etype))
1103 return NULL_TREE;
1105 if (! var_decl_component_p (var))
1106 return NULL_TREE;
1108 length = tree_to_uhwi (len);
1109 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1110 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1111 return NULL_TREE;
1113 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1114 return NULL_TREE;
1116 if (integer_zerop (c))
1117 cval = 0;
1118 else
1120 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1121 return NULL_TREE;
1123 cval = TREE_INT_CST_LOW (c);
1124 cval &= 0xff;
1125 cval |= cval << 8;
1126 cval |= cval << 16;
1127 cval |= (cval << 31) << 1;
1130 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1131 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1132 gimple_set_vuse (store, gimple_vuse (stmt));
1133 tree vdef = gimple_vdef (stmt);
1134 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1136 gimple_set_vdef (store, gimple_vdef (stmt));
1137 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1139 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1140 if (gimple_call_lhs (stmt))
1142 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1143 gsi_replace (gsi, asgn, false);
1145 else
1147 gimple_stmt_iterator gsi2 = *gsi;
1148 gsi_prev (gsi);
1149 gsi_remove (&gsi2, true);
1152 return true;
1156 /* Return the string length, maximum string length or maximum value of
1157 ARG in LENGTH.
1158 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1159 is not NULL and, for TYPE == 0, its value is not equal to the length
1160 we determine or if we are unable to determine the length or value,
1161 return false. VISITED is a bitmap of visited variables.
1162 TYPE is 0 if string length should be returned, 1 for maximum string
1163 length and 2 for maximum value ARG can have. */
1165 static bool
1166 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1168 tree var, val;
1169 gimple *def_stmt;
1171 if (TREE_CODE (arg) != SSA_NAME)
1173 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1174 if (TREE_CODE (arg) == ADDR_EXPR
1175 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1176 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1178 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1179 if (TREE_CODE (aop0) == INDIRECT_REF
1180 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1181 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1182 length, visited, type);
1185 if (type == 2)
1187 val = arg;
1188 if (TREE_CODE (val) != INTEGER_CST
1189 || tree_int_cst_sgn (val) < 0)
1190 return false;
1192 else
1193 val = c_strlen (arg, 1);
1194 if (!val)
1195 return false;
1197 if (*length)
1199 if (type > 0)
1201 if (TREE_CODE (*length) != INTEGER_CST
1202 || TREE_CODE (val) != INTEGER_CST)
1203 return false;
1205 if (tree_int_cst_lt (*length, val))
1206 *length = val;
1207 return true;
1209 else if (simple_cst_equal (val, *length) != 1)
1210 return false;
1213 *length = val;
1214 return true;
1217 /* If ARG is registered for SSA update we cannot look at its defining
1218 statement. */
1219 if (name_registered_for_update_p (arg))
1220 return false;
1222 /* If we were already here, break the infinite cycle. */
1223 if (!*visited)
1224 *visited = BITMAP_ALLOC (NULL);
1225 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1226 return true;
1228 var = arg;
1229 def_stmt = SSA_NAME_DEF_STMT (var);
1231 switch (gimple_code (def_stmt))
1233 case GIMPLE_ASSIGN:
1234 /* The RHS of the statement defining VAR must either have a
1235 constant length or come from another SSA_NAME with a constant
1236 length. */
1237 if (gimple_assign_single_p (def_stmt)
1238 || gimple_assign_unary_nop_p (def_stmt))
1240 tree rhs = gimple_assign_rhs1 (def_stmt);
1241 return get_maxval_strlen (rhs, length, visited, type);
1243 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1245 tree op2 = gimple_assign_rhs2 (def_stmt);
1246 tree op3 = gimple_assign_rhs3 (def_stmt);
1247 return get_maxval_strlen (op2, length, visited, type)
1248 && get_maxval_strlen (op3, length, visited, type);
1250 return false;
1252 case GIMPLE_PHI:
1254 /* All the arguments of the PHI node must have the same constant
1255 length. */
1256 unsigned i;
1258 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1260 tree arg = gimple_phi_arg (def_stmt, i)->def;
1262 /* If this PHI has itself as an argument, we cannot
1263 determine the string length of this argument. However,
1264 if we can find a constant string length for the other
1265 PHI args then we can still be sure that this is a
1266 constant string length. So be optimistic and just
1267 continue with the next argument. */
1268 if (arg == gimple_phi_result (def_stmt))
1269 continue;
1271 if (!get_maxval_strlen (arg, length, visited, type))
1272 return false;
1275 return true;
1277 default:
1278 return false;
1282 tree
1283 get_maxval_strlen (tree arg, int type)
1285 bitmap visited = NULL;
1286 tree len = NULL_TREE;
1287 if (!get_maxval_strlen (arg, &len, &visited, type))
1288 len = NULL_TREE;
1289 if (visited)
1290 BITMAP_FREE (visited);
1292 return len;
1296 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1297 If LEN is not NULL, it represents the length of the string to be
1298 copied. Return NULL_TREE if no simplification can be made. */
1300 static bool
1301 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1302 tree dest, tree src)
1304 location_t loc = gimple_location (gsi_stmt (*gsi));
1305 tree fn;
1307 /* If SRC and DEST are the same (and not volatile), return DEST. */
1308 if (operand_equal_p (src, dest, 0))
1310 replace_call_with_value (gsi, dest);
1311 return true;
1314 if (optimize_function_for_size_p (cfun))
1315 return false;
1317 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1318 if (!fn)
1319 return false;
1321 tree len = get_maxval_strlen (src, 0);
1322 if (!len)
1323 return false;
1325 len = fold_convert_loc (loc, size_type_node, len);
1326 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1327 len = force_gimple_operand_gsi (gsi, len, true,
1328 NULL_TREE, true, GSI_SAME_STMT);
1329 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1330 replace_call_with_call_and_fold (gsi, repl);
1331 return true;
1334 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1335 If SLEN is not NULL, it represents the length of the source string.
1336 Return NULL_TREE if no simplification can be made. */
1338 static bool
1339 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1340 tree dest, tree src, tree len)
1342 location_t loc = gimple_location (gsi_stmt (*gsi));
1343 tree fn;
1345 /* If the LEN parameter is zero, return DEST. */
1346 if (integer_zerop (len))
1348 replace_call_with_value (gsi, dest);
1349 return true;
1352 /* We can't compare slen with len as constants below if len is not a
1353 constant. */
1354 if (TREE_CODE (len) != INTEGER_CST)
1355 return false;
1357 /* Now, we must be passed a constant src ptr parameter. */
1358 tree slen = get_maxval_strlen (src, 0);
1359 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1360 return false;
1362 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1364 /* We do not support simplification of this case, though we do
1365 support it when expanding trees into RTL. */
1366 /* FIXME: generate a call to __builtin_memset. */
1367 if (tree_int_cst_lt (slen, len))
1368 return false;
1370 /* OK transform into builtin memcpy. */
1371 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1372 if (!fn)
1373 return false;
1375 len = fold_convert_loc (loc, size_type_node, len);
1376 len = force_gimple_operand_gsi (gsi, len, true,
1377 NULL_TREE, true, GSI_SAME_STMT);
1378 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1379 replace_call_with_call_and_fold (gsi, repl);
1380 return true;
1383 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1384 to the call.
1386 Return NULL_TREE if no simplification was possible, otherwise return the
1387 simplified form of the call as a tree.
1389 The simplified form may be a constant or other expression which
1390 computes the same value, but in a more efficient manner (including
1391 calls to other builtin functions).
1393 The call may contain arguments which need to be evaluated, but
1394 which are not useful to determine the result of the call. In
1395 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1396 COMPOUND_EXPR will be an argument which must be evaluated.
1397 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1398 COMPOUND_EXPR in the chain will contain the tree for the simplified
1399 form of the builtin function call. */
1401 static bool
1402 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1404 gimple *stmt = gsi_stmt (*gsi);
1405 location_t loc = gimple_location (stmt);
1407 const char *p = c_getstr (src);
1409 /* If the string length is zero, return the dst parameter. */
1410 if (p && *p == '\0')
1412 replace_call_with_value (gsi, dst);
1413 return true;
1416 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1417 return false;
1419 /* See if we can store by pieces into (dst + strlen(dst)). */
1420 tree newdst;
1421 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1422 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1424 if (!strlen_fn || !memcpy_fn)
1425 return false;
1427 /* If the length of the source string isn't computable don't
1428 split strcat into strlen and memcpy. */
1429 tree len = get_maxval_strlen (src, 0);
1430 if (! len)
1431 return false;
1433 /* Create strlen (dst). */
1434 gimple_seq stmts = NULL, stmts2;
1435 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1436 gimple_set_location (repl, loc);
1437 if (gimple_in_ssa_p (cfun))
1438 newdst = make_ssa_name (size_type_node);
1439 else
1440 newdst = create_tmp_reg (size_type_node);
1441 gimple_call_set_lhs (repl, newdst);
1442 gimple_seq_add_stmt_without_update (&stmts, repl);
1444 /* Create (dst p+ strlen (dst)). */
1445 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1446 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1447 gimple_seq_add_seq_without_update (&stmts, stmts2);
1449 len = fold_convert_loc (loc, size_type_node, len);
1450 len = size_binop_loc (loc, PLUS_EXPR, len,
1451 build_int_cst (size_type_node, 1));
1452 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1453 gimple_seq_add_seq_without_update (&stmts, stmts2);
1455 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1456 gimple_seq_add_stmt_without_update (&stmts, repl);
1457 if (gimple_call_lhs (stmt))
1459 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1460 gimple_seq_add_stmt_without_update (&stmts, repl);
1461 gsi_replace_with_seq_vops (gsi, stmts);
1462 /* gsi now points at the assignment to the lhs, get a
1463 stmt iterator to the memcpy call.
1464 ??? We can't use gsi_for_stmt as that doesn't work when the
1465 CFG isn't built yet. */
1466 gimple_stmt_iterator gsi2 = *gsi;
1467 gsi_prev (&gsi2);
1468 fold_stmt (&gsi2);
1470 else
1472 gsi_replace_with_seq_vops (gsi, stmts);
1473 fold_stmt (gsi);
1475 return true;
1478 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1479 are the arguments to the call. */
1481 static bool
1482 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1484 gimple *stmt = gsi_stmt (*gsi);
1485 tree dest = gimple_call_arg (stmt, 0);
1486 tree src = gimple_call_arg (stmt, 1);
1487 tree size = gimple_call_arg (stmt, 2);
1488 tree fn;
1489 const char *p;
1492 p = c_getstr (src);
1493 /* If the SRC parameter is "", return DEST. */
1494 if (p && *p == '\0')
1496 replace_call_with_value (gsi, dest);
1497 return true;
1500 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1501 return false;
1503 /* If __builtin_strcat_chk is used, assume strcat is available. */
1504 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1505 if (!fn)
1506 return false;
1508 gimple *repl = gimple_build_call (fn, 2, dest, src);
1509 replace_call_with_call_and_fold (gsi, repl);
1510 return true;
1513 /* Simplify a call to the strncat builtin. */
1515 static bool
1516 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1518 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1519 tree dst = gimple_call_arg (stmt, 0);
1520 tree src = gimple_call_arg (stmt, 1);
1521 tree len = gimple_call_arg (stmt, 2);
1523 const char *p = c_getstr (src);
1525 /* If the requested length is zero, or the src parameter string
1526 length is zero, return the dst parameter. */
1527 if (integer_zerop (len) || (p && *p == '\0'))
1529 replace_call_with_value (gsi, dst);
1530 return true;
1533 /* If the requested len is greater than or equal to the string
1534 length, call strcat. */
1535 if (TREE_CODE (len) == INTEGER_CST && p
1536 && compare_tree_int (len, strlen (p)) >= 0)
1538 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1540 /* If the replacement _DECL isn't initialized, don't do the
1541 transformation. */
1542 if (!fn)
1543 return false;
1545 gcall *repl = gimple_build_call (fn, 2, dst, src);
1546 replace_call_with_call_and_fold (gsi, repl);
1547 return true;
1550 return false;
1553 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1554 LEN, and SIZE. */
1556 static bool
1557 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1559 gimple *stmt = gsi_stmt (*gsi);
1560 tree dest = gimple_call_arg (stmt, 0);
1561 tree src = gimple_call_arg (stmt, 1);
1562 tree len = gimple_call_arg (stmt, 2);
1563 tree size = gimple_call_arg (stmt, 3);
1564 tree fn;
1565 const char *p;
1567 p = c_getstr (src);
1568 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1569 if ((p && *p == '\0')
1570 || integer_zerop (len))
1572 replace_call_with_value (gsi, dest);
1573 return true;
1576 if (! tree_fits_uhwi_p (size))
1577 return false;
1579 if (! integer_all_onesp (size))
1581 tree src_len = c_strlen (src, 1);
1582 if (src_len
1583 && tree_fits_uhwi_p (src_len)
1584 && tree_fits_uhwi_p (len)
1585 && ! tree_int_cst_lt (len, src_len))
1587 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1588 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1589 if (!fn)
1590 return false;
1592 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1593 replace_call_with_call_and_fold (gsi, repl);
1594 return true;
1596 return false;
1599 /* If __builtin_strncat_chk is used, assume strncat is available. */
1600 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1601 if (!fn)
1602 return false;
1604 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1605 replace_call_with_call_and_fold (gsi, repl);
1606 return true;
1609 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1610 to the call. IGNORE is true if the value returned
1611 by the builtin will be ignored. UNLOCKED is true is true if this
1612 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1613 the known length of the string. Return NULL_TREE if no simplification
1614 was possible. */
1616 static bool
1617 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1618 tree arg0, tree arg1,
1619 bool unlocked)
1621 gimple *stmt = gsi_stmt (*gsi);
1623 /* If we're using an unlocked function, assume the other unlocked
1624 functions exist explicitly. */
1625 tree const fn_fputc = (unlocked
1626 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1627 : builtin_decl_implicit (BUILT_IN_FPUTC));
1628 tree const fn_fwrite = (unlocked
1629 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1630 : builtin_decl_implicit (BUILT_IN_FWRITE));
1632 /* If the return value is used, don't do the transformation. */
1633 if (gimple_call_lhs (stmt))
1634 return false;
1636 /* Get the length of the string passed to fputs. If the length
1637 can't be determined, punt. */
1638 tree len = get_maxval_strlen (arg0, 0);
1639 if (!len
1640 || TREE_CODE (len) != INTEGER_CST)
1641 return false;
1643 switch (compare_tree_int (len, 1))
1645 case -1: /* length is 0, delete the call entirely . */
1646 replace_call_with_value (gsi, integer_zero_node);
1647 return true;
1649 case 0: /* length is 1, call fputc. */
1651 const char *p = c_getstr (arg0);
1652 if (p != NULL)
1654 if (!fn_fputc)
1655 return false;
1657 gimple *repl = gimple_build_call (fn_fputc, 2,
1658 build_int_cst
1659 (integer_type_node, p[0]), arg1);
1660 replace_call_with_call_and_fold (gsi, repl);
1661 return true;
1664 /* FALLTHROUGH */
1665 case 1: /* length is greater than 1, call fwrite. */
1667 /* If optimizing for size keep fputs. */
1668 if (optimize_function_for_size_p (cfun))
1669 return false;
1670 /* New argument list transforming fputs(string, stream) to
1671 fwrite(string, 1, len, stream). */
1672 if (!fn_fwrite)
1673 return false;
1675 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1676 size_one_node, len, arg1);
1677 replace_call_with_call_and_fold (gsi, repl);
1678 return true;
1680 default:
1681 gcc_unreachable ();
1683 return false;
1686 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1687 DEST, SRC, LEN, and SIZE are the arguments to the call.
1688 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1689 code of the builtin. If MAXLEN is not NULL, it is maximum length
1690 passed as third argument. */
1692 static bool
1693 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1694 tree dest, tree src, tree len, tree size,
1695 enum built_in_function fcode)
1697 gimple *stmt = gsi_stmt (*gsi);
1698 location_t loc = gimple_location (stmt);
1699 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1700 tree fn;
1702 /* If SRC and DEST are the same (and not volatile), return DEST
1703 (resp. DEST+LEN for __mempcpy_chk). */
1704 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1706 if (fcode != BUILT_IN_MEMPCPY_CHK)
1708 replace_call_with_value (gsi, dest);
1709 return true;
1711 else
1713 gimple_seq stmts = NULL;
1714 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1715 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1716 TREE_TYPE (dest), dest, len);
1717 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1718 replace_call_with_value (gsi, temp);
1719 return true;
1723 if (! tree_fits_uhwi_p (size))
1724 return false;
1726 tree maxlen = get_maxval_strlen (len, 2);
1727 if (! integer_all_onesp (size))
1729 if (! tree_fits_uhwi_p (len))
1731 /* If LEN is not constant, try MAXLEN too.
1732 For MAXLEN only allow optimizing into non-_ocs function
1733 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1734 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1736 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1738 /* (void) __mempcpy_chk () can be optimized into
1739 (void) __memcpy_chk (). */
1740 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1741 if (!fn)
1742 return false;
1744 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1745 replace_call_with_call_and_fold (gsi, repl);
1746 return true;
1748 return false;
1751 else
1752 maxlen = len;
1754 if (tree_int_cst_lt (size, maxlen))
1755 return false;
1758 fn = NULL_TREE;
1759 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1760 mem{cpy,pcpy,move,set} is available. */
1761 switch (fcode)
1763 case BUILT_IN_MEMCPY_CHK:
1764 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1765 break;
1766 case BUILT_IN_MEMPCPY_CHK:
1767 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1768 break;
1769 case BUILT_IN_MEMMOVE_CHK:
1770 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1771 break;
1772 case BUILT_IN_MEMSET_CHK:
1773 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1774 break;
1775 default:
1776 break;
1779 if (!fn)
1780 return false;
1782 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1783 replace_call_with_call_and_fold (gsi, repl);
1784 return true;
1787 /* Fold a call to the __st[rp]cpy_chk builtin.
1788 DEST, SRC, and SIZE are the arguments to the call.
1789 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1790 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1791 strings passed as second argument. */
1793 static bool
1794 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1795 tree dest,
1796 tree src, tree size,
1797 enum built_in_function fcode)
1799 gimple *stmt = gsi_stmt (*gsi);
1800 location_t loc = gimple_location (stmt);
1801 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1802 tree len, fn;
1804 /* If SRC and DEST are the same (and not volatile), return DEST. */
1805 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1807 replace_call_with_value (gsi, dest);
1808 return true;
1811 if (! tree_fits_uhwi_p (size))
1812 return false;
1814 tree maxlen = get_maxval_strlen (src, 1);
1815 if (! integer_all_onesp (size))
1817 len = c_strlen (src, 1);
1818 if (! len || ! tree_fits_uhwi_p (len))
1820 /* If LEN is not constant, try MAXLEN too.
1821 For MAXLEN only allow optimizing into non-_ocs function
1822 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1823 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1825 if (fcode == BUILT_IN_STPCPY_CHK)
1827 if (! ignore)
1828 return false;
1830 /* If return value of __stpcpy_chk is ignored,
1831 optimize into __strcpy_chk. */
1832 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1833 if (!fn)
1834 return false;
1836 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1837 replace_call_with_call_and_fold (gsi, repl);
1838 return true;
1841 if (! len || TREE_SIDE_EFFECTS (len))
1842 return false;
1844 /* If c_strlen returned something, but not a constant,
1845 transform __strcpy_chk into __memcpy_chk. */
1846 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1847 if (!fn)
1848 return false;
1850 gimple_seq stmts = NULL;
1851 len = gimple_convert (&stmts, loc, size_type_node, len);
1852 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1853 build_int_cst (size_type_node, 1));
1854 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1855 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1856 replace_call_with_call_and_fold (gsi, repl);
1857 return true;
1860 else
1861 maxlen = len;
1863 if (! tree_int_cst_lt (maxlen, size))
1864 return false;
1867 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1868 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1869 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1870 if (!fn)
1871 return false;
1873 gimple *repl = gimple_build_call (fn, 2, dest, src);
1874 replace_call_with_call_and_fold (gsi, repl);
1875 return true;
1878 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1879 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1880 length passed as third argument. IGNORE is true if return value can be
1881 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1883 static bool
1884 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1885 tree dest, tree src,
1886 tree len, tree size,
1887 enum built_in_function fcode)
1889 gimple *stmt = gsi_stmt (*gsi);
1890 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1891 tree fn;
1893 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1895 /* If return value of __stpncpy_chk is ignored,
1896 optimize into __strncpy_chk. */
1897 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1898 if (fn)
1900 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1901 replace_call_with_call_and_fold (gsi, repl);
1902 return true;
1906 if (! tree_fits_uhwi_p (size))
1907 return false;
1909 tree maxlen = get_maxval_strlen (len, 2);
1910 if (! integer_all_onesp (size))
1912 if (! tree_fits_uhwi_p (len))
1914 /* If LEN is not constant, try MAXLEN too.
1915 For MAXLEN only allow optimizing into non-_ocs function
1916 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1917 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1918 return false;
1920 else
1921 maxlen = len;
1923 if (tree_int_cst_lt (size, maxlen))
1924 return false;
1927 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1928 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1929 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1930 if (!fn)
1931 return false;
1933 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1934 replace_call_with_call_and_fold (gsi, repl);
1935 return true;
1938 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1939 Return NULL_TREE if no simplification can be made. */
1941 static bool
1942 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1944 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1945 location_t loc = gimple_location (stmt);
1946 tree dest = gimple_call_arg (stmt, 0);
1947 tree src = gimple_call_arg (stmt, 1);
1948 tree fn, len, lenp1;
1950 /* If the result is unused, replace stpcpy with strcpy. */
1951 if (gimple_call_lhs (stmt) == NULL_TREE)
1953 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1954 if (!fn)
1955 return false;
1956 gimple_call_set_fndecl (stmt, fn);
1957 fold_stmt (gsi);
1958 return true;
1961 len = c_strlen (src, 1);
1962 if (!len
1963 || TREE_CODE (len) != INTEGER_CST)
1964 return false;
1966 if (optimize_function_for_size_p (cfun)
1967 /* If length is zero it's small enough. */
1968 && !integer_zerop (len))
1969 return false;
1971 /* If the source has a known length replace stpcpy with memcpy. */
1972 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1973 if (!fn)
1974 return false;
1976 gimple_seq stmts = NULL;
1977 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1978 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1979 tem, build_int_cst (size_type_node, 1));
1980 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1981 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1982 gimple_set_vuse (repl, gimple_vuse (stmt));
1983 gimple_set_vdef (repl, gimple_vdef (stmt));
1984 if (gimple_vdef (repl)
1985 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1986 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1987 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1988 /* Replace the result with dest + len. */
1989 stmts = NULL;
1990 tem = gimple_convert (&stmts, loc, sizetype, len);
1991 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1992 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1993 POINTER_PLUS_EXPR, dest, tem);
1994 gsi_replace (gsi, ret, false);
1995 /* Finally fold the memcpy call. */
1996 gimple_stmt_iterator gsi2 = *gsi;
1997 gsi_prev (&gsi2);
1998 fold_stmt (&gsi2);
1999 return true;
2002 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2003 NULL_TREE if a normal call should be emitted rather than expanding
2004 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2005 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2006 passed as second argument. */
2008 static bool
2009 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2010 enum built_in_function fcode)
2012 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2013 tree dest, size, len, fn, fmt, flag;
2014 const char *fmt_str;
2016 /* Verify the required arguments in the original call. */
2017 if (gimple_call_num_args (stmt) < 5)
2018 return false;
2020 dest = gimple_call_arg (stmt, 0);
2021 len = gimple_call_arg (stmt, 1);
2022 flag = gimple_call_arg (stmt, 2);
2023 size = gimple_call_arg (stmt, 3);
2024 fmt = gimple_call_arg (stmt, 4);
2026 if (! tree_fits_uhwi_p (size))
2027 return false;
2029 if (! integer_all_onesp (size))
2031 tree maxlen = get_maxval_strlen (len, 2);
2032 if (! tree_fits_uhwi_p (len))
2034 /* If LEN is not constant, try MAXLEN too.
2035 For MAXLEN only allow optimizing into non-_ocs function
2036 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2037 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2038 return false;
2040 else
2041 maxlen = len;
2043 if (tree_int_cst_lt (size, maxlen))
2044 return false;
2047 if (!init_target_chars ())
2048 return false;
2050 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2051 or if format doesn't contain % chars or is "%s". */
2052 if (! integer_zerop (flag))
2054 fmt_str = c_getstr (fmt);
2055 if (fmt_str == NULL)
2056 return false;
2057 if (strchr (fmt_str, target_percent) != NULL
2058 && strcmp (fmt_str, target_percent_s))
2059 return false;
2062 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2063 available. */
2064 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2065 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2066 if (!fn)
2067 return false;
2069 /* Replace the called function and the first 5 argument by 3 retaining
2070 trailing varargs. */
2071 gimple_call_set_fndecl (stmt, fn);
2072 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2073 gimple_call_set_arg (stmt, 0, dest);
2074 gimple_call_set_arg (stmt, 1, len);
2075 gimple_call_set_arg (stmt, 2, fmt);
2076 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2077 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2078 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2079 fold_stmt (gsi);
2080 return true;
2083 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2084 Return NULL_TREE if a normal call should be emitted rather than
2085 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2086 or BUILT_IN_VSPRINTF_CHK. */
2088 static bool
2089 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2090 enum built_in_function fcode)
2092 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2093 tree dest, size, len, fn, fmt, flag;
2094 const char *fmt_str;
2095 unsigned nargs = gimple_call_num_args (stmt);
2097 /* Verify the required arguments in the original call. */
2098 if (nargs < 4)
2099 return false;
2100 dest = gimple_call_arg (stmt, 0);
2101 flag = gimple_call_arg (stmt, 1);
2102 size = gimple_call_arg (stmt, 2);
2103 fmt = gimple_call_arg (stmt, 3);
2105 if (! tree_fits_uhwi_p (size))
2106 return false;
2108 len = NULL_TREE;
2110 if (!init_target_chars ())
2111 return false;
2113 /* Check whether the format is a literal string constant. */
2114 fmt_str = c_getstr (fmt);
2115 if (fmt_str != NULL)
2117 /* If the format doesn't contain % args or %%, we know the size. */
2118 if (strchr (fmt_str, target_percent) == 0)
2120 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2121 len = build_int_cstu (size_type_node, strlen (fmt_str));
2123 /* If the format is "%s" and first ... argument is a string literal,
2124 we know the size too. */
2125 else if (fcode == BUILT_IN_SPRINTF_CHK
2126 && strcmp (fmt_str, target_percent_s) == 0)
2128 tree arg;
2130 if (nargs == 5)
2132 arg = gimple_call_arg (stmt, 4);
2133 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2135 len = c_strlen (arg, 1);
2136 if (! len || ! tree_fits_uhwi_p (len))
2137 len = NULL_TREE;
2143 if (! integer_all_onesp (size))
2145 if (! len || ! tree_int_cst_lt (len, size))
2146 return false;
2149 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2150 or if format doesn't contain % chars or is "%s". */
2151 if (! integer_zerop (flag))
2153 if (fmt_str == NULL)
2154 return false;
2155 if (strchr (fmt_str, target_percent) != NULL
2156 && strcmp (fmt_str, target_percent_s))
2157 return false;
2160 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2161 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2162 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2163 if (!fn)
2164 return false;
2166 /* Replace the called function and the first 4 argument by 2 retaining
2167 trailing varargs. */
2168 gimple_call_set_fndecl (stmt, fn);
2169 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2170 gimple_call_set_arg (stmt, 0, dest);
2171 gimple_call_set_arg (stmt, 1, fmt);
2172 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2173 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2174 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2175 fold_stmt (gsi);
2176 return true;
2179 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2180 ORIG may be null if this is a 2-argument call. We don't attempt to
2181 simplify calls with more than 3 arguments.
2183 Return NULL_TREE if no simplification was possible, otherwise return the
2184 simplified form of the call as a tree. If IGNORED is true, it means that
2185 the caller does not use the returned value of the function. */
2187 static bool
2188 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2190 gimple *stmt = gsi_stmt (*gsi);
2191 tree dest = gimple_call_arg (stmt, 0);
2192 tree fmt = gimple_call_arg (stmt, 1);
2193 tree orig = NULL_TREE;
2194 const char *fmt_str = NULL;
2196 /* Verify the required arguments in the original call. We deal with two
2197 types of sprintf() calls: 'sprintf (str, fmt)' and
2198 'sprintf (dest, "%s", orig)'. */
2199 if (gimple_call_num_args (stmt) > 3)
2200 return false;
2202 if (gimple_call_num_args (stmt) == 3)
2203 orig = gimple_call_arg (stmt, 2);
2205 /* Check whether the format is a literal string constant. */
2206 fmt_str = c_getstr (fmt);
2207 if (fmt_str == NULL)
2208 return false;
2210 if (!init_target_chars ())
2211 return false;
2213 /* If the format doesn't contain % args or %%, use strcpy. */
2214 if (strchr (fmt_str, target_percent) == NULL)
2216 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2218 if (!fn)
2219 return false;
2221 /* Don't optimize sprintf (buf, "abc", ptr++). */
2222 if (orig)
2223 return false;
2225 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2226 'format' is known to contain no % formats. */
2227 gimple_seq stmts = NULL;
2228 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2229 gimple_seq_add_stmt_without_update (&stmts, repl);
2230 if (gimple_call_lhs (stmt))
2232 repl = gimple_build_assign (gimple_call_lhs (stmt),
2233 build_int_cst (integer_type_node,
2234 strlen (fmt_str)));
2235 gimple_seq_add_stmt_without_update (&stmts, repl);
2236 gsi_replace_with_seq_vops (gsi, stmts);
2237 /* gsi now points at the assignment to the lhs, get a
2238 stmt iterator to the memcpy call.
2239 ??? We can't use gsi_for_stmt as that doesn't work when the
2240 CFG isn't built yet. */
2241 gimple_stmt_iterator gsi2 = *gsi;
2242 gsi_prev (&gsi2);
2243 fold_stmt (&gsi2);
2245 else
2247 gsi_replace_with_seq_vops (gsi, stmts);
2248 fold_stmt (gsi);
2250 return true;
2253 /* If the format is "%s", use strcpy if the result isn't used. */
2254 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2256 tree fn;
2257 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2259 if (!fn)
2260 return false;
2262 /* Don't crash on sprintf (str1, "%s"). */
2263 if (!orig)
2264 return false;
2266 tree orig_len = NULL_TREE;
2267 if (gimple_call_lhs (stmt))
2269 orig_len = get_maxval_strlen (orig, 0);
2270 if (!orig_len)
2271 return false;
2274 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2275 gimple_seq stmts = NULL;
2276 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2277 gimple_seq_add_stmt_without_update (&stmts, repl);
2278 if (gimple_call_lhs (stmt))
2280 if (!useless_type_conversion_p (integer_type_node,
2281 TREE_TYPE (orig_len)))
2282 orig_len = fold_convert (integer_type_node, orig_len);
2283 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2284 gimple_seq_add_stmt_without_update (&stmts, repl);
2285 gsi_replace_with_seq_vops (gsi, stmts);
2286 /* gsi now points at the assignment to the lhs, get a
2287 stmt iterator to the memcpy call.
2288 ??? We can't use gsi_for_stmt as that doesn't work when the
2289 CFG isn't built yet. */
2290 gimple_stmt_iterator gsi2 = *gsi;
2291 gsi_prev (&gsi2);
2292 fold_stmt (&gsi2);
2294 else
2296 gsi_replace_with_seq_vops (gsi, stmts);
2297 fold_stmt (gsi);
2299 return true;
2301 return false;
2304 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2305 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2306 attempt to simplify calls with more than 4 arguments.
2308 Return NULL_TREE if no simplification was possible, otherwise return the
2309 simplified form of the call as a tree. If IGNORED is true, it means that
2310 the caller does not use the returned value of the function. */
2312 static bool
2313 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2315 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2316 tree dest = gimple_call_arg (stmt, 0);
2317 tree destsize = gimple_call_arg (stmt, 1);
2318 tree fmt = gimple_call_arg (stmt, 2);
2319 tree orig = NULL_TREE;
2320 const char *fmt_str = NULL;
2322 if (gimple_call_num_args (stmt) > 4)
2323 return false;
2325 if (gimple_call_num_args (stmt) == 4)
2326 orig = gimple_call_arg (stmt, 3);
2328 if (!tree_fits_uhwi_p (destsize))
2329 return false;
2330 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2332 /* Check whether the format is a literal string constant. */
2333 fmt_str = c_getstr (fmt);
2334 if (fmt_str == NULL)
2335 return false;
2337 if (!init_target_chars ())
2338 return false;
2340 /* If the format doesn't contain % args or %%, use strcpy. */
2341 if (strchr (fmt_str, target_percent) == NULL)
2343 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2344 if (!fn)
2345 return false;
2347 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2348 if (orig)
2349 return false;
2351 /* We could expand this as
2352 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2353 or to
2354 memcpy (str, fmt_with_nul_at_cstm1, cst);
2355 but in the former case that might increase code size
2356 and in the latter case grow .rodata section too much.
2357 So punt for now. */
2358 size_t len = strlen (fmt_str);
2359 if (len >= destlen)
2360 return false;
2362 gimple_seq stmts = NULL;
2363 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2364 gimple_seq_add_stmt_without_update (&stmts, repl);
2365 if (gimple_call_lhs (stmt))
2367 repl = gimple_build_assign (gimple_call_lhs (stmt),
2368 build_int_cst (integer_type_node, len));
2369 gimple_seq_add_stmt_without_update (&stmts, repl);
2370 gsi_replace_with_seq_vops (gsi, stmts);
2371 /* gsi now points at the assignment to the lhs, get a
2372 stmt iterator to the memcpy call.
2373 ??? We can't use gsi_for_stmt as that doesn't work when the
2374 CFG isn't built yet. */
2375 gimple_stmt_iterator gsi2 = *gsi;
2376 gsi_prev (&gsi2);
2377 fold_stmt (&gsi2);
2379 else
2381 gsi_replace_with_seq_vops (gsi, stmts);
2382 fold_stmt (gsi);
2384 return true;
2387 /* If the format is "%s", use strcpy if the result isn't used. */
2388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2391 if (!fn)
2392 return false;
2394 /* Don't crash on snprintf (str1, cst, "%s"). */
2395 if (!orig)
2396 return false;
2398 tree orig_len = get_maxval_strlen (orig, 0);
2399 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2400 return false;
2402 /* We could expand this as
2403 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2404 or to
2405 memcpy (str1, str2_with_nul_at_cstm1, cst);
2406 but in the former case that might increase code size
2407 and in the latter case grow .rodata section too much.
2408 So punt for now. */
2409 if (compare_tree_int (orig_len, destlen) >= 0)
2410 return false;
2412 /* Convert snprintf (str1, cst, "%s", str2) into
2413 strcpy (str1, str2) if strlen (str2) < cst. */
2414 gimple_seq stmts = NULL;
2415 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2416 gimple_seq_add_stmt_without_update (&stmts, repl);
2417 if (gimple_call_lhs (stmt))
2419 if (!useless_type_conversion_p (integer_type_node,
2420 TREE_TYPE (orig_len)))
2421 orig_len = fold_convert (integer_type_node, orig_len);
2422 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* gsi now points at the assignment to the lhs, get a
2426 stmt iterator to the memcpy call.
2427 ??? We can't use gsi_for_stmt as that doesn't work when the
2428 CFG isn't built yet. */
2429 gimple_stmt_iterator gsi2 = *gsi;
2430 gsi_prev (&gsi2);
2431 fold_stmt (&gsi2);
2433 else
2435 gsi_replace_with_seq_vops (gsi, stmts);
2436 fold_stmt (gsi);
2438 return true;
2440 return false;
2443 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2444 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2445 more than 3 arguments, and ARG may be null in the 2-argument case.
2447 Return NULL_TREE if no simplification was possible, otherwise return the
2448 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2449 code of the function to be simplified. */
2451 static bool
2452 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2453 tree fp, tree fmt, tree arg,
2454 enum built_in_function fcode)
2456 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2457 tree fn_fputc, fn_fputs;
2458 const char *fmt_str = NULL;
2460 /* If the return value is used, don't do the transformation. */
2461 if (gimple_call_lhs (stmt) != NULL_TREE)
2462 return false;
2464 /* Check whether the format is a literal string constant. */
2465 fmt_str = c_getstr (fmt);
2466 if (fmt_str == NULL)
2467 return false;
2469 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2471 /* If we're using an unlocked function, assume the other
2472 unlocked functions exist explicitly. */
2473 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2474 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2476 else
2478 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2479 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2482 if (!init_target_chars ())
2483 return false;
2485 /* If the format doesn't contain % args or %%, use strcpy. */
2486 if (strchr (fmt_str, target_percent) == NULL)
2488 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2489 && arg)
2490 return false;
2492 /* If the format specifier was "", fprintf does nothing. */
2493 if (fmt_str[0] == '\0')
2495 replace_call_with_value (gsi, NULL_TREE);
2496 return true;
2499 /* When "string" doesn't contain %, replace all cases of
2500 fprintf (fp, string) with fputs (string, fp). The fputs
2501 builtin will take care of special cases like length == 1. */
2502 if (fn_fputs)
2504 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2505 replace_call_with_call_and_fold (gsi, repl);
2506 return true;
2510 /* The other optimizations can be done only on the non-va_list variants. */
2511 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2512 return false;
2514 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2515 else if (strcmp (fmt_str, target_percent_s) == 0)
2517 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2518 return false;
2519 if (fn_fputs)
2521 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2522 replace_call_with_call_and_fold (gsi, repl);
2523 return true;
2527 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2528 else if (strcmp (fmt_str, target_percent_c) == 0)
2530 if (!arg
2531 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2532 return false;
2533 if (fn_fputc)
2535 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2536 replace_call_with_call_and_fold (gsi, repl);
2537 return true;
2541 return false;
2544 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2545 FMT and ARG are the arguments to the call; we don't fold cases with
2546 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2548 Return NULL_TREE if no simplification was possible, otherwise return the
2549 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2550 code of the function to be simplified. */
2552 static bool
2553 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2554 tree arg, enum built_in_function fcode)
2556 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2557 tree fn_putchar, fn_puts, newarg;
2558 const char *fmt_str = NULL;
2560 /* If the return value is used, don't do the transformation. */
2561 if (gimple_call_lhs (stmt) != NULL_TREE)
2562 return false;
2564 /* Check whether the format is a literal string constant. */
2565 fmt_str = c_getstr (fmt);
2566 if (fmt_str == NULL)
2567 return false;
2569 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2571 /* If we're using an unlocked function, assume the other
2572 unlocked functions exist explicitly. */
2573 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2574 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2576 else
2578 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2579 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2582 if (!init_target_chars ())
2583 return false;
2585 if (strcmp (fmt_str, target_percent_s) == 0
2586 || strchr (fmt_str, target_percent) == NULL)
2588 const char *str;
2590 if (strcmp (fmt_str, target_percent_s) == 0)
2592 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2593 return false;
2595 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2596 return false;
2598 str = c_getstr (arg);
2599 if (str == NULL)
2600 return false;
2602 else
2604 /* The format specifier doesn't contain any '%' characters. */
2605 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2606 && arg)
2607 return false;
2608 str = fmt_str;
2611 /* If the string was "", printf does nothing. */
2612 if (str[0] == '\0')
2614 replace_call_with_value (gsi, NULL_TREE);
2615 return true;
2618 /* If the string has length of 1, call putchar. */
2619 if (str[1] == '\0')
2621 /* Given printf("c"), (where c is any one character,)
2622 convert "c"[0] to an int and pass that to the replacement
2623 function. */
2624 newarg = build_int_cst (integer_type_node, str[0]);
2625 if (fn_putchar)
2627 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2628 replace_call_with_call_and_fold (gsi, repl);
2629 return true;
2632 else
2634 /* If the string was "string\n", call puts("string"). */
2635 size_t len = strlen (str);
2636 if ((unsigned char)str[len - 1] == target_newline
2637 && (size_t) (int) len == len
2638 && (int) len > 0)
2640 char *newstr;
2641 tree offset_node, string_cst;
2643 /* Create a NUL-terminated string that's one char shorter
2644 than the original, stripping off the trailing '\n'. */
2645 newarg = build_string_literal (len, str);
2646 string_cst = string_constant (newarg, &offset_node);
2647 gcc_checking_assert (string_cst
2648 && (TREE_STRING_LENGTH (string_cst)
2649 == (int) len)
2650 && integer_zerop (offset_node)
2651 && (unsigned char)
2652 TREE_STRING_POINTER (string_cst)[len - 1]
2653 == target_newline);
2654 /* build_string_literal creates a new STRING_CST,
2655 modify it in place to avoid double copying. */
2656 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2657 newstr[len - 1] = '\0';
2658 if (fn_puts)
2660 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2661 replace_call_with_call_and_fold (gsi, repl);
2662 return true;
2665 else
2666 /* We'd like to arrange to call fputs(string,stdout) here,
2667 but we need stdout and don't have a way to get it yet. */
2668 return false;
2672 /* The other optimizations can be done only on the non-va_list variants. */
2673 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2674 return false;
2676 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2677 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2679 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2680 return false;
2681 if (fn_puts)
2683 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2684 replace_call_with_call_and_fold (gsi, repl);
2685 return true;
2689 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2690 else if (strcmp (fmt_str, target_percent_c) == 0)
2692 if (!arg || ! useless_type_conversion_p (integer_type_node,
2693 TREE_TYPE (arg)))
2694 return false;
2695 if (fn_putchar)
2697 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2698 replace_call_with_call_and_fold (gsi, repl);
2699 return true;
2703 return false;
2708 /* Fold a call to __builtin_strlen with known length LEN. */
2710 static bool
2711 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2713 gimple *stmt = gsi_stmt (*gsi);
2714 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2715 if (!len)
2716 return false;
2717 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2718 replace_call_with_value (gsi, len);
2719 return true;
2722 /* Fold a call to __builtin_acc_on_device. */
2724 static bool
2725 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2727 /* Defer folding until we know which compiler we're in. */
2728 if (symtab->state != EXPANSION)
2729 return false;
2731 unsigned val_host = GOMP_DEVICE_HOST;
2732 unsigned val_dev = GOMP_DEVICE_NONE;
2734 #ifdef ACCEL_COMPILER
2735 val_host = GOMP_DEVICE_NOT_HOST;
2736 val_dev = ACCEL_COMPILER_acc_device;
2737 #endif
2739 location_t loc = gimple_location (gsi_stmt (*gsi));
2741 tree host_eq = make_ssa_name (boolean_type_node);
2742 gimple *host_ass = gimple_build_assign
2743 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2744 gimple_set_location (host_ass, loc);
2745 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2747 tree dev_eq = make_ssa_name (boolean_type_node);
2748 gimple *dev_ass = gimple_build_assign
2749 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2750 gimple_set_location (dev_ass, loc);
2751 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2753 tree result = make_ssa_name (boolean_type_node);
2754 gimple *result_ass = gimple_build_assign
2755 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2756 gimple_set_location (result_ass, loc);
2757 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2759 replace_call_with_value (gsi, result);
2761 return true;
2764 /* Fold the non-target builtin at *GSI and return whether any simplification
2765 was made. */
2767 static bool
2768 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2770 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2771 tree callee = gimple_call_fndecl (stmt);
2773 /* Give up for always_inline inline builtins until they are
2774 inlined. */
2775 if (avoid_folding_inline_builtin (callee))
2776 return false;
2778 unsigned n = gimple_call_num_args (stmt);
2779 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2780 switch (fcode)
2782 case BUILT_IN_BZERO:
2783 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2784 gimple_call_arg (stmt, 1));
2785 case BUILT_IN_MEMSET:
2786 return gimple_fold_builtin_memset (gsi,
2787 gimple_call_arg (stmt, 1),
2788 gimple_call_arg (stmt, 2));
2789 case BUILT_IN_BCOPY:
2790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2791 gimple_call_arg (stmt, 0), 3);
2792 case BUILT_IN_MEMCPY:
2793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2794 gimple_call_arg (stmt, 1), 0);
2795 case BUILT_IN_MEMPCPY:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1), 1);
2798 case BUILT_IN_MEMMOVE:
2799 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2800 gimple_call_arg (stmt, 1), 3);
2801 case BUILT_IN_SPRINTF_CHK:
2802 case BUILT_IN_VSPRINTF_CHK:
2803 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2804 case BUILT_IN_STRCAT_CHK:
2805 return gimple_fold_builtin_strcat_chk (gsi);
2806 case BUILT_IN_STRNCAT_CHK:
2807 return gimple_fold_builtin_strncat_chk (gsi);
2808 case BUILT_IN_STRLEN:
2809 return gimple_fold_builtin_strlen (gsi);
2810 case BUILT_IN_STRCPY:
2811 return gimple_fold_builtin_strcpy (gsi,
2812 gimple_call_arg (stmt, 0),
2813 gimple_call_arg (stmt, 1));
2814 case BUILT_IN_STRNCPY:
2815 return gimple_fold_builtin_strncpy (gsi,
2816 gimple_call_arg (stmt, 0),
2817 gimple_call_arg (stmt, 1),
2818 gimple_call_arg (stmt, 2));
2819 case BUILT_IN_STRCAT:
2820 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2821 gimple_call_arg (stmt, 1));
2822 case BUILT_IN_STRNCAT:
2823 return gimple_fold_builtin_strncat (gsi);
2824 case BUILT_IN_FPUTS:
2825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1), false);
2827 case BUILT_IN_FPUTS_UNLOCKED:
2828 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2829 gimple_call_arg (stmt, 1), true);
2830 case BUILT_IN_MEMCPY_CHK:
2831 case BUILT_IN_MEMPCPY_CHK:
2832 case BUILT_IN_MEMMOVE_CHK:
2833 case BUILT_IN_MEMSET_CHK:
2834 return gimple_fold_builtin_memory_chk (gsi,
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2838 gimple_call_arg (stmt, 3),
2839 fcode);
2840 case BUILT_IN_STPCPY:
2841 return gimple_fold_builtin_stpcpy (gsi);
2842 case BUILT_IN_STRCPY_CHK:
2843 case BUILT_IN_STPCPY_CHK:
2844 return gimple_fold_builtin_stxcpy_chk (gsi,
2845 gimple_call_arg (stmt, 0),
2846 gimple_call_arg (stmt, 1),
2847 gimple_call_arg (stmt, 2),
2848 fcode);
2849 case BUILT_IN_STRNCPY_CHK:
2850 case BUILT_IN_STPNCPY_CHK:
2851 return gimple_fold_builtin_stxncpy_chk (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2),
2855 gimple_call_arg (stmt, 3),
2856 fcode);
2857 case BUILT_IN_SNPRINTF_CHK:
2858 case BUILT_IN_VSNPRINTF_CHK:
2859 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2860 case BUILT_IN_SNPRINTF:
2861 return gimple_fold_builtin_snprintf (gsi);
2862 case BUILT_IN_SPRINTF:
2863 return gimple_fold_builtin_sprintf (gsi);
2864 case BUILT_IN_FPRINTF:
2865 case BUILT_IN_FPRINTF_UNLOCKED:
2866 case BUILT_IN_VFPRINTF:
2867 if (n == 2 || n == 3)
2868 return gimple_fold_builtin_fprintf (gsi,
2869 gimple_call_arg (stmt, 0),
2870 gimple_call_arg (stmt, 1),
2871 n == 3
2872 ? gimple_call_arg (stmt, 2)
2873 : NULL_TREE,
2874 fcode);
2875 break;
2876 case BUILT_IN_FPRINTF_CHK:
2877 case BUILT_IN_VFPRINTF_CHK:
2878 if (n == 3 || n == 4)
2879 return gimple_fold_builtin_fprintf (gsi,
2880 gimple_call_arg (stmt, 0),
2881 gimple_call_arg (stmt, 2),
2882 n == 4
2883 ? gimple_call_arg (stmt, 3)
2884 : NULL_TREE,
2885 fcode);
2886 break;
2887 case BUILT_IN_PRINTF:
2888 case BUILT_IN_PRINTF_UNLOCKED:
2889 case BUILT_IN_VPRINTF:
2890 if (n == 1 || n == 2)
2891 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2892 n == 2
2893 ? gimple_call_arg (stmt, 1)
2894 : NULL_TREE, fcode);
2895 break;
2896 case BUILT_IN_PRINTF_CHK:
2897 case BUILT_IN_VPRINTF_CHK:
2898 if (n == 2 || n == 3)
2899 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2900 n == 3
2901 ? gimple_call_arg (stmt, 2)
2902 : NULL_TREE, fcode);
2903 break;
2904 case BUILT_IN_ACC_ON_DEVICE:
2905 return gimple_fold_builtin_acc_on_device (gsi,
2906 gimple_call_arg (stmt, 0));
2907 default:;
2910 /* Try the generic builtin folder. */
2911 bool ignore = (gimple_call_lhs (stmt) == NULL);
2912 tree result = fold_call_stmt (stmt, ignore);
2913 if (result)
2915 if (ignore)
2916 STRIP_NOPS (result);
2917 else
2918 result = fold_convert (gimple_call_return_type (stmt), result);
2919 if (!update_call_from_tree (gsi, result))
2920 gimplify_and_update_call_from_tree (gsi, result);
2921 return true;
2924 return false;
2927 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2928 function calls to constants, where possible. */
2930 static tree
2931 fold_internal_goacc_dim (const gimple *call)
2933 int axis = get_oacc_ifn_dim_arg (call);
2934 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2935 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2936 tree result = NULL_TREE;
2938 /* If the size is 1, or we only want the size and it is not dynamic,
2939 we know the answer. */
2940 if (size == 1 || (!is_pos && size))
2942 tree type = TREE_TYPE (gimple_call_lhs (call));
2943 result = build_int_cst (type, size - is_pos);
2946 return result;
2949 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2950 doesn't fit into TYPE. The test for overflow should be regardless of
2951 -fwrapv, and even for unsigned types. */
2953 bool
2954 arith_overflowed_p (enum tree_code code, const_tree type,
2955 const_tree arg0, const_tree arg1)
2957 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2958 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2959 widest2_int_cst;
2960 widest2_int warg0 = widest2_int_cst (arg0);
2961 widest2_int warg1 = widest2_int_cst (arg1);
2962 widest2_int wres;
2963 switch (code)
2965 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2966 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2967 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2968 default: gcc_unreachable ();
2970 signop sign = TYPE_SIGN (type);
2971 if (sign == UNSIGNED && wi::neg_p (wres))
2972 return true;
2973 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2976 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2977 The statement may be replaced by another statement, e.g., if the call
2978 simplifies to a constant value. Return true if any changes were made.
2979 It is assumed that the operands have been previously folded. */
2981 static bool
2982 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2984 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2985 tree callee;
2986 bool changed = false;
2987 unsigned i;
2989 /* Fold *& in call arguments. */
2990 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2991 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2993 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2994 if (tmp)
2996 gimple_call_set_arg (stmt, i, tmp);
2997 changed = true;
3001 /* Check for virtual calls that became direct calls. */
3002 callee = gimple_call_fn (stmt);
3003 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3005 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3007 if (dump_file && virtual_method_call_p (callee)
3008 && !possible_polymorphic_call_target_p
3009 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3010 (OBJ_TYPE_REF_EXPR (callee)))))
3012 fprintf (dump_file,
3013 "Type inheritance inconsistent devirtualization of ");
3014 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3015 fprintf (dump_file, " to ");
3016 print_generic_expr (dump_file, callee, TDF_SLIM);
3017 fprintf (dump_file, "\n");
3020 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3021 changed = true;
3023 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3025 bool final;
3026 vec <cgraph_node *>targets
3027 = possible_polymorphic_call_targets (callee, stmt, &final);
3028 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3030 tree lhs = gimple_call_lhs (stmt);
3031 if (dump_enabled_p ())
3033 location_t loc = gimple_location_safe (stmt);
3034 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3035 "folding virtual function call to %s\n",
3036 targets.length () == 1
3037 ? targets[0]->name ()
3038 : "__builtin_unreachable");
3040 if (targets.length () == 1)
3042 gimple_call_set_fndecl (stmt, targets[0]->decl);
3043 changed = true;
3044 /* If the call becomes noreturn, remove the lhs. */
3045 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3047 if (TREE_CODE (lhs) == SSA_NAME)
3049 tree var = create_tmp_var (TREE_TYPE (lhs));
3050 tree def = get_or_create_ssa_default_def (cfun, var);
3051 gimple *new_stmt = gimple_build_assign (lhs, def);
3052 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3054 gimple_call_set_lhs (stmt, NULL_TREE);
3056 maybe_remove_unused_call_args (cfun, stmt);
3058 else
3060 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3061 gimple *new_stmt = gimple_build_call (fndecl, 0);
3062 gimple_set_location (new_stmt, gimple_location (stmt));
3063 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3065 tree var = create_tmp_var (TREE_TYPE (lhs));
3066 tree def = get_or_create_ssa_default_def (cfun, var);
3068 /* To satisfy condition for
3069 cgraph_update_edges_for_call_stmt_node,
3070 we need to preserve GIMPLE_CALL statement
3071 at position of GSI iterator. */
3072 update_call_from_tree (gsi, def);
3073 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3075 else
3077 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3078 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3079 gsi_replace (gsi, new_stmt, false);
3081 return true;
3087 /* Check for indirect calls that became direct calls, and then
3088 no longer require a static chain. */
3089 if (gimple_call_chain (stmt))
3091 tree fn = gimple_call_fndecl (stmt);
3092 if (fn && !DECL_STATIC_CHAIN (fn))
3094 gimple_call_set_chain (stmt, NULL);
3095 changed = true;
3097 else
3099 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3100 if (tmp)
3102 gimple_call_set_chain (stmt, tmp);
3103 changed = true;
3108 if (inplace)
3109 return changed;
3111 /* Check for builtins that CCP can handle using information not
3112 available in the generic fold routines. */
3113 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3115 if (gimple_fold_builtin (gsi))
3116 changed = true;
3118 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3120 changed |= targetm.gimple_fold_builtin (gsi);
3122 else if (gimple_call_internal_p (stmt))
3124 enum tree_code subcode = ERROR_MARK;
3125 tree result = NULL_TREE;
3126 bool cplx_result = false;
3127 tree overflow = NULL_TREE;
3128 switch (gimple_call_internal_fn (stmt))
3130 case IFN_BUILTIN_EXPECT:
3131 result = fold_builtin_expect (gimple_location (stmt),
3132 gimple_call_arg (stmt, 0),
3133 gimple_call_arg (stmt, 1),
3134 gimple_call_arg (stmt, 2));
3135 break;
3136 case IFN_UBSAN_OBJECT_SIZE:
3137 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3138 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3139 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3140 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3141 gimple_call_arg (stmt, 2))))
3143 gsi_replace (gsi, gimple_build_nop (), false);
3144 unlink_stmt_vdef (stmt);
3145 release_defs (stmt);
3146 return true;
3148 break;
3149 case IFN_GOACC_DIM_SIZE:
3150 case IFN_GOACC_DIM_POS:
3151 result = fold_internal_goacc_dim (stmt);
3152 break;
3153 case IFN_UBSAN_CHECK_ADD:
3154 subcode = PLUS_EXPR;
3155 break;
3156 case IFN_UBSAN_CHECK_SUB:
3157 subcode = MINUS_EXPR;
3158 break;
3159 case IFN_UBSAN_CHECK_MUL:
3160 subcode = MULT_EXPR;
3161 break;
3162 case IFN_ADD_OVERFLOW:
3163 subcode = PLUS_EXPR;
3164 cplx_result = true;
3165 break;
3166 case IFN_SUB_OVERFLOW:
3167 subcode = MINUS_EXPR;
3168 cplx_result = true;
3169 break;
3170 case IFN_MUL_OVERFLOW:
3171 subcode = MULT_EXPR;
3172 cplx_result = true;
3173 break;
3174 default:
3175 break;
3177 if (subcode != ERROR_MARK)
3179 tree arg0 = gimple_call_arg (stmt, 0);
3180 tree arg1 = gimple_call_arg (stmt, 1);
3181 tree type = TREE_TYPE (arg0);
3182 if (cplx_result)
3184 tree lhs = gimple_call_lhs (stmt);
3185 if (lhs == NULL_TREE)
3186 type = NULL_TREE;
3187 else
3188 type = TREE_TYPE (TREE_TYPE (lhs));
3190 if (type == NULL_TREE)
3192 /* x = y + 0; x = y - 0; x = y * 0; */
3193 else if (integer_zerop (arg1))
3194 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3195 /* x = 0 + y; x = 0 * y; */
3196 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3197 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3198 /* x = y - y; */
3199 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3200 result = integer_zero_node;
3201 /* x = y * 1; x = 1 * y; */
3202 else if (subcode == MULT_EXPR && integer_onep (arg1))
3203 result = arg0;
3204 else if (subcode == MULT_EXPR && integer_onep (arg0))
3205 result = arg1;
3206 else if (TREE_CODE (arg0) == INTEGER_CST
3207 && TREE_CODE (arg1) == INTEGER_CST)
3209 if (cplx_result)
3210 result = int_const_binop (subcode, fold_convert (type, arg0),
3211 fold_convert (type, arg1));
3212 else
3213 result = int_const_binop (subcode, arg0, arg1);
3214 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3216 if (cplx_result)
3217 overflow = build_one_cst (type);
3218 else
3219 result = NULL_TREE;
3222 if (result)
3224 if (result == integer_zero_node)
3225 result = build_zero_cst (type);
3226 else if (cplx_result && TREE_TYPE (result) != type)
3228 if (TREE_CODE (result) == INTEGER_CST)
3230 if (arith_overflowed_p (PLUS_EXPR, type, result,
3231 integer_zero_node))
3232 overflow = build_one_cst (type);
3234 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3235 && TYPE_UNSIGNED (type))
3236 || (TYPE_PRECISION (type)
3237 < (TYPE_PRECISION (TREE_TYPE (result))
3238 + (TYPE_UNSIGNED (TREE_TYPE (result))
3239 && !TYPE_UNSIGNED (type)))))
3240 result = NULL_TREE;
3241 if (result)
3242 result = fold_convert (type, result);
3247 if (result)
3249 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3250 result = drop_tree_overflow (result);
3251 if (cplx_result)
3253 if (overflow == NULL_TREE)
3254 overflow = build_zero_cst (TREE_TYPE (result));
3255 tree ctype = build_complex_type (TREE_TYPE (result));
3256 if (TREE_CODE (result) == INTEGER_CST
3257 && TREE_CODE (overflow) == INTEGER_CST)
3258 result = build_complex (ctype, result, overflow);
3259 else
3260 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3261 ctype, result, overflow);
3263 if (!update_call_from_tree (gsi, result))
3264 gimplify_and_update_call_from_tree (gsi, result);
3265 changed = true;
3269 return changed;
3273 /* Return true whether NAME has a use on STMT. */
3275 static bool
3276 has_use_on_stmt (tree name, gimple *stmt)
3278 imm_use_iterator iter;
3279 use_operand_p use_p;
3280 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3281 if (USE_STMT (use_p) == stmt)
3282 return true;
3283 return false;
3286 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3287 gimple_simplify.
3289 Replaces *GSI with the simplification result in RCODE and OPS
3290 and the associated statements in *SEQ. Does the replacement
3291 according to INPLACE and returns true if the operation succeeded. */
3293 static bool
3294 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3295 code_helper rcode, tree *ops,
3296 gimple_seq *seq, bool inplace)
3298 gimple *stmt = gsi_stmt (*gsi);
3300 /* Play safe and do not allow abnormals to be mentioned in
3301 newly created statements. See also maybe_push_res_to_seq.
3302 As an exception allow such uses if there was a use of the
3303 same SSA name on the old stmt. */
3304 if ((TREE_CODE (ops[0]) == SSA_NAME
3305 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3306 && !has_use_on_stmt (ops[0], stmt))
3307 || (ops[1]
3308 && TREE_CODE (ops[1]) == SSA_NAME
3309 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3310 && !has_use_on_stmt (ops[1], stmt))
3311 || (ops[2]
3312 && TREE_CODE (ops[2]) == SSA_NAME
3313 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3314 && !has_use_on_stmt (ops[2], stmt))
3315 || (COMPARISON_CLASS_P (ops[0])
3316 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3317 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3318 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3319 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3320 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3321 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3322 return false;
3324 /* Don't insert new statements when INPLACE is true, even if we could
3325 reuse STMT for the final statement. */
3326 if (inplace && !gimple_seq_empty_p (*seq))
3327 return false;
/* A GIMPLE_COND is handled specially: the simplification result must be
   re-expressed as a condition, not installed as an assignment RHS.  */
3329 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3331 gcc_assert (rcode.is_tree_code ());
3332 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3333 /* GIMPLE_CONDs condition may not throw. */
3334 && (!flag_exceptions
3335 || !cfun->can_throw_non_call_exceptions
3336 || !operation_could_trap_p (rcode,
3337 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3338 false, NULL_TREE)))
3339 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
/* A bare SSA name result becomes NAME != 0.  */
3340 else if (rcode == SSA_NAME)
3341 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3342 build_zero_cst (TREE_TYPE (ops[0])));
/* A constant result decides the condition statically.  */
3343 else if (rcode == INTEGER_CST)
3345 if (integer_zerop (ops[0]))
3346 gimple_cond_make_false (cond_stmt);
3347 else
3348 gimple_cond_make_true (cond_stmt);
3350 else if (!inplace)
/* Otherwise materialize the result into *SEQ and compare it against
   zero; only possible when insertion of new statements is allowed.  */
3352 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3353 ops, seq);
3354 if (!res)
3355 return false;
3356 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3357 build_zero_cst (TREE_TYPE (res)));
3359 else
3360 return false;
3361 if (dump_file && (dump_flags & TDF_DETAILS))
3363 fprintf (dump_file, "gimple_simplified to ");
3364 if (!gimple_seq_empty_p (*seq))
3365 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3366 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3367 0, TDF_SLIM);
3369 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3370 return true;
/* For assignments, install the simplified RHS in place when possible:
   always when not INPLACE, or when the new RHS does not need more
   operand slots than the statement already has.  */
3372 else if (is_gimple_assign (stmt)
3373 && rcode.is_tree_code ())
3375 if (!inplace
3376 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3378 maybe_build_generic_op (rcode,
3379 TREE_TYPE (gimple_assign_lhs (stmt)),
3380 &ops[0], ops[1], ops[2]);
3381 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3382 if (dump_file && (dump_flags & TDF_DETAILS))
3384 fprintf (dump_file, "gimple_simplified to ");
3385 if (!gimple_seq_empty_p (*seq))
3386 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3387 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3388 0, TDF_SLIM);
3390 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3391 return true;
/* A call simplified to the same combined function only needs its
   arguments updated in place.  */
3394 else if (rcode.is_fn_code ()
3395 && gimple_call_combined_fn (stmt) == rcode)
3397 unsigned i;
3398 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3400 gcc_assert (ops[i] != NULL_TREE);
3401 gimple_call_set_arg (stmt, i, ops[i]);
3403 if (i < 3)
3404 gcc_assert (ops[i] == NULL_TREE);
3405 if (dump_file && (dump_flags & TDF_DETAILS))
3407 fprintf (dump_file, "gimple_simplified to ");
3408 if (!gimple_seq_empty_p (*seq))
3409 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3410 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3412 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3413 return true;
/* Everything else requires building a fresh statement sequence for the
   result and replacing the old statement wholesale.  */
3415 else if (!inplace)
3417 if (gimple_has_lhs (stmt))
3419 tree lhs = gimple_get_lhs (stmt);
3420 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3421 ops, seq, lhs))
3422 return false;
3423 if (dump_file && (dump_flags & TDF_DETAILS))
3425 fprintf (dump_file, "gimple_simplified to ");
3426 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3428 gsi_replace_with_seq_vops (gsi, *seq);
3429 return true;
3431 else
/* A simplified statement without an LHS should not reach this point.  */
3432 gcc_unreachable ();
3435 return false;
3438 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3440 static bool
3441 maybe_canonicalize_mem_ref_addr (tree *t)
3443 bool res = false;
/* Step past an outer ADDR_EXPR and any handled components so *T points
   at the innermost base reference, which is where a [TARGET_]MEM_REF
   would sit.  */
3445 if (TREE_CODE (*t) == ADDR_EXPR)
3446 t = &TREE_OPERAND (*t, 0);
3448 while (handled_component_p (*t))
3449 t = &TREE_OPERAND (*t, 0);
3451 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3452 of invariant addresses into a SSA name MEM_REF address. */
3453 if (TREE_CODE (*t) == MEM_REF
3454 || TREE_CODE (*t) == TARGET_MEM_REF)
3456 tree addr = TREE_OPERAND (*t, 0)
3457 if (TREE_CODE (addr) == ADDR_EXPR
3458 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3459 || handled_component_p (TREE_OPERAND (addr, 0))))
3461 tree base;
3462 HOST_WIDE_INT coffset;
3463 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3464 &coffset);
3465 if (!base)
3466 gcc_unreachable ();
/* Fold the component path into the MEM_REF's constant offset and
   re-point its address at the flattened base.  */
3468 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3469 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3470 TREE_OPERAND (*t, 1),
3471 size_int (coffset));
3472 res = true;
/* The rewritten address operand must be valid for a MEM_REF.  */
3474 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3475 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3478 /* Canonicalize back MEM_REFs to plain reference trees if the object
3479 accessed is a decl that has the same access semantics as the MEM_REF. */
3480 if (TREE_CODE (*t) == MEM_REF
3481 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3482 && integer_zerop (TREE_OPERAND (*t, 1))
3483 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3485 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3486 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3487 if (/* Same volatile qualification. */
3488 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3489 /* Same TBAA behavior with -fstrict-aliasing. */
3490 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3491 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3492 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3493 /* Same alignment. */
3494 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3495 /* We have to look out here to not drop a required conversion
3496 from the rhs to the lhs if *t appears on the lhs or vice-versa
3497 if it appears on the rhs. Thus require strict type
3498 compatibility. */
3499 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3501 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3502 res = true;
3506 /* Canonicalize TARGET_MEM_REF in particular with respect to
3507 the indexes becoming constant. */
3508 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3510 tree tem = maybe_fold_tmr (*t);
3511 if (tem)
3513 *t = tem;
3514 res = true;
3518 return res;
3521 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3522 distinguishes both cases. */
3524 static bool
3525 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3527 bool changed = false;
3528 gimple *stmt = gsi_stmt (*gsi);
3529 unsigned i;
3531 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3532 after propagation.
3533 ??? This shouldn't be done in generic folding but in the
3534 propagation helpers which also know whether an address was
3535 propagated.
3536 Also canonicalize operand order. */
3537 switch (gimple_code (stmt))
3539 case GIMPLE_ASSIGN:
3540 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3542 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3543 if ((REFERENCE_CLASS_P (*rhs)
3544 || TREE_CODE (*rhs) == ADDR_EXPR)
3545 && maybe_canonicalize_mem_ref_addr (rhs))
3546 changed = true;
3547 tree *lhs = gimple_assign_lhs_ptr (stmt);
3548 if (REFERENCE_CLASS_P (*lhs)
3549 && maybe_canonicalize_mem_ref_addr (lhs))
3550 changed = true;
3552 else
3554 /* Canonicalize operand order. */
3555 enum tree_code code = gimple_assign_rhs_code (stmt);
3556 if (TREE_CODE_CLASS (code) == tcc_comparison
3557 || commutative_tree_code (code)
3558 || commutative_ternary_tree_code (code))
3560 tree rhs1 = gimple_assign_rhs1 (stmt);
3561 tree rhs2 = gimple_assign_rhs2 (stmt);
3562 if (tree_swap_operands_p (rhs1, rhs2, false))
3564 gimple_assign_set_rhs1 (stmt, rhs2);
3565 gimple_assign_set_rhs2 (stmt, rhs1);
/* Swapping comparison operands requires mirroring the code.  */
3566 if (TREE_CODE_CLASS (code) == tcc_comparison)
3567 gimple_assign_set_rhs_code (stmt,
3568 swap_tree_comparison (code));
3569 changed = true;
3573 break;
3574 case GIMPLE_CALL:
3576 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3578 tree *arg = gimple_call_arg_ptr (stmt, i);
3579 if (REFERENCE_CLASS_P (*arg)
3580 && maybe_canonicalize_mem_ref_addr (arg))
3581 changed = true;
3583 tree *lhs = gimple_call_lhs_ptr (stmt);
3584 if (*lhs
3585 && REFERENCE_CLASS_P (*lhs)
3586 && maybe_canonicalize_mem_ref_addr (lhs))
3587 changed = true;
3588 break;
3590 case GIMPLE_ASM:
3592 gasm *asm_stmt = as_a <gasm *> (stmt);
3593 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3595 tree link = gimple_asm_output_op (asm_stmt, i);
3596 tree op = TREE_VALUE (link);
3597 if (REFERENCE_CLASS_P (op)
3598 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3599 changed = true;
3601 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3603 tree link = gimple_asm_input_op (asm_stmt, i);
3604 tree op = TREE_VALUE (link);
3605 if ((REFERENCE_CLASS_P (op)
3606 || TREE_CODE (op) == ADDR_EXPR)
3607 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3608 changed = true;
3611 break;
3612 case GIMPLE_DEBUG:
3613 if (gimple_debug_bind_p (stmt))
3615 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3616 if (*val
3617 && (REFERENCE_CLASS_P (*val)
3618 || TREE_CODE (*val) == ADDR_EXPR)
3619 && maybe_canonicalize_mem_ref_addr (val))
3620 changed = true;
3622 break;
3623 case GIMPLE_COND:
3625 /* Canonicalize operand order. */
3626 tree lhs = gimple_cond_lhs (stmt);
3627 tree rhs = gimple_cond_rhs (stmt);
3628 if (tree_swap_operands_p (lhs, rhs, false))
3630 gcond *gc = as_a <gcond *> (stmt);
3631 gimple_cond_set_lhs (gc, rhs);
3632 gimple_cond_set_rhs (gc, lhs);
3633 gimple_cond_set_code (gc,
3634 swap_tree_comparison (gimple_cond_code (gc)));
3635 changed = true;
3638 default:;
3641 /* Dispatch to pattern-based folding. */
3642 if (!inplace
3643 || is_gimple_assign (stmt)
3644 || gimple_code (stmt) == GIMPLE_COND)
3646 gimple_seq seq = NULL;
3647 code_helper rcode;
3648 tree ops[3] = {};
3649 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3650 valueize, valueize))
3652 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3653 changed = true;
3654 else
/* Replacement failed; drop any statements built for the result.  */
3655 gimple_seq_discard (seq);
/* The simplification above may have replaced the statement, so
   re-fetch it before the main folding below.  */
3659 stmt = gsi_stmt (*gsi);
3661 /* Fold the main computation performed by the statement. */
3662 switch (gimple_code (stmt))
3664 case GIMPLE_ASSIGN:
3666 /* Try to canonicalize for boolean-typed X the comparisons
3667 X == 0, X == 1, X != 0, and X != 1. */
3668 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3669 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3671 tree lhs = gimple_assign_lhs (stmt);
3672 tree op1 = gimple_assign_rhs1 (stmt);
3673 tree op2 = gimple_assign_rhs2 (stmt);
3674 tree type = TREE_TYPE (op1);
3676 /* Check whether the comparison operands are of the same boolean
3677 type as the result type is.
3678 Check that second operand is an integer-constant with value
3679 one or zero. */
3680 if (TREE_CODE (op2) == INTEGER_CST
3681 && (integer_zerop (op2) || integer_onep (op2))
3682 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3684 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3685 bool is_logical_not = false;
3687 /* X == 0 and X != 1 is a logical-not.of X
3688 X == 1 and X != 0 is X */
3689 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3690 || (cmp_code == NE_EXPR && integer_onep (op2)))
3691 is_logical_not = true;
3693 if (is_logical_not == false)
3694 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3695 /* Only for one-bit precision typed X the transformation
3696 !X -> ~X is valid. */
3697 else if (TYPE_PRECISION (type) == 1)
3698 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3699 /* Otherwise we use !X -> X ^ 1. */
3700 else
3701 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3702 build_int_cst (type, 1));
3703 changed = true;
3704 break;
/* Generic RHS folding.  Only install the folded form when not INPLACE
   or when it does not grow the number of operand slots.  */
3708 unsigned old_num_ops = gimple_num_ops (stmt);
3709 tree lhs = gimple_assign_lhs (stmt);
3710 tree new_rhs = fold_gimple_assign (gsi);
3711 if (new_rhs
3712 && !useless_type_conversion_p (TREE_TYPE (lhs),
3713 TREE_TYPE (new_rhs)))
3714 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3715 if (new_rhs
3716 && (!inplace
3717 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3719 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3720 changed = true;
3722 break;
3725 case GIMPLE_CALL:
3726 changed |= gimple_fold_call (gsi, inplace);
3727 break;
3729 case GIMPLE_ASM:
3730 /* Fold *& in asm operands. */
3732 gasm *asm_stmt = as_a <gasm *> (stmt);
3733 size_t noutputs;
3734 const char **oconstraints;
3735 const char *constraint;
3736 bool allows_mem, allows_reg;
3738 noutputs = gimple_asm_noutputs (asm_stmt);
3739 oconstraints = XALLOCAVEC (const char *, noutputs);
3741 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3743 tree link = gimple_asm_output_op (asm_stmt, i);
3744 tree op = TREE_VALUE (link);
3745 oconstraints[i]
3746 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3747 if (REFERENCE_CLASS_P (op)
3748 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3750 TREE_VALUE (link) = op;
3751 changed = true;
3754 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3756 tree link = gimple_asm_input_op (asm_stmt, i);
3757 tree op = TREE_VALUE (link);
3758 constraint
3759 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Inputs may only be folded to memory when the constraint allows
   memory but not a register.  */
3760 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3761 oconstraints, &allows_mem, &allows_reg);
3762 if (REFERENCE_CLASS_P (op)
3763 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3764 != NULL_TREE)
3766 TREE_VALUE (link) = op;
3767 changed = true;
3771 break;
3773 case GIMPLE_DEBUG:
3774 if (gimple_debug_bind_p (stmt))
3776 tree val = gimple_debug_bind_get_value (stmt);
3777 if (val
3778 && REFERENCE_CLASS_P (val))
3780 tree tem = maybe_fold_reference (val, false);
3781 if (tem)
3783 gimple_debug_bind_set_value (stmt, tem);
3784 changed = true;
3787 else if (val
3788 && TREE_CODE (val) == ADDR_EXPR)
3790 tree ref = TREE_OPERAND (val, 0);
3791 tree tem = maybe_fold_reference (ref, false);
3792 if (tem)
3794 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3795 gimple_debug_bind_set_value (stmt, tem);
3796 changed = true;
3800 break;
3802 default:;
/* The statement may again have been replaced; re-fetch it before
   touching its LHS.  */
3805 stmt = gsi_stmt (*gsi);
3807 /* Fold *& on the lhs. */
3808 if (gimple_has_lhs (stmt))
3810 tree lhs = gimple_get_lhs (stmt);
3811 if (lhs && REFERENCE_CLASS_P (lhs))
3813 tree new_lhs = maybe_fold_reference (lhs, true);
3814 if (new_lhs)
3816 gimple_set_lhs (stmt, new_lhs);
3817 changed = true;
3822 return changed;
3825 /* Valueziation callback that ends up not following SSA edges. */
3827 tree
3828 no_follow_ssa_edges (tree)
3830 return NULL_TREE;
3833 /* Valueization callback that ends up following single-use SSA edges only. */
3835 tree
3836 follow_single_use_edges (tree val)
3838 if (TREE_CODE (val) == SSA_NAME
3839 && !has_single_use (val))
3840 return NULL_TREE;
3841 return val;
3844 /* Fold the statement pointed to by GSI. In some cases, this function may
3845 replace the whole statement with a new one. Returns true iff folding
3846 makes any changes.
3847 The statement pointed to by GSI should be in valid gimple form but may
3848 be in unfolded state as resulting from for example constant propagation
3849 which can produce *&x = 0. */
3851 bool
3852 fold_stmt (gimple_stmt_iterator *gsi)
3854 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3857 bool
3858 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3860 return fold_stmt_1 (gsi, false, valueize);
3863 /* Perform the minimal folding on statement *GSI. Only operations like
3864 *&x created by constant propagation are handled. The statement cannot
3865 be replaced with a new one. Return true if the statement was
3866 changed, false otherwise.
3867 The statement *GSI should be in valid gimple form but may
3868 be in unfolded state as resulting from for example constant propagation
3869 which can produce *&x = 0. */
3871 bool
3872 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3874 gimple *stmt = gsi_stmt (*gsi);
3875 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3876 gcc_assert (gsi_stmt (*gsi) == stmt);
3877 return changed;
3880 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3881 if EXPR is null or we don't know how.
3882 If non-null, the result always has boolean type. */
3884 static tree
3885 canonicalize_bool (tree expr, bool invert)
3887 if (!expr)
3888 return NULL_TREE;
3889 else if (invert)
3891 if (integer_nonzerop (expr))
3892 return boolean_false_node;
3893 else if (integer_zerop (expr))
3894 return boolean_true_node;
3895 else if (TREE_CODE (expr) == SSA_NAME)
3896 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3897 build_int_cst (TREE_TYPE (expr), 0));
3898 else if (COMPARISON_CLASS_P (expr))
3899 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3900 boolean_type_node,
3901 TREE_OPERAND (expr, 0),
3902 TREE_OPERAND (expr, 1));
3903 else
3904 return NULL_TREE;
3906 else
3908 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3909 return expr;
3910 if (integer_nonzerop (expr))
3911 return boolean_true_node;
3912 else if (integer_zerop (expr))
3913 return boolean_false_node;
3914 else if (TREE_CODE (expr) == SSA_NAME)
3915 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3916 build_int_cst (TREE_TYPE (expr), 0));
3917 else if (COMPARISON_CLASS_P (expr))
3918 return fold_build2 (TREE_CODE (expr),
3919 boolean_type_node,
3920 TREE_OPERAND (expr, 0),
3921 TREE_OPERAND (expr, 1));
3922 else
3923 return NULL_TREE;
3927 /* Check to see if a boolean expression EXPR is logically equivalent to the
3928 comparison (OP1 CODE OP2). Check for various identities involving
3929 SSA_NAMEs. */
3931 static bool
3932 same_bool_comparison_p (const_tree expr, enum tree_code code,
3933 const_tree op1, const_tree op2)
3935 gimple *s;
3937 /* The obvious case. */
3938 if (TREE_CODE (expr) == code
3939 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3940 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3941 return true;
3943 /* Check for comparing (name, name != 0) and the case where expr
3944 is an SSA_NAME with a definition matching the comparison. */
3945 if (TREE_CODE (expr) == SSA_NAME
3946 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3948 if (operand_equal_p (expr, op1, 0))
3949 return ((code == NE_EXPR && integer_zerop (op2))
3950 || (code == EQ_EXPR && integer_nonzerop (op2)));
3951 s = SSA_NAME_DEF_STMT (expr);
3952 if (is_gimple_assign (s)
3953 && gimple_assign_rhs_code (s) == code
3954 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3955 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3956 return true;
3959 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3960 of name is a comparison, recurse. */
3961 if (TREE_CODE (op1) == SSA_NAME
3962 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3964 s = SSA_NAME_DEF_STMT (op1)
3965 if (is_gimple_assign (s)
3966 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3968 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) resp. (name == nonzero) is equivalent to name's own
   defining comparison; compare EXPR against that.  */
3969 if ((c == NE_EXPR && integer_zerop (op2))
3970 || (c == EQ_EXPR && integer_nonzerop (op2)))
3971 return same_bool_comparison_p (expr, c,
3972 gimple_assign_rhs1 (s),
3973 gimple_assign_rhs2 (s));
/* (name == 0) resp. (name != nonzero) is equivalent to the inverse of
   name's defining comparison.  */
3974 if ((c == EQ_EXPR && integer_zerop (op2))
3975 || (c == NE_EXPR && integer_nonzerop (op2)))
3976 return same_bool_comparison_p (expr,
3977 invert_tree_comparison (c, false),
3978 gimple_assign_rhs1 (s),
3979 gimple_assign_rhs2 (s));
3982 return false;
3985 /* Check to see if two boolean expressions OP1 and OP2 are logically
3986 equivalent. */
3988 static bool
3989 same_bool_result_p (const_tree op1, const_tree op2)
3991 /* Simple cases first. */
3992 if (operand_equal_p (op1, op2, 0))
3993 return true;
3995 /* Check the cases where at least one of the operands is a comparison.
3996 These are a bit smarter than operand_equal_p in that they apply some
3997 identifies on SSA_NAMEs. */
3998 if (COMPARISON_CLASS_P (op2)
3999 && same_bool_comparison_p (op1, TREE_CODE (op2),
4000 TREE_OPERAND (op2, 0),
4001 TREE_OPERAND (op2, 1)))
4002 return true;
4003 if (COMPARISON_CLASS_P (op1)
4004 && same_bool_comparison_p (op2, TREE_CODE (op1),
4005 TREE_OPERAND (op1, 0),
4006 TREE_OPERAND (op1, 1)))
4007 return true;
4009 /* Default case. */
4010 return false;
4013 /* Forward declarations for some mutually recursive functions. */
/* These six helpers recurse into each other while combining two
   comparisons under AND or OR; see the definitions below.  */
4015 static tree
4016 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4017 enum tree_code code2, tree op2a, tree op2b);
4018 static tree
4019 and_var_with_comparison (tree var, bool invert,
4020 enum tree_code code2, tree op2a, tree op2b);
4021 static tree
4022 and_var_with_comparison_1 (gimple *stmt,
4023 enum tree_code code2, tree op2a, tree op2b);
4024 static tree
4025 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4026 enum tree_code code2, tree op2a, tree op2b);
4027 static tree
4028 or_var_with_comparison (tree var, bool invert,
4029 enum tree_code code2, tree op2a, tree op2b);
4030 static tree
4031 or_var_with_comparison_1 (gimple *stmt,
4032 enum tree_code code2, tree op2a, tree op2b);
4034 /* Helper function for and_comparisons_1: try to simplify the AND of the
4035 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4036 If INVERT is true, invert the value of the VAR before doing the AND.
4037 Return NULL_EXPR if we can't simplify this to a single expression. */
4039 static tree
4040 and_var_with_comparison (tree var, bool invert,
4041 enum tree_code code2, tree op2a, tree op2b)
4043 tree t;
4044 gimple *stmt = SSA_NAME_DEF_STMT (var);
4046 /* We can only deal with variables whose definitions are assignments. */
4047 if (!is_gimple_assign (stmt))
4048 return NULL_TREE;
4050 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4051 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4052 Then we only have to consider the simpler non-inverted cases. */
4053 if (invert)
4054 t = or_var_with_comparison_1 (stmt,
4055 invert_tree_comparison (code2, false),
4056 op2a, op2b);
4057 else
4058 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4059 return canonicalize_bool (t, invert);
4062 /* Try to simplify the AND of the ssa variable defined by the assignment
4063 STMT with the comparison specified by (OP2A CODE2 OP2B).
4064 Return NULL_EXPR if we can't simplify this to a single expression. */
4066 static tree
4067 and_var_with_comparison_1 (gimple *stmt,
4068 enum tree_code code2, tree op2a, tree op2b)
/* The variable being ANDed is the LHS of its defining assignment.  */
4070 tree var = gimple_assign_lhs (stmt);
4071 tree true_test_var = NULL_TREE;
4072 tree false_test_var = NULL_TREE;
4073 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4075 /* Check for identities like (var AND (var == 0)) => false. */
4076 if (TREE_CODE (op2a) == SSA_NAME
4077 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) and (op2a == nonzero) test op2a for truth ...  */
4079 if ((code2 == NE_EXPR && integer_zerop (op2b))
4080 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4082 true_test_var = op2a;
4083 if (var == true_test_var)
4084 return var;
/* ... while (op2a == 0) and (op2a != nonzero) test it for falsehood.  */
4086 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4087 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4089 false_test_var = op2a;
4090 if (var == false_test_var)
4091 return boolean_false_node;
4095 /* If the definition is a comparison, recurse on it. */
4096 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4098 tree t = and_comparisons_1 (innercode,
4099 gimple_assign_rhs1 (stmt),
4100 gimple_assign_rhs2 (stmt),
4101 code2,
4102 op2a,
4103 op2b);
4104 if (t)
4105 return t;
4108 /* If the definition is an AND or OR expression, we may be able to
4109 simplify by reassociating. */
4110 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4111 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4113 tree inner1 = gimple_assign_rhs1 (stmt);
4114 tree inner2 = gimple_assign_rhs2 (stmt);
4115 gimple *s;
4116 tree t;
4117 tree partial = NULL_TREE;
4118 bool is_and = (innercode == BIT_AND_EXPR);
4120 /* Check for boolean identities that don't require recursive examination
4121 of inner1/inner2:
4122 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4123 inner1 AND (inner1 OR inner2) => inner1
4124 !inner1 AND (inner1 AND inner2) => false
4125 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4126 Likewise for similar cases involving inner2. */
4127 if (inner1 == true_test_var)
4128 return (is_and ? var : inner1);
4129 else if (inner2 == true_test_var)
4130 return (is_and ? var : inner2);
4131 else if (inner1 == false_test_var)
4132 return (is_and
4133 ? boolean_false_node
4134 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4135 else if (inner2 == false_test_var)
4136 return (is_and
4137 ? boolean_false_node
4138 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4140 /* Next, redistribute/reassociate the AND across the inner tests.
4141 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4142 if (TREE_CODE (inner1) == SSA_NAME
4143 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4144 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4145 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4146 gimple_assign_rhs1 (s),
4147 gimple_assign_rhs2 (s),
4148 code2, op2a, op2b)))
4150 /* Handle the AND case, where we are reassociating:
4151 (inner1 AND inner2) AND (op2a code2 op2b)
4152 => (t AND inner2)
4153 If the partial result t is a constant, we win. Otherwise
4154 continue on to try reassociating with the other inner test. */
4155 if (is_and)
4157 if (integer_onep (t))
4158 return inner2;
4159 else if (integer_zerop (t))
4160 return boolean_false_node;
4163 /* Handle the OR case, where we are redistributing:
4164 (inner1 OR inner2) AND (op2a code2 op2b)
4165 => (t OR (inner2 AND (op2a code2 op2b))) */
4166 else if (integer_onep (t))
4167 return boolean_true_node;
4169 /* Save partial result for later. */
4170 partial = t;
4173 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4174 if (TREE_CODE (inner2) == SSA_NAME
4175 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4176 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4177 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4178 gimple_assign_rhs1 (s),
4179 gimple_assign_rhs2 (s),
4180 code2, op2a, op2b)))
4182 /* Handle the AND case, where we are reassociating:
4183 (inner1 AND inner2) AND (op2a code2 op2b)
4184 => (inner1 AND t) */
4185 if (is_and)
4187 if (integer_onep (t))
4188 return inner1;
4189 else if (integer_zerop (t))
4190 return boolean_false_node;
4191 /* If both are the same, we can apply the identity
4192 (x AND x) == x. */
4193 else if (partial && same_bool_result_p (t, partial))
4194 return t;
4197 /* Handle the OR case, where we are redistributing:
4198 (inner1 OR inner2) AND (op2a code2 op2b)
4199 => (t OR (inner1 AND (op2a code2 op2b)))
4200 => (t OR partial) */
4201 else
4203 if (integer_onep (t))
4204 return boolean_true_node;
4205 else if (partial)
4207 /* We already got a simplification for the other
4208 operand to the redistributed OR expression. The
4209 interesting case is when at least one is false.
4210 Or, if both are the same, we can apply the identity
4211 (x OR x) == x. */
4212 if (integer_zerop (partial))
4213 return t;
4214 else if (integer_zerop (t))
4215 return partial;
4216 else if (same_bool_result_p (t, partial))
4217 return t;
/* No single-expression simplification found.  */
4222 return NULL_TREE;
4225 /* Try to simplify the AND of two comparisons defined by
4226 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4227 If this can be done without constructing an intermediate value,
4228 return the resulting tree; otherwise NULL_TREE is returned.
4229 This function is deliberately asymmetric as it recurses on SSA_DEFs
4230 in the first comparison but not the second. */
4232 static tree
4233 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4234 enum tree_code code2, tree op2a, tree op2b)
4236 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4238 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4239 if (operand_equal_p (op1a, op2a, 0)
4240 && operand_equal_p (op1b, op2b, 0))
4242 /* Result will be either NULL_TREE, or a combined comparison. */
4243 tree t = combine_comparisons (UNKNOWN_LOCATION,
4244 TRUTH_ANDIF_EXPR, code1, code2,
4245 truth_type, op1a, op1b);
4246 if (t)
4247 return t;
4250 /* Likewise the swapped case of the above. */
4251 if (operand_equal_p (op1a, op2b, 0)
4252 && operand_equal_p (op1b, op2a, 0))
4254 /* Result will be either NULL_TREE, or a combined comparison. */
4255 tree t = combine_comparisons (UNKNOWN_LOCATION,
4256 TRUTH_ANDIF_EXPR, code1,
4257 swap_tree_comparison (code2),
4258 truth_type, op1a, op1b);
4259 if (t)
4260 return t;
4263 /* If both comparisons are of the same value against constants, we might
4264 be able to merge them. */
4265 if (operand_equal_p (op1a, op2a, 0)
4266 && TREE_CODE (op1b) == INTEGER_CST
4267 && TREE_CODE (op2b) == INTEGER_CST)
4269 int cmp = tree_int_cst_compare (op1b, op2b);
4271 /* If we have (op1a == op1b), we should either be able to
4272 return that or FALSE, depending on whether the constant op1b
4273 also satisfies the other comparison against op2b. */
4274 if (code1 == EQ_EXPR)
4276 bool done = true;
4277 bool val;
4278 switch (code2)
4280 case EQ_EXPR: val = (cmp == 0); break;
4281 case NE_EXPR: val = (cmp != 0); break;
4282 case LT_EXPR: val = (cmp < 0); break;
4283 case GT_EXPR: val = (cmp > 0); break;
4284 case LE_EXPR: val = (cmp <= 0); break;
4285 case GE_EXPR: val = (cmp >= 0); break;
4286 default: done = false;
4288 if (done)
4290 if (val)
4291 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4292 else
4293 return boolean_false_node;
4296 /* Likewise if the second comparison is an == comparison. */
4297 else if (code2 == EQ_EXPR)
4299 bool done = true;
4300 bool val;
4301 switch (code1)
4303 case EQ_EXPR: val = (cmp == 0); break;
4304 case NE_EXPR: val = (cmp != 0); break;
4305 case LT_EXPR: val = (cmp > 0); break;
4306 case GT_EXPR: val = (cmp < 0); break;
4307 case LE_EXPR: val = (cmp >= 0); break;
4308 case GE_EXPR: val = (cmp <= 0); break;
4309 default: done = false;
4311 if (done)
4313 if (val)
4314 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4315 else
4316 return boolean_false_node;
4320 /* Same business with inequality tests. */
4321 else if (code1 == NE_EXPR)
4323 bool val;
4324 switch (code2)
4326 case EQ_EXPR: val = (cmp != 0); break;
4327 case NE_EXPR: val = (cmp == 0); break;
4328 case LT_EXPR: val = (cmp >= 0); break;
4329 case GT_EXPR: val = (cmp <= 0); break;
4330 case LE_EXPR: val = (cmp > 0); break;
4331 case GE_EXPR: val = (cmp < 0); break;
4332 default:
4333 val = false;
4335 if (val)
4336 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4338 else if (code2 == NE_EXPR)
4340 bool val;
4341 switch (code1)
4343 case EQ_EXPR: val = (cmp == 0); break;
4344 case NE_EXPR: val = (cmp != 0); break;
4345 case LT_EXPR: val = (cmp <= 0); break;
4346 case GT_EXPR: val = (cmp >= 0); break;
4347 case LE_EXPR: val = (cmp < 0); break;
4348 case GE_EXPR: val = (cmp > 0); break;
4349 default:
4350 val = false;
4352 if (val)
4353 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4356 /* Chose the more restrictive of two < or <= comparisons. */
4357 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4358 && (code2 == LT_EXPR || code2 == LE_EXPR))
4360 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4361 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4362 else
4363 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4366 /* Likewise chose the more restrictive of two > or >= comparisons. */
4367 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4368 && (code2 == GT_EXPR || code2 == GE_EXPR))
4370 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4371 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4372 else
4373 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4376 /* Check for singleton ranges. */
4377 else if (cmp == 0
4378 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4379 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4380 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4382 /* Check for disjoint ranges. */
4383 else if (cmp <= 0
4384 && (code1 == LT_EXPR || code1 == LE_EXPR)
4385 && (code2 == GT_EXPR || code2 == GE_EXPR))
4386 return boolean_false_node;
4387 else if (cmp >= 0
4388 && (code1 == GT_EXPR || code1 == GE_EXPR)
4389 && (code2 == LT_EXPR || code2 == LE_EXPR))
4390 return boolean_false_node;
4393 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4394 NAME's definition is a truth value. See if there are any simplifications
4395 that can be done against the NAME's definition. */
4396 if (TREE_CODE (op1a) == SSA_NAME
4397 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4398 && (integer_zerop (op1b) || integer_onep (op1b)))
4400 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4401 || (code1 == NE_EXPR && integer_onep (op1b)));
4402 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4403 switch (gimple_code (stmt))
4405 case GIMPLE_ASSIGN:
4406 /* Try to simplify by copy-propagating the definition. */
4407 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4409 case GIMPLE_PHI:
4410 /* If every argument to the PHI produces the same result when
4411 ANDed with the second comparison, we win.
4412 Do not do this unless the type is bool since we need a bool
4413 result here anyway. */
4414 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4416 tree result = NULL_TREE;
4417 unsigned i;
4418 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4420 tree arg = gimple_phi_arg_def (stmt, i);
4422 /* If this PHI has itself as an argument, ignore it.
4423 If all the other args produce the same result,
4424 we're still OK. */
4425 if (arg == gimple_phi_result (stmt))
4426 continue;
4427 else if (TREE_CODE (arg) == INTEGER_CST)
4429 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4431 if (!result)
4432 result = boolean_false_node;
4433 else if (!integer_zerop (result))
4434 return NULL_TREE;
4436 else if (!result)
4437 result = fold_build2 (code2, boolean_type_node,
4438 op2a, op2b);
4439 else if (!same_bool_comparison_p (result,
4440 code2, op2a, op2b))
4441 return NULL_TREE;
4443 else if (TREE_CODE (arg) == SSA_NAME
4444 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4446 tree temp;
4447 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4448 /* In simple cases we can look through PHI nodes,
4449 but we have to be careful with loops.
4450 See PR49073. */
4451 if (! dom_info_available_p (CDI_DOMINATORS)
4452 || gimple_bb (def_stmt) == gimple_bb (stmt)
4453 || dominated_by_p (CDI_DOMINATORS,
4454 gimple_bb (def_stmt),
4455 gimple_bb (stmt)))
4456 return NULL_TREE;
4457 temp = and_var_with_comparison (arg, invert, code2,
4458 op2a, op2b);
4459 if (!temp)
4460 return NULL_TREE;
4461 else if (!result)
4462 result = temp;
4463 else if (!same_bool_result_p (result, temp))
4464 return NULL_TREE;
4466 else
4467 return NULL_TREE;
4469 return result;
4472 default:
4473 break;
4476 return NULL_TREE;
4479 /* Try to simplify the AND of two comparisons, specified by
4480 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4481 If this can be simplified to a single expression (without requiring
4482 introducing more SSA variables to hold intermediate values),
4483 return the resulting tree. Otherwise return NULL_TREE.
4484 If the result expression is non-null, it has boolean type. */
4486 tree
4487 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4488 enum tree_code code2, tree op2a, tree op2b)
4490 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4491 if (t)
4492 return t;
4493 else
4494 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4497 /* Helper function for or_comparisons_1: try to simplify the OR of the
4498 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4499 If INVERT is true, invert the value of VAR before doing the OR.
4500 Return NULL_EXPR if we can't simplify this to a single expression. */
4502 static tree
4503 or_var_with_comparison (tree var, bool invert,
4504 enum tree_code code2, tree op2a, tree op2b)
4506 tree t;
4507 gimple *stmt = SSA_NAME_DEF_STMT (var);
4509 /* We can only deal with variables whose definitions are assignments. */
4510 if (!is_gimple_assign (stmt))
4511 return NULL_TREE;
4513 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4514 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4515 Then we only have to consider the simpler non-inverted cases. */
4516 if (invert)
4517 t = and_var_with_comparison_1 (stmt,
4518 invert_tree_comparison (code2, false),
4519 op2a, op2b);
4520 else
4521 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4522 return canonicalize_bool (t, invert);
4525 /* Try to simplify the OR of the ssa variable defined by the assignment
4526 STMT with the comparison specified by (OP2A CODE2 OP2B).
4527 Return NULL_EXPR if we can't simplify this to a single expression. */
4529 static tree
4530 or_var_with_comparison_1 (gimple *stmt,
4531 enum tree_code code2, tree op2a, tree op2b)
4533 tree var = gimple_assign_lhs (stmt);
4534 tree true_test_var = NULL_TREE;
4535 tree false_test_var = NULL_TREE;
4536 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4538 /* Check for identities like (var OR (var != 0)) => true . */
4539 if (TREE_CODE (op2a) == SSA_NAME
4540 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4542 if ((code2 == NE_EXPR && integer_zerop (op2b))
4543 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4545 true_test_var = op2a;
4546 if (var == true_test_var)
4547 return var;
4549 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4550 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4552 false_test_var = op2a;
4553 if (var == false_test_var)
4554 return boolean_true_node;
4558 /* If the definition is a comparison, recurse on it. */
4559 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4561 tree t = or_comparisons_1 (innercode,
4562 gimple_assign_rhs1 (stmt),
4563 gimple_assign_rhs2 (stmt),
4564 code2,
4565 op2a,
4566 op2b);
4567 if (t)
4568 return t;
4571 /* If the definition is an AND or OR expression, we may be able to
4572 simplify by reassociating. */
4573 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4574 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4576 tree inner1 = gimple_assign_rhs1 (stmt);
4577 tree inner2 = gimple_assign_rhs2 (stmt);
4578 gimple *s;
4579 tree t;
4580 tree partial = NULL_TREE;
4581 bool is_or = (innercode == BIT_IOR_EXPR);
4583 /* Check for boolean identities that don't require recursive examination
4584 of inner1/inner2:
4585 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4586 inner1 OR (inner1 AND inner2) => inner1
4587 !inner1 OR (inner1 OR inner2) => true
4588 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4590 if (inner1 == true_test_var)
4591 return (is_or ? var : inner1);
4592 else if (inner2 == true_test_var)
4593 return (is_or ? var : inner2);
4594 else if (inner1 == false_test_var)
4595 return (is_or
4596 ? boolean_true_node
4597 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4598 else if (inner2 == false_test_var)
4599 return (is_or
4600 ? boolean_true_node
4601 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4603 /* Next, redistribute/reassociate the OR across the inner tests.
4604 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4605 if (TREE_CODE (inner1) == SSA_NAME
4606 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4607 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4608 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4609 gimple_assign_rhs1 (s),
4610 gimple_assign_rhs2 (s),
4611 code2, op2a, op2b)))
4613 /* Handle the OR case, where we are reassociating:
4614 (inner1 OR inner2) OR (op2a code2 op2b)
4615 => (t OR inner2)
4616 If the partial result t is a constant, we win. Otherwise
4617 continue on to try reassociating with the other inner test. */
4618 if (is_or)
4620 if (integer_onep (t))
4621 return boolean_true_node;
4622 else if (integer_zerop (t))
4623 return inner2;
4626 /* Handle the AND case, where we are redistributing:
4627 (inner1 AND inner2) OR (op2a code2 op2b)
4628 => (t AND (inner2 OR (op2a code op2b))) */
4629 else if (integer_zerop (t))
4630 return boolean_false_node;
4632 /* Save partial result for later. */
4633 partial = t;
4636 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4637 if (TREE_CODE (inner2) == SSA_NAME
4638 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4639 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4640 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4641 gimple_assign_rhs1 (s),
4642 gimple_assign_rhs2 (s),
4643 code2, op2a, op2b)))
4645 /* Handle the OR case, where we are reassociating:
4646 (inner1 OR inner2) OR (op2a code2 op2b)
4647 => (inner1 OR t)
4648 => (t OR partial) */
4649 if (is_or)
4651 if (integer_zerop (t))
4652 return inner1;
4653 else if (integer_onep (t))
4654 return boolean_true_node;
4655 /* If both are the same, we can apply the identity
4656 (x OR x) == x. */
4657 else if (partial && same_bool_result_p (t, partial))
4658 return t;
4661 /* Handle the AND case, where we are redistributing:
4662 (inner1 AND inner2) OR (op2a code2 op2b)
4663 => (t AND (inner1 OR (op2a code2 op2b)))
4664 => (t AND partial) */
4665 else
4667 if (integer_zerop (t))
4668 return boolean_false_node;
4669 else if (partial)
4671 /* We already got a simplification for the other
4672 operand to the redistributed AND expression. The
4673 interesting case is when at least one is true.
4674 Or, if both are the same, we can apply the identity
4675 (x AND x) == x. */
4676 if (integer_onep (partial))
4677 return t;
4678 else if (integer_onep (t))
4679 return partial;
4680 else if (same_bool_result_p (t, partial))
4681 return t;
4686 return NULL_TREE;
4689 /* Try to simplify the OR of two comparisons defined by
4690 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4691 If this can be done without constructing an intermediate value,
4692 return the resulting tree; otherwise NULL_TREE is returned.
4693 This function is deliberately asymmetric as it recurses on SSA_DEFs
4694 in the first comparison but not the second. */
4696 static tree
4697 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4698 enum tree_code code2, tree op2a, tree op2b)
4700 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4702 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4703 if (operand_equal_p (op1a, op2a, 0)
4704 && operand_equal_p (op1b, op2b, 0))
4706 /* Result will be either NULL_TREE, or a combined comparison. */
4707 tree t = combine_comparisons (UNKNOWN_LOCATION,
4708 TRUTH_ORIF_EXPR, code1, code2,
4709 truth_type, op1a, op1b);
4710 if (t)
4711 return t;
4714 /* Likewise the swapped case of the above. */
4715 if (operand_equal_p (op1a, op2b, 0)
4716 && operand_equal_p (op1b, op2a, 0))
4718 /* Result will be either NULL_TREE, or a combined comparison. */
4719 tree t = combine_comparisons (UNKNOWN_LOCATION,
4720 TRUTH_ORIF_EXPR, code1,
4721 swap_tree_comparison (code2),
4722 truth_type, op1a, op1b);
4723 if (t)
4724 return t;
4727 /* If both comparisons are of the same value against constants, we might
4728 be able to merge them. */
4729 if (operand_equal_p (op1a, op2a, 0)
4730 && TREE_CODE (op1b) == INTEGER_CST
4731 && TREE_CODE (op2b) == INTEGER_CST)
4733 int cmp = tree_int_cst_compare (op1b, op2b);
4735 /* If we have (op1a != op1b), we should either be able to
4736 return that or TRUE, depending on whether the constant op1b
4737 also satisfies the other comparison against op2b. */
4738 if (code1 == NE_EXPR)
4740 bool done = true;
4741 bool val;
4742 switch (code2)
4744 case EQ_EXPR: val = (cmp == 0); break;
4745 case NE_EXPR: val = (cmp != 0); break;
4746 case LT_EXPR: val = (cmp < 0); break;
4747 case GT_EXPR: val = (cmp > 0); break;
4748 case LE_EXPR: val = (cmp <= 0); break;
4749 case GE_EXPR: val = (cmp >= 0); break;
4750 default: done = false;
4752 if (done)
4754 if (val)
4755 return boolean_true_node;
4756 else
4757 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4760 /* Likewise if the second comparison is a != comparison. */
4761 else if (code2 == NE_EXPR)
4763 bool done = true;
4764 bool val;
4765 switch (code1)
4767 case EQ_EXPR: val = (cmp == 0); break;
4768 case NE_EXPR: val = (cmp != 0); break;
4769 case LT_EXPR: val = (cmp > 0); break;
4770 case GT_EXPR: val = (cmp < 0); break;
4771 case LE_EXPR: val = (cmp >= 0); break;
4772 case GE_EXPR: val = (cmp <= 0); break;
4773 default: done = false;
4775 if (done)
4777 if (val)
4778 return boolean_true_node;
4779 else
4780 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4784 /* See if an equality test is redundant with the other comparison. */
4785 else if (code1 == EQ_EXPR)
4787 bool val;
4788 switch (code2)
4790 case EQ_EXPR: val = (cmp == 0); break;
4791 case NE_EXPR: val = (cmp != 0); break;
4792 case LT_EXPR: val = (cmp < 0); break;
4793 case GT_EXPR: val = (cmp > 0); break;
4794 case LE_EXPR: val = (cmp <= 0); break;
4795 case GE_EXPR: val = (cmp >= 0); break;
4796 default:
4797 val = false;
4799 if (val)
4800 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4802 else if (code2 == EQ_EXPR)
4804 bool val;
4805 switch (code1)
4807 case EQ_EXPR: val = (cmp == 0); break;
4808 case NE_EXPR: val = (cmp != 0); break;
4809 case LT_EXPR: val = (cmp > 0); break;
4810 case GT_EXPR: val = (cmp < 0); break;
4811 case LE_EXPR: val = (cmp >= 0); break;
4812 case GE_EXPR: val = (cmp <= 0); break;
4813 default:
4814 val = false;
4816 if (val)
4817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4820 /* Chose the less restrictive of two < or <= comparisons. */
4821 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4822 && (code2 == LT_EXPR || code2 == LE_EXPR))
4824 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4825 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4826 else
4827 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4830 /* Likewise chose the less restrictive of two > or >= comparisons. */
4831 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4832 && (code2 == GT_EXPR || code2 == GE_EXPR))
4834 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4835 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4836 else
4837 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4840 /* Check for singleton ranges. */
4841 else if (cmp == 0
4842 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4843 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4844 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4846 /* Check for less/greater pairs that don't restrict the range at all. */
4847 else if (cmp >= 0
4848 && (code1 == LT_EXPR || code1 == LE_EXPR)
4849 && (code2 == GT_EXPR || code2 == GE_EXPR))
4850 return boolean_true_node;
4851 else if (cmp <= 0
4852 && (code1 == GT_EXPR || code1 == GE_EXPR)
4853 && (code2 == LT_EXPR || code2 == LE_EXPR))
4854 return boolean_true_node;
4857 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4858 NAME's definition is a truth value. See if there are any simplifications
4859 that can be done against the NAME's definition. */
4860 if (TREE_CODE (op1a) == SSA_NAME
4861 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4862 && (integer_zerop (op1b) || integer_onep (op1b)))
4864 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4865 || (code1 == NE_EXPR && integer_onep (op1b)));
4866 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4867 switch (gimple_code (stmt))
4869 case GIMPLE_ASSIGN:
4870 /* Try to simplify by copy-propagating the definition. */
4871 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4873 case GIMPLE_PHI:
4874 /* If every argument to the PHI produces the same result when
4875 ORed with the second comparison, we win.
4876 Do not do this unless the type is bool since we need a bool
4877 result here anyway. */
4878 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4880 tree result = NULL_TREE;
4881 unsigned i;
4882 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4884 tree arg = gimple_phi_arg_def (stmt, i);
4886 /* If this PHI has itself as an argument, ignore it.
4887 If all the other args produce the same result,
4888 we're still OK. */
4889 if (arg == gimple_phi_result (stmt))
4890 continue;
4891 else if (TREE_CODE (arg) == INTEGER_CST)
4893 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4895 if (!result)
4896 result = boolean_true_node;
4897 else if (!integer_onep (result))
4898 return NULL_TREE;
4900 else if (!result)
4901 result = fold_build2 (code2, boolean_type_node,
4902 op2a, op2b);
4903 else if (!same_bool_comparison_p (result,
4904 code2, op2a, op2b))
4905 return NULL_TREE;
4907 else if (TREE_CODE (arg) == SSA_NAME
4908 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4910 tree temp;
4911 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4912 /* In simple cases we can look through PHI nodes,
4913 but we have to be careful with loops.
4914 See PR49073. */
4915 if (! dom_info_available_p (CDI_DOMINATORS)
4916 || gimple_bb (def_stmt) == gimple_bb (stmt)
4917 || dominated_by_p (CDI_DOMINATORS,
4918 gimple_bb (def_stmt),
4919 gimple_bb (stmt)))
4920 return NULL_TREE;
4921 temp = or_var_with_comparison (arg, invert, code2,
4922 op2a, op2b);
4923 if (!temp)
4924 return NULL_TREE;
4925 else if (!result)
4926 result = temp;
4927 else if (!same_bool_result_p (result, temp))
4928 return NULL_TREE;
4930 else
4931 return NULL_TREE;
4933 return result;
4936 default:
4937 break;
4940 return NULL_TREE;
4943 /* Try to simplify the OR of two comparisons, specified by
4944 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4945 If this can be simplified to a single expression (without requiring
4946 introducing more SSA variables to hold intermediate values),
4947 return the resulting tree. Otherwise return NULL_TREE.
4948 If the result expression is non-null, it has boolean type. */
4950 tree
4951 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4952 enum tree_code code2, tree op2a, tree op2b)
4954 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4955 if (t)
4956 return t;
4957 else
4958 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4962 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4964 Either NULL_TREE, a simplified but non-constant or a constant
4965 is returned.
4967 ??? This should go into a gimple-fold-inline.h file to be eventually
4968 privatized with the single valueize function used in the various TUs
4969 to avoid the indirect function call overhead. */
4971 tree
4972 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4973 tree (*gvalueize) (tree))
4975 code_helper rcode;
4976 tree ops[3] = {};
4977 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4978 edges if there are intermediate VARYING defs. For this reason
4979 do not follow SSA edges here even though SCCVN can technically
4980 just deal fine with that. */
4981 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4983 tree res = NULL_TREE;
4984 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4985 res = ops[0];
4986 else if (mprts_hook)
4987 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4988 if (res)
4990 if (dump_file && dump_flags & TDF_DETAILS)
4992 fprintf (dump_file, "Match-and-simplified ");
4993 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4994 fprintf (dump_file, " to ");
4995 print_generic_expr (dump_file, res, 0);
4996 fprintf (dump_file, "\n");
4998 return res;
5002 location_t loc = gimple_location (stmt);
5003 switch (gimple_code (stmt))
5005 case GIMPLE_ASSIGN:
5007 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5009 switch (get_gimple_rhs_class (subcode))
5011 case GIMPLE_SINGLE_RHS:
5013 tree rhs = gimple_assign_rhs1 (stmt);
5014 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5016 if (TREE_CODE (rhs) == SSA_NAME)
5018 /* If the RHS is an SSA_NAME, return its known constant value,
5019 if any. */
5020 return (*valueize) (rhs);
5022 /* Handle propagating invariant addresses into address
5023 operations. */
5024 else if (TREE_CODE (rhs) == ADDR_EXPR
5025 && !is_gimple_min_invariant (rhs))
5027 HOST_WIDE_INT offset = 0;
5028 tree base;
5029 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5030 &offset,
5031 valueize);
5032 if (base
5033 && (CONSTANT_CLASS_P (base)
5034 || decl_address_invariant_p (base)))
5035 return build_invariant_address (TREE_TYPE (rhs),
5036 base, offset);
5038 else if (TREE_CODE (rhs) == CONSTRUCTOR
5039 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5040 && (CONSTRUCTOR_NELTS (rhs)
5041 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5043 unsigned i;
5044 tree val, *vec;
5046 vec = XALLOCAVEC (tree,
5047 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5048 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5050 val = (*valueize) (val);
5051 if (TREE_CODE (val) == INTEGER_CST
5052 || TREE_CODE (val) == REAL_CST
5053 || TREE_CODE (val) == FIXED_CST)
5054 vec[i] = val;
5055 else
5056 return NULL_TREE;
5059 return build_vector (TREE_TYPE (rhs), vec);
5061 if (subcode == OBJ_TYPE_REF)
5063 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5064 /* If callee is constant, we can fold away the wrapper. */
5065 if (is_gimple_min_invariant (val))
5066 return val;
5069 if (kind == tcc_reference)
5071 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5072 || TREE_CODE (rhs) == REALPART_EXPR
5073 || TREE_CODE (rhs) == IMAGPART_EXPR)
5074 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5076 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5077 return fold_unary_loc (EXPR_LOCATION (rhs),
5078 TREE_CODE (rhs),
5079 TREE_TYPE (rhs), val);
5081 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5082 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5084 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5085 return fold_ternary_loc (EXPR_LOCATION (rhs),
5086 TREE_CODE (rhs),
5087 TREE_TYPE (rhs), val,
5088 TREE_OPERAND (rhs, 1),
5089 TREE_OPERAND (rhs, 2));
5091 else if (TREE_CODE (rhs) == MEM_REF
5092 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5094 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5095 if (TREE_CODE (val) == ADDR_EXPR
5096 && is_gimple_min_invariant (val))
5098 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5099 unshare_expr (val),
5100 TREE_OPERAND (rhs, 1));
5101 if (tem)
5102 rhs = tem;
5105 return fold_const_aggregate_ref_1 (rhs, valueize);
5107 else if (kind == tcc_declaration)
5108 return get_symbol_constant_value (rhs);
5109 return rhs;
5112 case GIMPLE_UNARY_RHS:
5113 return NULL_TREE;
5115 case GIMPLE_BINARY_RHS:
5116 /* Translate &x + CST into an invariant form suitable for
5117 further propagation. */
5118 if (subcode == POINTER_PLUS_EXPR)
5120 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5121 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5122 if (TREE_CODE (op0) == ADDR_EXPR
5123 && TREE_CODE (op1) == INTEGER_CST)
5125 tree off = fold_convert (ptr_type_node, op1);
5126 return build_fold_addr_expr_loc
5127 (loc,
5128 fold_build2 (MEM_REF,
5129 TREE_TYPE (TREE_TYPE (op0)),
5130 unshare_expr (op0), off));
5133 /* Canonicalize bool != 0 and bool == 0 appearing after
5134 valueization. While gimple_simplify handles this
5135 it can get confused by the ~X == 1 -> X == 0 transform
5136 which we cant reduce to a SSA name or a constant
5137 (and we have no way to tell gimple_simplify to not
5138 consider those transforms in the first place). */
5139 else if (subcode == EQ_EXPR
5140 || subcode == NE_EXPR)
5142 tree lhs = gimple_assign_lhs (stmt);
5143 tree op0 = gimple_assign_rhs1 (stmt);
5144 if (useless_type_conversion_p (TREE_TYPE (lhs),
5145 TREE_TYPE (op0)))
5147 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5148 op0 = (*valueize) (op0);
5149 if (TREE_CODE (op0) == INTEGER_CST)
5150 std::swap (op0, op1);
5151 if (TREE_CODE (op1) == INTEGER_CST
5152 && ((subcode == NE_EXPR && integer_zerop (op1))
5153 || (subcode == EQ_EXPR && integer_onep (op1))))
5154 return op0;
5157 return NULL_TREE;
5159 case GIMPLE_TERNARY_RHS:
5161 /* Handle ternary operators that can appear in GIMPLE form. */
5162 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5163 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5164 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5165 return fold_ternary_loc (loc, subcode,
5166 gimple_expr_type (stmt), op0, op1, op2);
5169 default:
5170 gcc_unreachable ();
5174 case GIMPLE_CALL:
5176 tree fn;
5177 gcall *call_stmt = as_a <gcall *> (stmt);
5179 if (gimple_call_internal_p (stmt))
5181 enum tree_code subcode = ERROR_MARK;
5182 switch (gimple_call_internal_fn (stmt))
5184 case IFN_UBSAN_CHECK_ADD:
5185 subcode = PLUS_EXPR;
5186 break;
5187 case IFN_UBSAN_CHECK_SUB:
5188 subcode = MINUS_EXPR;
5189 break;
5190 case IFN_UBSAN_CHECK_MUL:
5191 subcode = MULT_EXPR;
5192 break;
5193 default:
5194 return NULL_TREE;
5196 tree arg0 = gimple_call_arg (stmt, 0);
5197 tree arg1 = gimple_call_arg (stmt, 1);
5198 tree op0 = (*valueize) (arg0);
5199 tree op1 = (*valueize) (arg1);
5201 if (TREE_CODE (op0) != INTEGER_CST
5202 || TREE_CODE (op1) != INTEGER_CST)
5204 switch (subcode)
5206 case MULT_EXPR:
5207 /* x * 0 = 0 * x = 0 without overflow. */
5208 if (integer_zerop (op0) || integer_zerop (op1))
5209 return build_zero_cst (TREE_TYPE (arg0));
5210 break;
5211 case MINUS_EXPR:
5212 /* y - y = 0 without overflow. */
5213 if (operand_equal_p (op0, op1, 0))
5214 return build_zero_cst (TREE_TYPE (arg0));
5215 break;
5216 default:
5217 break;
5220 tree res
5221 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5222 if (res
5223 && TREE_CODE (res) == INTEGER_CST
5224 && !TREE_OVERFLOW (res))
5225 return res;
5226 return NULL_TREE;
5229 fn = (*valueize) (gimple_call_fn (stmt));
5230 if (TREE_CODE (fn) == ADDR_EXPR
5231 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5232 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5233 && gimple_builtin_call_types_compatible_p (stmt,
5234 TREE_OPERAND (fn, 0)))
5236 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5237 tree retval;
5238 unsigned i;
5239 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5240 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5241 retval = fold_builtin_call_array (loc,
5242 gimple_call_return_type (call_stmt),
5243 fn, gimple_call_num_args (stmt), args);
5244 if (retval)
5246 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5247 STRIP_NOPS (retval);
5248 retval = fold_convert (gimple_call_return_type (call_stmt),
5249 retval);
5251 return retval;
5253 return NULL_TREE;
5256 default:
5257 return NULL_TREE;
5261 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5262 Returns NULL_TREE if folding to a constant is not possible, otherwise
5263 returns a constant according to is_gimple_min_invariant. */
5265 tree
5266 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5268 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5269 if (res && is_gimple_min_invariant (res))
5270 return res;
5271 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   BIT_OFFSET is an in/out parameter accumulated in bits.  VALUEIZE, if
   non-NULL, is used to valueize SSA names appearing in a MEM_REF base.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 strip the indirection, leaving the underlying decl (if any).  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only handle references whose extent is exactly known; a
	 variable-position reference (size != max_size) cannot be
	 resolved to a constant offset.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      /* Already a constant initializer; use it directly.  */
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL is the decl the constructor
   came from, passed through for reference canonicalization.  Returns the
   folded value, a zero constant for memory not explicitly initialized, or
   NULL_TREE when the access cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  /* Recurse into the element initializer at the computed index.  */
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the decl the constructor came from.  Returns the folded
   value, a zero constant for memory not explicitly initialized, or
   NULL_TREE when the access spans multiple fields or only partially
   overlaps one.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) gets a dummy end of 0; the overlap
	 test below treats it as extending indefinitely.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  /* Fully contained in this field: fold within its value.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL is the decl CTOR came from and is
   used when canonicalizing the result.  Returns NULL_TREE when the reference
   cannot be folded to a constant.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Aggregate initializers: dispatch on the constructed type's shape.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then pick the requested part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5646 tree
5647 fold_const_aggregate_ref (tree t)
5649 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert the byte OFFSET and entry index TOKEN into a bit offset
     into the vtable's initializer array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */
  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5763 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5764 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5765 KNOWN_BINFO carries the binfo describing the true type of
5766 OBJ_TYPE_REF_OBJECT(REF).
5767 Set CAN_REFER if non-NULL to false if method
5768 is not referable or if the virtual table is ill-formed (such as rewriten
5769 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5771 tree
5772 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5773 bool *can_refer)
5775 unsigned HOST_WIDE_INT offset;
5776 tree v;
5778 v = BINFO_VTABLE (known_binfo);
5779 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5780 if (!v)
5781 return NULL_TREE;
5783 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5785 if (can_refer)
5786 *can_refer = false;
5787 return NULL_TREE;
5789 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must be exactly one element, i.e. the size of
	     the component type, to name the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
5912 /* Return true if CODE is an operation that when operating on signed
5913 integer types involves undefined behavior on overflow and the
5914 operation can be expressed with unsigned arithmetic. */
5916 bool
5917 arith_code_with_undefined_signed_overflow (tree_code code)
5919 switch (code)
5921 case PLUS_EXPR:
5922 case MINUS_EXPR:
5923 case MULT_EXPR:
5924 case NEGATE_EXPR:
5925 case POINTER_PLUS_EXPR:
5926 return true;
5927 default:
5928 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1..n-1) to the unsigned type; the
     conversion statements accumulate in STMTS before STMT itself.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Retarget STMT to a fresh unsigned SSA name; the original LHS is
     defined by the final conversion below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on two unsigned operands; the
     equivalent unsigned operation is plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
5970 /* The valueization hook we use for the gimple_build API simplification.
5971 This makes us match fold_buildN behavior by only combining with
5972 statements in the sequence(s) we are currently building. */
5974 static tree
5975 gimple_build_valueize (tree op)
5977 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5978 return op;
5979 return NULL_TREE;
5982 /* Build the expression CODE OP0 of type TYPE with location LOC,
5983 simplifying it first if possible. Returns the built
5984 expression value and appends statements possibly defining it
5985 to SEQ. */
5987 tree
5988 gimple_build (gimple_seq *seq, location_t loc,
5989 enum tree_code code, tree type, tree op0)
5991 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5992 if (!res)
5994 if (gimple_in_ssa_p (cfun))
5995 res = make_ssa_name (type);
5996 else
5997 res = create_tmp_reg (type);
5998 gimple *stmt;
5999 if (code == REALPART_EXPR
6000 || code == IMAGPART_EXPR
6001 || code == VIEW_CONVERT_EXPR)
6002 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6003 else
6004 stmt = gimple_build_assign (res, code, op0);
6005 gimple_set_location (stmt, loc);
6006 gimple_seq_add_stmt_without_update (seq, stmt);
6008 return res;
6011 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6012 simplifying it first if possible. Returns the built
6013 expression value and appends statements possibly defining it
6014 to SEQ. */
6016 tree
6017 gimple_build (gimple_seq *seq, location_t loc,
6018 enum tree_code code, tree type, tree op0, tree op1)
6020 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6021 if (!res)
6023 if (gimple_in_ssa_p (cfun))
6024 res = make_ssa_name (type);
6025 else
6026 res = create_tmp_reg (type);
6027 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6028 gimple_set_location (stmt, loc);
6029 gimple_seq_add_stmt_without_update (seq, stmt);
6031 return res;
6034 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6035 simplifying it first if possible. Returns the built
6036 expression value and appends statements possibly defining it
6037 to SEQ. */
6039 tree
6040 gimple_build (gimple_seq *seq, location_t loc,
6041 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6043 tree res = gimple_simplify (code, type, op0, op1, op2,
6044 seq, gimple_build_valueize);
6045 if (!res)
6047 if (gimple_in_ssa_p (cfun))
6048 res = make_ssa_name (type);
6049 else
6050 res = create_tmp_reg (type);
6051 gimple *stmt;
6052 if (code == BIT_FIELD_REF)
6053 stmt = gimple_build_assign (res, code,
6054 build3 (code, type, op0, op1, op2));
6055 else
6056 stmt = gimple_build_assign (res, code, op0, op1, op2);
6057 gimple_set_location (stmt, loc);
6058 gimple_seq_add_stmt_without_update (seq, stmt);
6060 return res;
6063 /* Build the call FN (ARG0) with a result of type TYPE
6064 (or no result if TYPE is void) with location LOC,
6065 simplifying it first if possible. Returns the built
6066 expression value (or NULL_TREE if TYPE is void) and appends
6067 statements possibly defining it to SEQ. */
6069 tree
6070 gimple_build (gimple_seq *seq, location_t loc,
6071 enum built_in_function fn, tree type, tree arg0)
6073 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6074 if (!res)
6076 tree decl = builtin_decl_implicit (fn);
6077 gimple *stmt = gimple_build_call (decl, 1, arg0);
6078 if (!VOID_TYPE_P (type))
6080 if (gimple_in_ssa_p (cfun))
6081 res = make_ssa_name (type);
6082 else
6083 res = create_tmp_reg (type);
6084 gimple_call_set_lhs (stmt, res);
6086 gimple_set_location (stmt, loc);
6087 gimple_seq_add_stmt_without_update (seq, stmt);
6089 return res;
6092 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6093 (or no result if TYPE is void) with location LOC,
6094 simplifying it first if possible. Returns the built
6095 expression value (or NULL_TREE if TYPE is void) and appends
6096 statements possibly defining it to SEQ. */
6098 tree
6099 gimple_build (gimple_seq *seq, location_t loc,
6100 enum built_in_function fn, tree type, tree arg0, tree arg1)
6102 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6103 if (!res)
6105 tree decl = builtin_decl_implicit (fn);
6106 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6107 if (!VOID_TYPE_P (type))
6109 if (gimple_in_ssa_p (cfun))
6110 res = make_ssa_name (type);
6111 else
6112 res = create_tmp_reg (type);
6113 gimple_call_set_lhs (stmt, res);
6115 gimple_set_location (stmt, loc);
6116 gimple_seq_add_stmt_without_update (seq, stmt);
6118 return res;
6121 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6122 (or no result if TYPE is void) with location LOC,
6123 simplifying it first if possible. Returns the built
6124 expression value (or NULL_TREE if TYPE is void) and appends
6125 statements possibly defining it to SEQ. */
6127 tree
6128 gimple_build (gimple_seq *seq, location_t loc,
6129 enum built_in_function fn, tree type,
6130 tree arg0, tree arg1, tree arg2)
6132 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6133 seq, gimple_build_valueize);
6134 if (!res)
6136 tree decl = builtin_decl_implicit (fn);
6137 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6138 if (!VOID_TYPE_P (type))
6140 if (gimple_in_ssa_p (cfun))
6141 res = make_ssa_name (type);
6142 else
6143 res = create_tmp_reg (type);
6144 gimple_call_set_lhs (stmt, res);
6146 gimple_set_location (stmt, loc);
6147 gimple_seq_add_stmt_without_update (seq, stmt);
6149 return res;
6152 /* Build the conversion (TYPE) OP with a result of type TYPE
6153 with location LOC if such conversion is neccesary in GIMPLE,
6154 simplifying it first.
6155 Returns the built expression value and appends
6156 statements possibly defining it to SEQ. */
6158 tree
6159 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6161 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6162 return op;
6163 return gimple_build (seq, loc, NOP_EXPR, type, op);
6166 /* Build the conversion (ptrofftype) OP with a result of a type
6167 compatible with ptrofftype with location LOC if such conversion
6168 is neccesary in GIMPLE, simplifying it first.
6169 Returns the built expression value and appends
6170 statements possibly defining it to SEQ. */
6172 tree
6173 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6175 if (ptrofftype_p (TREE_TYPE (op)))
6176 return op;
6177 return gimple_convert (seq, loc, sizetype, op);
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  /* Dispatch on the shape of the RHS to the matching fold-const
     tree-level predicate.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary predicate exists; conservatively answer "unknown".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
6214 /* Return true if return value of call STMT is known to be non-negative.
6215 If the return value is based on the assumption that signed overflow is
6216 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6217 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6219 static bool
6220 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6221 int depth)
6223 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6224 gimple_call_arg (stmt, 0) : NULL_TREE;
6225 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6226 gimple_call_arg (stmt, 1) : NULL_TREE;
6228 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6229 gimple_call_combined_fn (stmt),
6230 arg0,
6231 arg1,
6232 strict_overflow_p, depth);
6235 /* Return true if return value of call STMT is known to be non-negative.
6236 If the return value is based on the assumption that signed overflow is
6237 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6238 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6240 static bool
6241 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6242 int depth)
6244 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6246 tree arg = gimple_phi_arg_def (stmt, i);
6247 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6248 return false;
6250 return true;
6253 /* Return true if STMT is known to compute a non-negative value.
6254 If the return value is based on the assumption that signed overflow is
6255 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6256 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6258 bool
6259 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6260 int depth)
6262 switch (gimple_code (stmt))
6264 case GIMPLE_ASSIGN:
6265 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6266 depth);
6267 case GIMPLE_CALL:
6268 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6269 depth);
6270 case GIMPLE_PHI:
6271 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6272 depth);
6273 default:
6274 return false;
6278 /* Return true if the floating-point value computed by assignment STMT
6279 is known to have an integer value. We also allow +Inf, -Inf and NaN
6280 to be considered integer values. Return false for signaling NaN.
6282 DEPTH is the current nesting depth of the query. */
6284 static bool
6285 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6287 enum tree_code code = gimple_assign_rhs_code (stmt);
6288 switch (get_gimple_rhs_class (code))
6290 case GIMPLE_UNARY_RHS:
6291 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6292 gimple_assign_rhs1 (stmt), depth);
6293 case GIMPLE_BINARY_RHS:
6294 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6295 gimple_assign_rhs1 (stmt),
6296 gimple_assign_rhs2 (stmt), depth);
6297 case GIMPLE_TERNARY_RHS:
6298 return false;
6299 case GIMPLE_SINGLE_RHS:
6300 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6301 case GIMPLE_INVALID_RHS:
6302 break;
6304 gcc_unreachable ();
6307 /* Return true if the floating-point value computed by call STMT is known
6308 to have an integer value. We also allow +Inf, -Inf and NaN to be
6309 considered integer values. Return false for signaling NaN.
6311 DEPTH is the current nesting depth of the query. */
6313 static bool
6314 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6316 tree arg0 = (gimple_call_num_args (stmt) > 0
6317 ? gimple_call_arg (stmt, 0)
6318 : NULL_TREE);
6319 tree arg1 = (gimple_call_num_args (stmt) > 1
6320 ? gimple_call_arg (stmt, 1)
6321 : NULL_TREE);
6322 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6323 arg0, arg1, depth);
6326 /* Return true if the floating-point result of phi STMT is known to have
6327 an integer value. We also allow +Inf, -Inf and NaN to be considered
6328 integer values. Return false for signaling NaN.
6330 DEPTH is the current nesting depth of the query. */
6332 static bool
6333 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6335 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6337 tree arg = gimple_phi_arg_def (stmt, i);
6338 if (!integer_valued_real_single_p (arg, depth + 1))
6339 return false;
6341 return true;
6344 /* Return true if the floating-point value computed by STMT is known
6345 to have an integer value. We also allow +Inf, -Inf and NaN to be
6346 considered integer values. Return false for signaling NaN.
6348 DEPTH is the current nesting depth of the query. */
6350 bool
6351 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6353 switch (gimple_code (stmt))
6355 case GIMPLE_ASSIGN:
6356 return gimple_assign_integer_valued_real_p (stmt, depth);
6357 case GIMPLE_CALL:
6358 return gimple_call_integer_valued_real_p (stmt, depth);
6359 case GIMPLE_PHI:
6360 return gimple_phi_integer_valued_real_p (stmt, depth);
6361 default:
6362 return false;